def alphabeta(node, depth, alpha=float("-inf"), beta=float("inf"), cache=None):
    """Alpha-beta minimax search over board states, memoized in `cache`.

    Returns the minimax score of `node` searched to `depth` plies.
    Cached entries are (depth, value) pairs and are reused only when they
    were computed at a depth at least as deep as the current request.
    White is the maximizing side (`node.white_plays`).
    """
    if cache is None:
        cache = Cache(1000)

    hit = cache.get(node)
    if hit is not None and hit[0] >= depth:
        return hit[1]

    children = [move.get_board() for move in get_moves(node)]
    if depth == 0 or not children:
        # Leaf: search budget exhausted, or no legal moves remain.
        return score_board(node)

    if node.white_plays:
        # Maximizing side: tighten the lower bound as children are scored.
        best = alpha
        for child in children:
            best = max(best, alphabeta(child, depth - 1, best, beta, cache=cache))
            if beta <= best:
                break  # beta cutoff
    else:
        # Minimizing side: tighten the upper bound.
        best = beta
        for child in children:
            best = min(best, alphabeta(child, depth - 1, alpha, best, cache=cache))
            if best <= alpha:
                break  # alpha cutoff
    # NOTE(review): a value produced after a cutoff is a bound, not an exact
    # score, yet it is cached the same way as exact scores — confirm the
    # cache consumers tolerate that.
    cache.add(node, best, depth)
    return best
def testOverflow():
    """Exercise LRU eviction: filling one past capacity must drop exactly
    the least-recently-used entry, and a get() must refresh recency.
    Prints a diagnostic and returns early on the first failure."""
    cache = Cache(cacheSize=5)
    for key in range(6):
        cache.add(key, key)
    # Six adds into a five-slot cache: key 0 must be gone, key 1 must stay.
    if cache.get(0) is not None:
        print(
            "Failed to overflow correctly, the oldest entry is still in cache."
        )
        print(cache.getCacheValues())
        return
    if cache.get(1) is None:
        print("We overflowed too much.")
        print(cache.getCacheValues())
        return
    cache.add(6, 6)
    # The get(1) just above should have moved key 1 to the recent end, so
    # adding key 6 must evict key 2 (now the oldest), not key 1.
    if cache.get(1) is None:
        print("We deleted a recently accessed entry.")
        print(cache.getCacheValues())
        return
    if cache.get(2) is not None or cache.get(6) is None:
        print("We failed to overflow correctly and remove an entry.")
        print(cache.getCacheValues())
        return
    print("Passed the overflow tests.")
class VideoPlayer:
    """Facade over the YouTube, stream and file player backends.

    Routes each URL to the first backend whose ``can_play`` accepts it and
    memoizes that choice per URL, so repeated operations on the same URL
    skip the (potentially slow) probing.
    """

    def __init__(self):
        self.yt_player = YouTubePlayer()
        self.streamer = Streamer()
        self.file_player = FilePlayer()
        # url -> resolved backend.
        self.players_cache = Cache(_cache_size)

    def set_status_func(self, status_func):
        """Forward status callbacks to the backends that report status."""
        self.yt_player.set_status_func(status_func)
        self.streamer.set_status_func(status_func)

    def _get_player(self, url):
        """Return the backend able to play `url`, or None.

        Probe order: local file (cheapest) first, then YouTube, then
        generic streaming. Successful resolutions are cached.
        """
        c = self.players_cache.get(url)
        if c is not None:
            return c
        if self.file_player.can_play(url):
            c = self.file_player
        elif self.yt_player.can_play(url):
            c = self.yt_player
        elif self.streamer.can_play(url):
            c = self.streamer
        if c is not None:
            self.players_cache.add(url, c)
            return c
        return None

    def get_qualities(self, url):
        """Return the available qualities for `url`, or None if unplayable."""
        p = self._get_player(url)
        if p is None:
            return None
        return p.get_qualities(url)

    def can_play(self, url):
        """True if some backend accepts `url`; probing errors count as no."""
        try:
            return self._get_player(url) is not None
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed here.
        except Exception:
            return False

    def is_playing(self):
        return self.yt_player.is_playing() or self.streamer.is_playing() \
            or self.file_player.is_playing()

    def play(self, url, quality):
        """Play `url` at `quality`; raises if no backend accepts the URL."""
        p = self._get_player(url)
        if p is None:
            raise Exception('No player found')
        p.play(url, quality)

    def is_playlist(self):
        # Only the YouTube backend has a playlist notion.
        return self.yt_player.is_playlist()

    def playlist_next(self):
        self.yt_player.playlist_next()

    def stop(self):
        # Stop every backend; at most one can actually be playing.
        self.yt_player.stop()
        self.streamer.stop()
        self.file_player.stop()
def test_ttl(self):
    """An entry with ttl=1 added at t=0 is still present at t=1 but gone
    at t=2 (the cache's clock is injected as a zero-arg callable)."""
    now = 0
    cache = Cache(lambda: now, {})
    cache.add('key', 'value', 1)
    now = 1
    present_at_deadline = 'key' in cache
    now = 2
    present_after_deadline = 'key' in cache
    self.assertTrue(present_at_deadline)
    self.assertFalse(present_after_deadline)
def test_save_read(self):
    """A cache serialized to a file-like object must round-trip all of
    its entries (keys, values, and presence checks)."""
    original = Cache(lambda: 0, {})
    original.add('key1', 'value1', 10)
    original.add('key2', 'value2', 20)
    buffer = StringIO()
    original.save_to_file(buffer)
    # Re-read from a fresh stream holding exactly what was written.
    restored = Cache.from_file(StringIO(buffer.getvalue()), lambda: 0)
    self.assertEqual(restored['key1'], 'value1')
    self.assertEqual(restored['key2'], 'value2')
    self.assertTrue('key1' in restored)
    self.assertTrue('key2' in restored)
def basicVisualTests():
    """Ad-hoc smoke test: prints cache internals after each operation for
    manual inspection (no assertions)."""
    cache = Cache()
    for key, value in ((1, 56), (3, 59), (39, 1159)):
        cache.add(key, value)
    cache.writeToDisk()
    # Walk the first few nodes of the internal linked list.
    print(cache.head)
    print(cache.head.next)
    print(cache.head.next.next)
    print(cache.writeToDisk())
    print(cache.get(1))
    print(cache.writeToDisk())
    print(cache.get(6546))  # expected miss
    print(cache.writeToDisk())
    cache.add(6546, "testing")
    print(cache.writeToDisk())
    print(cache.get(39))
    print(cache.writeToDisk())
    print("Testing overflow")
    print(cache.getCacheValues())
    # Push well past capacity so eviction can be watched step by step.
    for i in range(0, 12):
        cache.add(i, "%s : %s" % (i, i + 7))
        print(cache.getCacheValues())
    print(cache.getCacheValues())
def main():
    """Generate/refresh ADF deployment pipelines.

    Reads the deployment map via S3, ensures a cross-account event rule
    exists for every pipeline sourced from a foreign account, then fans
    out one worker thread per pipeline definition and waits for them all.
    """
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)
    _create_inputs_folder()
    parameter_store = ParameterStore(
        DEPLOYMENT_ACCOUNT_REGION,
        boto3
    )
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, SHARED_MODULES_BUCKET)
    deployment_map = DeploymentMap(
        parameter_store,
        s3,
        ADF_PIPELINE_PREFIX
    )
    sts = STS()
    # Assume the read-only role in the management (master) account so the
    # Organizations API can be queried from here.
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')
        ), 'pipeline'
    )
    organizations = Organizations(role)
    clean(parameter_store, deployment_map)
    ensure_event_bus_status(ORGANIZATION_ID)
    try:
        auto_create_repositories = parameter_store.fetch_parameter('auto_create_repositories')
    except ParameterNotFoundError:
        # Feature flag absent from the parameter store: default to enabled.
        auto_create_repositories = 'enabled'
    threads = []
    # Remembers which source accounts already had their Rule created, so a
    # rule is only created/updated once per foreign account.
    _cache = Cache()
    for p in deployment_map.map_contents.get('pipelines', []):
        _source_account_id = p.get('default_providers', {}).get('source', {}).get('properties', {}).get('account_id', {})
        # Only pipelines sourced from a different account than the
        # deployment account need a cross-account event rule.
        if _source_account_id and int(_source_account_id) != int(DEPLOYMENT_ACCOUNT_ID) and not _cache.check(_source_account_id):
            rule = Rule(p['default_providers']['source']['properties']['account_id'])
            rule.create_update()
            _cache.add(p['default_providers']['source']['properties']['account_id'], True)
        # NOTE(review): PropagatingThread presumably re-raises worker
        # exceptions on join() — verify against its definition.
        thread = PropagatingThread(target=worker_thread, args=(
            p,
            organizations,
            auto_create_repositories,
            deployment_map,
            parameter_store
        ))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
def writeReadTest():
    """Round-trip test: persist a populated cache to disk, reload it into
    a fresh instance, and verify the reloaded contents match the
    original. Prints the outcome instead of asserting."""
    c = Cache()
    for i in range(0, 5):
        c.add(i, str(i))
    oldValues = c.getCacheValues()
    c.writeToDisk()
    c = Cache()
    c.loadFromDisk()
    newValues = c.getCacheValues()
    # Compare the full contents; the original only checked the first two
    # of the five entries, so a corrupted tail would have passed silently.
    if newValues != oldValues:
        print("Reloading values differ from original.")
        print("Old values:")
        print(oldValues)
        print("New values:")
        print(newValues)
    else:
        print("Write Read test successful")
def alphabeta(node, depth, alpha=float("-inf"), beta=float("inf"), cache=None):
    """Alpha-beta minimax search over board states, memoized in `cache`.

    Returns the minimax score of `node` searched to `depth` plies.
    Cached entries are (depth, value) pairs, reused only when computed at
    a depth at least as deep as the current request. White is the
    maximizing side (`node.white_plays`).

    NOTE(review): this function is an exact duplicate of the earlier
    `alphabeta` definition in this file — consider deduplicating.
    """
    if cache is None:
        cache = Cache(1000)
    cached_result = cache.get(node)
    if (cached_result is not None):
        if (cached_result[0] >= depth):
            return cached_result[1]
    child_nodes = [m.get_board() for m in get_moves(node)]
    if (depth == 0) or (len(child_nodes) == 0):
        # Leaf: search budget exhausted, or no legal moves remain.
        return score_board(node)
    if node.white_plays:
        # Maximizing side: tighten the lower bound as children are scored.
        bestvalue = alpha
        for child in child_nodes:
            ab_score = alphabeta(child, depth - 1, bestvalue, beta, cache=cache)
            bestvalue = max(bestvalue, ab_score)
            if beta <= bestvalue:
                break  # beta cutoff
        # NOTE(review): values reached via a cutoff are bounds, not exact
        # scores, yet they are cached like exact results — confirm intended.
        cache.add(node, bestvalue, depth)
        return bestvalue
    else:
        # Minimizing side: tighten the upper bound.
        bestvalue = beta
        for child in child_nodes:
            ab_score = alphabeta(child, depth - 1, alpha, bestvalue, cache=cache)
            bestvalue = min(bestvalue, ab_score)
            if bestvalue <= alpha:
                break  # alpha cutoff
        cache.add(node, bestvalue, depth)
        return bestvalue
class Player(object):
    """Music player driving an external `mpg123 -R` (remote-control mode)
    subprocess over pipes.

    State lives in `self.info` (current index, play mode, volume, ordered
    `player_list` and shuffled `playing_list`) and `self.songs` (per-song
    metadata keyed by stringified song id), both backed by `Storage`.
    Playback, lyric fetching and song caching each run on their own
    threads started from `popen_recall`.
    """

    def __init__(self):
        self.config = Config()
        self.ui = Ui()
        # Handle of the running mpg123 subprocess, None when idle.
        self.popen_handler = None
        # flag stop, prevent thread start
        self.playing_flag = False
        self.pause_flag = False
        # Track length / elapsed bookkeeping parsed from mpg123 '@F' frames.
        self.process_length = 0
        self.process_location = 0
        self.process_first = False
        self.storage = Storage()
        self.info = self.storage.database['player_info']
        self.songs = self.storage.database['songs']
        self.playing_id = -1
        self.cache = Cache()
        self.notifier = self.config.get_item('notifier')
        self.mpg123_parameters = self.config.get_item('mpg123_parameters')
        # Hooks invoked at end-of-playlist / on song change; set by the UI.
        self.end_callback = None
        self.playing_song_changed_callback = None

    def popen_recall(self, onExit, popenArgs):
        '''
        Runs the given args in subprocess.Popen, and then calls the function
        onExit when the subprocess completes. onExit is a callable object,
        and popenArgs is a dict of song metadata (song_id, song_name,
        artist, mp3_url, optionally cache) for the track to play.
        Starts the playback thread plus background threads for caching the
        song and downloading its lyric / translated lyric, then returns
        the playback thread immediately.
        '''
        def runInThread(onExit, arg):
            # Drive mpg123 in remote-control mode; arg is a path or URL.
            para = ['mpg123', '-R']
            para[1:1] = self.mpg123_parameters
            self.popen_handler = subprocess.Popen(para, stdin=subprocess.PIPE,
                                                  stdout=subprocess.PIPE,
                                                  stderr=subprocess.PIPE)
            # 'V <n>' sets volume, 'L <target>' loads and starts playback.
            self.popen_handler.stdin.write('V ' + str(self.info['playing_volume']) + '\n')
            if arg:
                self.popen_handler.stdin.write('L ' + arg + '\n')
            else:
                # Nothing playable: skip to the next track.
                self.next_idx()
                onExit()
                return
            self.process_first = True
            while True:
                if self.playing_flag is False:
                    break
                strout = self.popen_handler.stdout.readline()
                if re.match('^\@F.*$', strout):
                    # '@F' frame lines carry playback progress; field 4 is
                    # seconds. The first frame reports total length, later
                    # ones report remaining time.
                    process_data = strout.split(' ')
                    process_location = float(process_data[4])
                    if self.process_first:
                        self.process_length = process_location
                        self.process_first = False
                        self.process_location = 0
                    else:
                        self.process_location = self.process_length - process_location  # NOQA
                    continue
                elif strout[:2] == '@E':
                    # mpg123 error: get an alternative url from the new API.
                    sid = popenArgs['song_id']
                    new_url = NetEase().songs_detail_new_api([sid])[0]['url']
                    if new_url is None:
                        log.warning(('Song {} is unavailable '
                                     'due to copyright issue').format(sid))
                        break
                    log.warning(
                        'Song {} is not compatible with old api.'.format(sid))
                    popenArgs['mp3_url'] = new_url
                    self.popen_handler.stdin.write('\nL ' + new_url + '\n')
                    self.popen_handler.stdout.readline()
                elif strout == '@P 0\n':
                    # '@P 0' means playback stopped: quit and kill mpg123.
                    self.popen_handler.stdin.write('Q\n')
                    self.popen_handler.kill()
                    break
            if self.playing_flag:
                # Natural end of track: advance and chain into onExit
                # (which is `recall`, so playback continues).
                self.next_idx()
                onExit()
            return

        def getLyric():
            # Fetch and store the lyric for the current song, once.
            if 'lyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]['lyric'] = []
            if len(self.songs[str(self.playing_id)]['lyric']) > 0:
                return
            netease = NetEase()
            lyric = netease.song_lyric(self.playing_id)
            if lyric == [] or lyric == '未找到歌词':
                return
            lyric = lyric.split('\n')
            self.songs[str(self.playing_id)]['lyric'] = lyric
            return

        def gettLyric():
            # Fetch and store the translated lyric for the current song.
            if 'tlyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]['tlyric'] = []
            if len(self.songs[str(self.playing_id)]['tlyric']) > 0:
                return
            netease = NetEase()
            tlyric = netease.song_tlyric(self.playing_id)
            if tlyric == [] or tlyric == '未找到歌词翻译':
                return
            tlyric = tlyric.split('\n')
            self.songs[str(self.playing_id)]['tlyric'] = tlyric
            return

        def cacheSong(song_id, song_name, artist, song_url):
            # Download the song in the background; record the local path
            # on completion so later plays use the cached file.
            def cacheExit(song_id, path):
                self.songs[str(song_id)]['cache'] = path
            self.cache.add(song_id, song_name, artist, song_url, cacheExit)
            self.cache.start_download()

        # Prefer the locally cached file when it exists on disk.
        if 'cache' in popenArgs.keys() and os.path.isfile(popenArgs['cache']):
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['cache']))
        else:
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['mp3_url']))
        cache_thread = threading.Thread(
            target=cacheSong,
            args=(popenArgs['song_id'], popenArgs['song_name'],
                  popenArgs['artist'], popenArgs['mp3_url']))
        cache_thread.start()
        thread.start()
        lyric_download_thread = threading.Thread(target=getLyric, args=())
        lyric_download_thread.start()
        tlyric_download_thread = threading.Thread(target=gettLyric, args=())
        tlyric_download_thread.start()
        # returns immediately after the thread starts
        return thread

    def get_playing_id(self):
        return self.playing_id

    def recall(self):
        """(Re)start playback of the song at the current index; also the
        end-of-track callback passed to popen_recall, which makes playback
        chain from one song to the next."""
        if self.info['idx'] >= len(
                self.info['player_list']) and self.end_callback is not None:
            log.debug('Callback')
            self.end_callback()
        if self.info['idx'] < 0 or self.info['idx'] >= len(
                self.info['player_list']):
            # Index out of range: reset and stop instead of crashing.
            self.info['idx'] = 0
            self.stop()
            return
        self.playing_flag = True
        self.pause_flag = False
        item = self.songs[self.info['player_list'][self.info['idx']]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        if self.notifier:
            self.ui.notify('Now playing', item['song_name'],
                           item['album_name'], item['artist'])
        self.playing_id = item['song_id']
        self.popen_recall(self.recall, item)

    def generate_shuffle_playing_list(self):
        """Rebuild `playing_list` as a random permutation of the indices
        of `player_list` and reset the shuffle cursor `ridx`."""
        del self.info['playing_list'][:]
        for i in range(0, len(self.info['player_list'])):
            self.info['playing_list'].append(i)
        random.shuffle(self.info['playing_list'])
        self.info['ridx'] = 0

    def new_player_list(self, type, title, datalist, offset):
        """Replace the play queue with `datalist`, starting at `offset`.
        Song metadata is refreshed when name or quality changed."""
        self.info['player_list_type'] = type
        self.info['player_list_title'] = title
        self.info['idx'] = offset
        del self.info['player_list'][:]
        del self.info['playing_list'][:]
        self.info['ridx'] = 0
        for song in datalist:
            self.info['player_list'].append(str(song['song_id']))
            if str(song['song_id']) not in self.songs.keys():
                self.songs[str(song['song_id'])] = song
            else:
                database_song = self.songs[str(song['song_id'])]
                if (database_song['song_name'] != song['song_name'] or
                        database_song['quality'] != song['quality']):
                    self.songs[str(song['song_id'])] = song

    def append_songs(self, datalist):
        """Append `datalist` to the queue, refreshing stale metadata while
        preserving an existing 'cache' path; reshuffle in random modes."""
        for song in datalist:
            self.info['player_list'].append(str(song['song_id']))
            if str(song['song_id']) not in self.songs.keys():
                self.songs[str(song['song_id'])] = song
            else:
                database_song = self.songs[str(song['song_id'])]
                cond = any([
                    database_song[k] != song[k]
                    for k in ('song_name', 'quality', 'mp3_url')
                ])
                if cond:
                    # Keep the already-downloaded file when replacing
                    # otherwise-stale metadata.
                    if 'cache' in self.songs[str(song['song_id'])].keys():
                        song['cache'] = self.songs[str(
                            song['song_id'])]['cache']
                    self.songs[str(song['song_id'])] = song
        # NOTE(review): `and` binds tighter than `or` here, so mode 4
        # reshuffles even for an empty datalist — confirm intended.
        if len(datalist) > 0 and self.info['playing_mode'] == 3 or self.info[
                'playing_mode'] == 4:
            self.generate_shuffle_playing_list()

    def play_and_pause(self, idx):
        # if same playlists && idx --> same song :: pause/resume it
        if self.info['idx'] == idx:
            if self.pause_flag:
                self.resume()
            else:
                self.pause()
        else:
            self.info['idx'] = idx
            # if it's playing
            if self.playing_flag:
                self.switch()
            # start new play
            else:
                self.recall()

    # play another
    def switch(self):
        self.stop()
        # wait process be killed
        time.sleep(0.1)
        self.recall()

    def stop(self):
        """Ask mpg123 to quit and kill the subprocess if needed."""
        if self.playing_flag and self.popen_handler:
            self.playing_flag = False
            self.popen_handler.stdin.write('Q\n')
            try:
                self.popen_handler.kill()
            except OSError as e:
                log.error(e)
                return

    def pause(self):
        """Toggle mpg123 pause ('P') and redraw play info as paused."""
        if not self.playing_flag and not self.popen_handler:
            return
        self.pause_flag = True
        self.popen_handler.stdin.write('P\n')
        item = self.songs[self.info['player_list'][self.info['idx']]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time(),
                               pause=True)

    def resume(self):
        """Toggle mpg123 pause ('P') back to playing and redraw."""
        self.pause_flag = False
        self.popen_handler.stdin.write('P\n')
        item = self.songs[self.info['player_list'][self.info['idx']]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']

    def _swap_song(self):
        # Move the currently playing index to the front of the freshly
        # shuffled list so the current song is not replayed.
        plist = self.info['playing_list']
        now_songs = plist.index(self.info['idx'])
        plist[0], plist[now_songs] = plist[now_songs], plist[0]

    def _is_idx_valid(self):
        return 0 <= self.info['idx'] < len(self.info['player_list'])

    def _inc_idx(self):
        if self.info['idx'] < len(self.info['player_list']):
            self.info['idx'] += 1

    def _dec_idx(self):
        if self.info['idx'] > 0:
            self.info['idx'] -= 1

    def _need_to_shuffle(self):
        # A reshuffle is needed when the shuffle cursor ran off the end or
        # no longer points at the current song.
        playing_list = self.info['playing_list']
        ridx = self.info['ridx']
        idx = self.info['idx']
        if ridx >= len(playing_list) or playing_list[ridx] != idx:
            return True
        else:
            return False

    def next_idx(self):
        """Advance the current index according to the playing mode."""
        if not self._is_idx_valid():
            self.stop()
            return
        playlist_len = len(self.info['player_list'])
        playinglist_len = len(self.info['playing_list'])
        # Playing mode. 0 is ordered. 1 is orderde loop.
        # 2 is single song loop. 3 is single random. 4 is random loop
        if self.info['playing_mode'] == 0:
            self._inc_idx()
        elif self.info['playing_mode'] == 1:
            self.info['idx'] = (self.info['idx'] + 1) % playlist_len
        elif self.info['playing_mode'] == 2:
            self.info['idx'] = self.info['idx']
        elif self.info['playing_mode'] == 3 or self.info['playing_mode'] == 4:
            if self._need_to_shuffle():
                self.generate_shuffle_playing_list()
                playinglist_len = len(self.info['playing_list'])
                # When you regenerate playing list
                # you should keep previous song same.
                try:
                    self._swap_song()
                except Exception as e:
                    log.error(e)
            self.info['ridx'] += 1
            # Out of border
            if self.info['playing_mode'] == 4:
                self.info['ridx'] %= playinglist_len
            if self.info['ridx'] >= playinglist_len:
                # Mode 3 exhausted its shuffle: park idx past the end so
                # the next recall() stops playback.
                self.info['idx'] = playlist_len
            else:
                self.info['idx'] = self.info['playing_list'][self.info['ridx']]
        else:
            self.info['idx'] += 1
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def next(self):
        self.stop()
        time.sleep(0.01)
        self.next_idx()
        self.recall()

    def prev_idx(self):
        """Step the current index backwards according to the playing mode."""
        if not self._is_idx_valid():
            self.stop()
            return
        playlist_len = len(self.info['player_list'])
        playinglist_len = len(self.info['playing_list'])
        # Playing mode. 0 is ordered. 1 is orderde loop.
        # 2 is single song loop. 3 is single random. 4 is random loop
        if self.info['playing_mode'] == 0:
            self._dec_idx()
        elif self.info['playing_mode'] == 1:
            self.info['idx'] = (self.info['idx'] - 1) % playlist_len
        elif self.info['playing_mode'] == 2:
            self.info['idx'] = self.info['idx']
        elif self.info['playing_mode'] == 3 or self.info['playing_mode'] == 4:
            if self._need_to_shuffle():
                self.generate_shuffle_playing_list()
                playinglist_len = len(self.info['playing_list'])
            self.info['ridx'] -= 1
            if self.info['ridx'] < 0:
                if self.info['playing_mode'] == 3:
                    self.info['ridx'] = 0
                else:
                    # Mode 4 wraps around the shuffled list.
                    self.info['ridx'] %= playinglist_len
            self.info['idx'] = self.info['playing_list'][self.info['ridx']]
        else:
            self.info['idx'] -= 1
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def prev(self):
        self.stop()
        time.sleep(0.01)
        self.prev_idx()
        self.recall()

    def shuffle(self):
        """Switch to single-random mode with a fresh shuffle and play."""
        self.stop()
        time.sleep(0.01)
        self.info['playing_mode'] = 3
        self.generate_shuffle_playing_list()
        self.info['idx'] = self.info['playing_list'][self.info['ridx']]
        self.recall()

    def volume_up(self):
        """Raise volume by 7 (clamped to 100) and push it to mpg123."""
        self.info['playing_volume'] = self.info['playing_volume'] + 7
        if (self.info['playing_volume'] > 100):
            self.info['playing_volume'] = 100
        if not self.playing_flag:
            return
        self.popen_handler.stdin.write('V ' + str(self.info['playing_volume'])
                                       + '\n')

    def volume_down(self):
        """Lower volume by 7 (clamped to 0) and push it to mpg123."""
        self.info['playing_volume'] = self.info['playing_volume'] - 7
        if (self.info['playing_volume'] < 0):
            self.info['playing_volume'] = 0
        if not self.playing_flag:
            return
        self.popen_handler.stdin.write('V ' + str(self.info['playing_volume'])
                                       + '\n')

    def update_size(self):
        """Redraw play info after a terminal resize; errors are logged."""
        try:
            self.ui.update_size()
            item = self.songs[self.info['player_list'][self.info['idx']]]
            if self.playing_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time())
            if self.pause_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time(),
                                       pause=True)
        except Exception as e:
            log.error(e)
            pass

    def cacheSong1time(self, song_id, song_name, artist, song_url):
        """Download a single song once, disabling the cache again as soon
        as this download finishes."""
        def cacheExit(song_id, path):
            self.songs[str(song_id)]['cache'] = path
            self.cache.enable = False
        self.cache.enable = True
        self.cache.add(song_id, song_name, artist, song_url, cacheExit)
        self.cache.start_download()
def timerTest():
    """Manual, timing-based test of the cache's expiry timer.

    Entries are created with expiryTime=4 seconds; the test sleeps past
    the deadlines and prints pass/fail messages. It also verifies that
    stopTimer() suspends expiry and restartTimer() resumes it, twice.
    Takes ~20 seconds of wall-clock time.
    """
    print("Testing the timed expiry of elements.")
    import time
    c = Cache(expiryTime=4)
    c.add(1, "one threading")
    print("Sleeping for 3 seconds.")
    time.sleep(3)
    c.add(2, "two")
    c.add(3, "two")
    print("Sleeping for 2 seconds.")
    time.sleep(2)
    # 5 seconds have passed for key 1 (> 4s TTL): it must be gone, while
    # keys 2 and 3 are only 2 seconds old and must remain.
    if c.get(1) is not None:
        print("timer test for key 1 failed.")
    else:
        print("Successfully removed key 1 after its time expired.")
    if c.get(2) is None or c.get(3) is None:
        print("timer test for keys 2 or 3 failed as they expired too early.")
    print("Sleeping for 3 seconds")
    time.sleep(3)
    # Now keys 2 and 3 are 5 seconds old and must have expired too.
    if c.get(2) is not None or c.get(3) is not None:
        print("timer test for 2&3 failed")
    else:
        print("Keys 2 and 3 also expired as expected.")
    # We check if stopping then restarting the timer works as expected.
    print("Testing if stopping the timer removes nothing.")
    c.add(4, "four")
    c.stopTimer()
    print("Sleeping for 6 seconds.")
    time.sleep(6)
    # With the timer stopped, key 4 must survive well past its TTL.
    if c.get(4) is None:
        print("Failure: timer deleted key 4 when it was turned off.")
    else:
        print("Stopping the timer successful")
    print("Restarting timer and sleeping for 2 seconds.")
    c.restartTimer()
    time.sleep(2)
    # Once restarted, the overdue key 4 must be collected.
    if c.get(4) is not None:
        print("Failure: timer did not delete key 4 when it was restarted.")
    else:
        print("Restarting the timer successful")
    print(
        "Testing if post restart the timer can also stop properly when desired."
    )
    c.add(5, "five")
    c.stopTimer()
    print("Sleeping for 6 seconds.")
    time.sleep(6)
    if c.get(5) is None:
        print("Failure: timer deleted key 5 when it was turned off.")
    else:
        print("Stopping the timer successful")
class YouTubePlayer(PlayerBase):
    """Backend that plays YouTube videos and playlists via pafy, handing
    resolved stream URLs to an external player through `subprocess`.

    Video and playlist lookups are memoized in LRU caches. Playlist
    entries are fetched lazily on a background thread while the first
    video is already playing. `alive_threads` holds ids of play loops
    that are still allowed to run; removing an id cancels its loop.

    NOTE(review): this code appears to assume Python 2 semantics —
    `filter()` results are indexed and passed to `len()` — confirm before
    running under Python 3.
    """

    def __init__(self, status_func=None):
        PlayerBase.__init__(self, status_func)
        self.video_cache = Cache(_cache_size)
        self.playlist_cache = Cache(_cache_size)
        self.lock = threading.Lock()
        self.alive_threads = []
        self.playing_playlist = False

    def can_play(self, url):
        """True when `url` is a YouTube playlist or resolvable video."""
        self._status('checking video status')
        return (pafy.playlist.extract_playlist_id(url) is not None) or \
            (self._get_video(url) is not None)

    def _get_video(self, url):
        """Return a cached or fresh pafy video for `url`, or None."""
        v = self.video_cache.get(url)
        if v is not None:
            return v
        try:
            v = pafy.new(url)
            if v.length == 0.0:
                return None  # live stream
            self.video_cache.add(url, v)
            return v
        # NOTE(review): bare except also swallows KeyboardInterrupt.
        except:
            return None

    def _get_playlist(self, url):
        """Return the playlist for `url` as a list of
        [video_id, pafy_or_None] pairs (pafy objects filled lazily),
        or None when `url` is not a playlist."""
        v = self.playlist_cache.get(url)
        if v is not None:
            return v
        try:
            plid = pafy.playlist.extract_playlist_id(url)
            if not plid:
                return None
            gurl = pafy.g.urls['playlist'] % plid
            allinfo = pafy.pafy.fetch_decode(gurl)
            allinfo = json.loads(allinfo)
            pafys = []
            for v in allinfo['video']:
                plentry = [v.get('encrypted_id'), None]
                pafys.append(plentry)
            self.playlist_cache.add(url, pafys)
            return pafys
        except:
            _log.info('%s is not a playlist' % url)
            return None

    def _get_video_qualities(self, pfy):
        """List mp4 qualities as vertical resolutions ('720p', …) plus
        the pseudo-qualities 'worst', 'default' and 'best'."""
        mp4s = filter(lambda s: s.extension == 'mp4', pfy.streams)
        resolutions = [mp4.quality for mp4 in mp4s]
        ret = []
        for r in resolutions:
            # Qualities come as 'WxH'; keep only the height with a 'p'.
            i = r.find('x')
            if i == -1:
                ret.append(r)
            else:
                ret.append(r[i + 1:] + 'p')
            ret = ret + ['worst', 'default', 'best']
        return ret

    def _get_playlist_qualities(self, pfys):
        # get all qualities for all lists
        # then return a list of those that appear everywhere
        sz = len(pfys)
        i = 1
        qualities = []
        for pfy in pfys:
            i = i + 1
            qualities.append(self._get_video_qualities(pfy))
        first = qualities[0]
        rest = qualities[1:]
        ret = []
        for f in first:
            t = [q for q in rest if f in q]
            if len(t) == len(rest):
                ret.append(f)
        return ret

    def _get_video_url(self, pfy, quality):
        # quality is a string with the second part of resolution and 'p'
        # e.g. 1280x720 -> 720p
        # or 'best'
        # or just some string
        # return a list, just easier like that
        try:
            if pfy is None:
                return []
            title = pfy.title
            author = pfy.author
            mp4s = filter(lambda s: s.extension == 'mp4', pfy.streams)
            if len(mp4s) == 0:
                return []
            if quality == 'worst' or len(mp4s) == 1:
                return [(title, author, mp4s[0].url)]
            if quality == 'best':
                return [(title, author, pfy.getbest('mp4').url)]
            if quality[-1] == 'p':
                # '720p' -> match 'x720' in the stream quality string.
                s = 'x' + quality[:-1]
                try:
                    nq = int(quality[:-1])
                except:
                    nq = _default_res
            else:
                s = quality
                nq = _default_res
            if quality != 'default':
                for ss in mp4s:
                    if s in ss.quality:
                        return [(title, author, ss.url)]
                # not found, look for first smaller than nq
                smaller = []
                for ss in mp4s:
                    try:
                        q = int(ss.quality.split('x')[-1])
                    except:
                        break
                    if q < nq:
                        smaller.append(ss)
                if len(smaller) > 0:
                    return [(title, author, smaller[-1].url)]
            # if nothing else worked, get me the best one
            return [(title, author, pfy.getbest('mp4').url)]
        except:
            _log.exception('exception while getting video url')
            return []

    def _get_first_url(self, url, quality, urls):
        """Append the first playable (title, author, url) tuple to `urls`
        and return the total number of entries (1 for a plain video)."""
        pl = self._get_playlist(url)
        sz = 1
        if pl is None:
            _log.info('getting first url')
            pfy = self._get_video(url)
            urls += self._get_video_url(pfy, quality)
        else:
            _log.info('getting first url for playlist')
            pfy = self._get_video(url)
            sz = len(pl)
            if sz == 0:
                return sz
            pfy = pl[0][1]
            if pfy is None:
                # Resolve the first playlist entry eagerly and memoize it
                # back into the playlist structure.
                pfy = pafy.new(pl[0][0])
                pl[0][1] = pfy
            urls += self._get_video_url(pfy, quality)
        return sz

    def _get_remaining_urls(self, url, quality, urls, thread_id):
        """Background worker: resolve the rest of a playlist into `urls`
        (shared with the play loop under `self.lock`), aborting once
        `thread_id` is no longer in `alive_threads`."""
        try:
            pl = self._get_playlist(url)
            if pl is None:
                return
            rest = pl[1:]
            sz = len(rest) + 1
            j = 2
            for i in rest:
                _log.info('getting data for video %i of %i' % (j, sz))
                j += 1
                pfy = i[1]
                if pfy is None:
                    try:
                        pfy = pafy.new(i[0])
                    except:
                        continue
                    i[1] = pfy
                u = self._get_video_url(pfy, quality)
                with self.lock:
                    urls += u
                    if thread_id not in self.alive_threads:
                        return
        except:
            _log.exception('exception while getting remaining you tube urls')
            raise

    def _finish_playing(self, thread_id):
        """Deregister a play loop and clear playlist state."""
        self._status('')
        with self.lock:
            if thread_id in self.alive_threads:
                self.alive_threads.remove(thread_id)
            self.playing_playlist = False

    def _play_loop_impl(self, url, quality):
        """Main play loop: resolve the first URL, optionally spawn the
        playlist resolver, then shell out to the external player for each
        resolved entry until stopped or cancelled."""
        thread_id = _next_thread_id()
        try:
            with self.lock:
                # Registering only this id implicitly cancels older loops.
                self.alive_threads = []
                self.alive_threads.append(thread_id)
            self._status('retrieving videos...')
            urls = []
            s = self._get_first_url(url, quality, urls)
            self.playing_playlist = (s > 1)
            if self.playing_playlist:
                threading.Thread(target=self._get_remaining_urls,
                                 args=(url, quality, urls,
                                       thread_id)).start()
            prev_sz = 1
            first = 0
            with self.lock:
                sz = len(urls)
            while True:
                for i in range(first, sz):
                    if not self.playing:
                        self._finish_playing(thread_id)
                        return
                    with self.lock:
                        (name, author, u) = urls[i]
                        if thread_id not in self.alive_threads:
                            self._finish_playing(thread_id)
                            return
                    self._status('playing\n%s\n%s' % (name, author))
                    if s == 1:
                        cmd = '%s "%s"' % (self._player_cmd, u)
                    else:
                        cmd = '%s "%s"' % (self._player_pl_cmd, u)
                    subprocess.call(cmd, shell=True)
                with self.lock:
                    sz = len(urls)
                # Pick up entries the resolver added meanwhile; otherwise
                # start over from the beginning of the list.
                if sz > prev_sz:
                    first = prev_sz
                    prev_sz = sz
                else:
                    first = 0
        except:
            _log.exception('exception while playing ' + url)
        finally:
            self._finish_playing(thread_id)
        return

    def _stop_threads(self):
        # Emptying alive_threads makes every running loop exit at its
        # next check.
        with self.lock:
            self.alive_threads = []

    def is_playlist(self):
        return self.playing_playlist

    def playlist_next(self):
        # Killing the external player makes the loop advance to the next
        # resolved entry.
        with self.lock:
            if self.playing_playlist:
                self._kill_player()

    def get_qualities(self, url):
        """Qualities for `url`: ['default'] for playlists, the per-video
        quality list for single videos, None when unplayable."""
        self._status('getting playlist information for ' + url)
        v = self._get_playlist(url)
        if v is not None:
            self._status('getting available video qualities for the playlist')
            return ['default']  # self._get_playlist_qualities(v)
        _log.info('getting qualities')
        v = self._get_video(url)
        if v is not None:
            self._status('getting available video qualities')
            return self._get_video_qualities(v)
        return None
class Player: def __init__(self): self.config = Config() # Hong, tmply comment out #self.ui = Ui() self.popen_handler = None # flag stop, prevent thread start self.playing_flag = False self.pause_flag = False self.process_length = 0 self.process_location = 0 self.process_first = False self.storage = Storage() self.info = self.storage.database["player_info"] self.songs = self.storage.database["songs"] self.playing_id = -1 self.cache = Cache() self.notifier = self.config.get_item("notifier") self.mpg123_parameters = self.config.get_item("mpg123_parameters") self.end_callback = None self.playing_song_changed_callback = None def popen_recall(self, onExit, popenArgs): """ Runs the given args in a subprocess.Popen, and then calls the function onExit when the subprocess completes. onExit is a callable object, and popenArgs is a lists/tuple of args that would give to subprocess.Popen. """ def runInThread2(onExit, popenArgs): print '>' + popenArgs + '<' def runInThread(onExit, popenArgs): import os os.system('mpg321 ' + popenArgs) #para = ['mpg123', '-R'] #para[1:1] = self.mpg123_parameters #para = ['mpg321', popenArgs] #self.popen_handler = subprocess.Popen(para, stdin=subprocess.PIPE, # stdout=subprocess.PIPE, # stderr=subprocess.PIPE) #self.popen_handler.stdin.write("V " + str(self.info["playing_volume"]) + "\n") #self.popen_handler.stdin.write("L " + popenArgs + "\n") #self.process_first = True #while (True): # if self.playing_flag == False: # print '222' # break # try: # strout = self.popen_handler.stdout.readline() # print '111' # print strout # except IOError: # break # if re.match("^\@F.*$", strout): # process_data = strout.split(" ") # process_location = float(process_data[4]) # if self.process_first: # self.process_length = process_location # self.process_first = False # self.process_location = 0 # else: # self.process_location = self.process_length - process_location # continue # if strout == "@P 0\n": # self.popen_handler.stdin.write("Q\n") # 
self.popen_handler.kill() # break if self.playing_flag: #self.next_idx() onExit() return def getLyric(): if 'lyric' not in self.songs[str(self.playing_id)].keys(): self.songs[str(self.playing_id)]["lyric"] = [] if len(self.songs[str(self.playing_id)]["lyric"]) > 0: return netease = NetEase() lyric = netease.song_lyric(self.playing_id) if lyric == [] or lyric == '未找到歌词': return lyric = lyric.split('\n') self.songs[str(self.playing_id)]["lyric"] = lyric return def gettLyric(): if 'tlyric' not in self.songs[str(self.playing_id)].keys(): self.songs[str(self.playing_id)]["tlyric"] = [] if len(self.songs[str(self.playing_id)]["tlyric"]) > 0: return netease = NetEase() tlyric = netease.song_tlyric(self.playing_id) if tlyric == [] or tlyric == '未找到歌词翻译': return tlyric = tlyric.split('\n') self.songs[str(self.playing_id)]["tlyric"] = tlyric return def cacheSong(song_id, song_name, artist, song_url): def cacheExit(song_id, path): # hong, comment out #self.songs[str(song_id)]['cache'] = path pass self.cache.add(song_id, song_name, artist, song_url, cacheExit) self.cache.start_download() cached_song = '~/.netease-musicbox/cached/' + str( popenArgs['song_id']) + '.mp3' cached_song = '/home/pi/.netease-musicbox/cached/' + str( popenArgs['song_id']) + '.mp3' print cached_song if os.path.isfile(cached_song): #if 'cache' in popenArgs.keys() and os.path.isfile(popenArgs['cache']): print 'cached found' #thread = threading.Thread(target=runInThread, args=(onExit, popenArgs['cache'])) #thread = threading.Thread(target=runInThread, args=(onExit, cached_song)) runInThread(onExit, cached_song) else: print 'cache not found' #thread = threading.Thread(target=runInThread, args=(onExit, popenArgs['mp3_url'])) runInThread(onExit, popenArgs['mp3_url']) cache_thread = threading.Thread( target=cacheSong, args=(popenArgs['song_id'], popenArgs['song_name'], popenArgs['artist'], popenArgs['mp3_url'])) cache_thread.start() #thread.start() # Hong, comment out following 4 lines #lyric_download_thread = 
threading.Thread(target=getLyric, args=()) #lyric_download_thread.start() #tlyric_download_thread = threading.Thread(target=gettLyric, args=()) #tlyric_download_thread.start() # returns immediately after the thread starts #return thread def get_playing_id(self): return self.playing_id def recall(self): if self.info["idx"] >= len( self.info["player_list"]) and self.end_callback != None: self.end_callback() if self.info["idx"] < 0 or self.info["idx"] >= len( self.info["player_list"]): self.info["idx"] = 0 self.stop() return self.playing_flag = True self.pause_flag = False item = self.songs[self.info["player_list"][self.info["idx"]]] self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time()) if self.notifier == True: self.ui.notify("Now playing", item['song_name'], item['album_name'], item['artist']) self.playing_id = item['song_id'] # Hong, function are first-class object, so 'recall' can be referred and passed around self.popen_recall(self.recall, item) def generate_shuffle_playing_list(self): del self.info["playing_list"][:] for i in range(0, len(self.info["player_list"])): self.info["playing_list"].append(i) random.shuffle(self.info["playing_list"]) self.info["ridx"] = 0 def new_player_list(self, type, title, datalist, offset): self.info["player_list_type"] = type self.info["player_list_title"] = title self.info["idx"] = offset del self.info["player_list"][:] del self.info["playing_list"][:] self.info["ridx"] = 0 for song in datalist: self.info["player_list"].append(str(song["song_id"])) if str(song["song_id"]) not in self.songs.keys(): self.songs[str(song["song_id"])] = song else: database_song = self.songs[str(song["song_id"])] if (database_song["song_name"] != song["song_name"] or database_song["quality"] != song["quality"]): self.songs[str(song["song_id"])] = song def append_songs(self, datalist): for song in datalist: self.info["player_list"].append(str(song["song_id"])) if str(song["song_id"]) not in 
self.songs.keys(): self.songs[str(song["song_id"])] = song else: database_song = self.songs[str(song["song_id"])] if database_song["song_name"] != song["song_name"] or \ database_song["quality"] != song["quality"] or \ database_song["mp3_url"] != song["mp3_url"]: if "cache" in self.songs[str(song["song_id"])].keys(): song["cache"] = self.songs[str( song["song_id"])]["cache"] self.songs[str(song["song_id"])] = song if len(datalist) > 0 and self.info["playing_mode"] == 3 or self.info[ "playing_mode"] == 4: self.generate_shuffle_playing_list() def play_and_pause(self, idx): # if same playlists && idx --> same song :: pause/resume it if self.info["idx"] == idx: if self.pause_flag: self.resume() else: self.pause() else: self.info["idx"] = idx # if it's playing if self.playing_flag: self.switch() # start new play else: self.recall() # play another def switch(self): self.stop() # wait process be killed time.sleep(0.1) self.recall() def stop(self): if self.playing_flag and self.popen_handler: self.playing_flag = False try: self.popen_handler.stdin.write("Q\n") except: pass try: self.popen_handler.kill() except: return def pause(self): if not self.playing_flag and not self.popen_handler: return self.pause_flag = True os.kill(self.popen_handler.pid, signal.SIGSTOP) item = self.songs[self.info["player_list"][self.info["idx"]]] self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time(), pause=True) def resume(self): self.pause_flag = False os.kill(self.popen_handler.pid, signal.SIGCONT) item = self.songs[self.info["player_list"][self.info["idx"]]] self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time()) self.playing_id = item['song_id'] def next_idx(self): if self.info["idx"] < 0 or self.info["idx"] >= len( self.info["player_list"]): self.stop() return # Playing mode. 0 is ordered. 1 is orderde loop. 2 is single song loop. 3 is single random. 
4 is random loop if self.info["playing_mode"] == 0: self.info["idx"] += 1 elif self.info["playing_mode"] == 1: self.info["idx"] = (self.info["idx"] + 1) % len( self.info["player_list"]) elif self.info["playing_mode"] == 2: self.info["idx"] = self.info["idx"] elif self.info["playing_mode"] == 3: if self.info["ridx"] >= len(self.info["playing_list"]): self.generate_shuffle_playing_list() try: now_songs = self.info["playing_list"].index( self.info["idx"]) temp = self.info["playing_list"][0] self.info["playing_list"][0] = self.info["playing_list"][ now_songs] self.info["playing_list"][now_songs] = temp except: self.generate_shuffle_playing_list() elif self.info["playing_list"][ self.info["ridx"]] != self.info["idx"]: self.generate_shuffle_playing_list() try: now_songs = self.info["playing_list"].index( self.info["idx"]) temp = self.info["playing_list"][0] self.info["playing_list"][0] = self.info["playing_list"][ now_songs] self.info["playing_list"][now_songs] = temp except: self.generate_shuffle_playing_list() self.info["ridx"] += 1 if self.info["ridx"] >= len(self.info["playing_list"]): self.info["idx"] = len(self.info["playing_list"]) else: self.info["idx"] = self.info["playing_list"][self.info["ridx"]] elif self.info["playing_mode"] == 4: if self.info["ridx"] >= len(self.info["playing_list"]): self.generate_shuffle_playing_list() try: now_songs = self.info["playing_list"].index( self.info["idx"]) temp = self.info["playing_list"][0] self.info["playing_list"][0] = self.info["playing_list"][ now_songs] self.info["playing_list"][now_songs] = temp except: self.generate_shuffle_playing_list() elif self.info["playing_list"][ self.info["ridx"]] != self.info["idx"]: self.generate_shuffle_playing_list() try: now_songs = self.info["playing_list"].index( self.info["idx"]) temp = self.info["playing_list"][0] self.info["playing_list"][0] = self.info["playing_list"][ now_songs] self.info["playing_list"][now_songs] = temp except: self.generate_shuffle_playing_list() 
self.info["ridx"] = (self.info["ridx"] + 1) % len( self.info["player_list"]) self.info["idx"] = self.info["playing_list"][self.info["ridx"]] else: self.info["idx"] += 1 if self.playing_song_changed_callback is not None: self.playing_song_changed_callback() def next(self): self.stop() time.sleep(0.01) self.next_idx() self.recall() def prev_idx(self): if self.info["idx"] < 0 or self.info["idx"] >= len( self.info["player_list"]): self.stop() return # Playing mode. 0 is ordered. 1 is orderde loop. 2 is single song loop. 3 is single random. 4 is random loop if self.info["playing_mode"] == 0: self.info["idx"] -= 1 elif self.info["playing_mode"] == 1: self.info["idx"] = (self.info["idx"] - 1) % len( self.info["player_list"]) elif self.info["playing_mode"] == 2: self.info["idx"] = self.info["idx"] elif self.info["playing_mode"] == 3: if self.info["ridx"] >= len(self.info["playing_list"]): self.generate_shuffle_playing_list() elif self.info["playing_list"][ self.info["ridx"]] != self.info["idx"]: self.generate_shuffle_playing_list() self.info["ridx"] -= 1 if self.info["ridx"] < 0: self.info["ridx"] = 0 return self.info["idx"] = self.info["playing_list"][self.info["ridx"]] elif self.info["playing_mode"] == 4: if self.info["ridx"] >= len(self.info["playing_list"]): self.generate_shuffle_playing_list() elif self.info["playing_list"][ self.info["ridx"]] != self.info["idx"]: self.generate_shuffle_playing_list() self.info["ridx"] = (self.info["ridx"] - 1) % len( self.info["player_list"]) self.info["idx"] = self.info["playing_list"][self.info["ridx"]] else: self.info["idx"] -= 1 if self.playing_song_changed_callback is not None: self.playing_song_changed_callback() def prev(self): self.stop() time.sleep(0.01) self.prev_idx() self.recall() def shuffle(self): self.stop() time.sleep(0.01) self.info["playing_mode"] = 3 self.generate_shuffle_playing_list() self.info["idx"] = self.info["playing_list"][self.info["ridx"]] self.recall() def volume_up(self): self.info["playing_volume"] = 
self.info["playing_volume"] + 7 if (self.info["playing_volume"] > 100): self.info["playing_volume"] = 100 if not self.playing_flag: return try: self.popen_handler.stdin.write("V " + str(self.info["playing_volume"]) + "\n") except: self.switch() def volume_down(self): self.info["playing_volume"] = self.info["playing_volume"] - 7 if (self.info["playing_volume"] < 0): self.info["playing_volume"] = 0 if not self.playing_flag: return try: self.popen_handler.stdin.write("V " + str(self.info["playing_volume"]) + "\n") except: self.switch() def update_size(self): try: self.ui.update_size() item = self.songs[self.info["player_list"][self.info["idx"]]] if self.playing_flag: self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time()) if self.pause_flag: self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time(), pause=True) except: pass def cacheSong1time(self, song_id, song_name, artist, song_url): def cacheExit(song_id, path): self.songs[str(song_id)]['cache'] = path self.cache.enable = False self.cache.enable = True self.cache.add(song_id, song_name, artist, song_url, cacheExit) self.cache.start_download()
class DataManager(DataTable):
    """ This class provides high level access to a database table.  It
    loads and saves data from the table into an object or a DataSet,
    or a subclass thereof.  The class of objects and data sets that
    are returned can be set using the set_object() and set_dataset()
    functions.  You can request a single object (row), or you can
    specify a set of criteria, which are then translated into a WHERE
    clause.

    NOTE: this is legacy Python 2 code (print statements, `<>`,
    `has_key`, `im_func.func_code`).
    """

    def __init__(self, table_name, field_dictionary):
        # Acts both as a DataTable (via inheritance) and holds a second
        # DataTable instance in self.table; all methods below use self.table.
        DataTable.__init__(self, table_name, field_dictionary)
        self.table = DataTable(table_name, field_dictionary)
        self.reset_cache()

    def reset_cache(self):
        """ Initialize this data manager's object cache.

        Any contents in the cache are discarded.
        """
        self.cache = Cache()
        # 1 once every row of the table is known to be in the cache.
        self.all_cached = 0
        # Timestamp string of the last synch() call ('' = never).
        self.last_synched = ''

    def preload(self):
        """ Handles a client request to preload all objects from the
        database, fully populating the local cache and preparing
        for subsequent client requests.

        Preloading can result in significant performance benefits,
        because data managers who have preloaded their cache can fill
        more client requests from cache, avoiding expensive database
        accesses.

        Not all requests for preloading are honored.  To be preloaded,
        a datamanager's cache size must be set to CACHE_UNLIMITED.
        Also, only one call is honored.  Subsequent calls are silently
        ignored.  This makes it safe and efficient for clients to
        request preloading, without needing to know whether or not the
        data manager has already been preloaded.
        """
        if self.all_cached == 1:
            return
        # FIXME: Also try to preload caches that are not unlimited,
        # but are large enough to hold all existing objects.
        # Use a SQL COUNT(*) function to make this determination,
        # so we don't waste lots of time attempting to preload
        # a cache that cannot be preloaded.
        if self.cache.size <> CACHE_UNLIMITED:
            return
        #print 'Preloading ' + self.table.name
        # get_all() has the side effect of marking the cache preloaded.
        self.get_all()
        #i18n_table = self.table.name + '_i18n'
        #if self.dms.has_key(i18n_table):
        #    i18n_dm = self.dms[i18n_table]
        #    i18n_dm.preload()

    def get_by_id(self, id):
        """ Returns an individual object whose primary key field matches
        the specified id.

        If the object is in the cache, the cached object is returned.

        Objects which have more than one primary key field cannot be
        reliably retrieved using this function.  In this event, only
        the first matching object will be returned.
        """
        object = self.cache.get_by_key(id)
        if object == None:
            data_field = self.table.id_field()
            sql = self.table.select + ' WHERE ' + data_field.field_name + '=' + data_field.attr_to_field(
                id)
            cursor = db.select(sql)
            row = cursor.fetchone()
            if row == None:
                # Implicitly returns None when no row matches.
                return
            object = self.row_to_object(row)
        return object

    def get_by_keys(self, filters):
        """ Returns all objects which match the supplied filters. """
        # cache.filled == 1 appears to mean the cache has evicted entries,
        # so it can no longer be considered complete — TODO confirm Cache
        # semantics.
        if self.cache.filled == 1:
            self.all_cached = 0
        if self.all_cached == 1:
            return self.get_cached_by_keys(filters)
        else:
            sql = self.filters_to_sql(filters)
            return self.get_sql(sql)

    def get_cached_by_keys(self, filters):
        """ This private function fills keyed requests directly from the
        object cache.

        No checking is performed to determine whether the cache
        contains all objects which fit the request.  Therefore, this
        function should only be called by data managers whose caches
        are preloaded.  See the preload() function for more
        information on preloading.
        """
        # NOTE(review): `sql` is computed but never used in this method.
        sql = self.filters_to_sql(filters)
        function_text = self.filters_to_function(filters)
        print 'Function text: '
        print function_text
        code = compile(function_text, '<string>', 'exec')
        print 'Code: ' + str(code)
        print 'Code has %s arguments.' % code.co_argcount
        # HACK: hot-swaps the bytecode of test_object_filters with the
        # freshly compiled filter function (Python 2 only).
        self.test_object_filters.im_func.func_code = code
        #print 'Method code: ' + str(self.test_object_filters.im_func.func_code)
        good_keys = filter(self.test_object_filters, self.cache.keys())
        print 'Good keys: ' + str(good_keys)
        dataset = self.new_dataset()
        for key in good_keys:
            dataset[key] = self.cache[key]
        return dataset

    def test_object_filters(self, key):
        # Placeholder body; its code object is replaced at runtime by
        # get_cached_by_keys() with a compiled filter test.
        return 1

    def filters_to_function(self, filters):
        """ Converts a list of filters into a Python function which tests
        an object to see if it matches the filters.  Precompiling
        filter tests speeds up key filtering enormously.

        The generated function accepts a single parameter, "key".
        It retrieves the object with that key in the object cache
        and tests for a match.  If the object matches all the filters,
        the generated function returns 1.  Otherwise, it returns 0.
        """
        code = WOStringIO()
        code.write('def test_cached_object(key):\n')
        code.write(' object = self.cache[key]\n')
        for filter in filters:
            attribute, operator, value = filter
            test_value = repr(value)
            code.write(' obj_value = object.%s\n' % (attribute))
            if operator.upper() == 'LIKE':
                # NOTE(review): `obj_value` here is a generation-time name
                # that is not defined in this scope (NameError when a LIKE
                # filter is used); it was presumably meant to be the literal
                # string 'obj_value'.  Also, `<>` makes this return 1 on a
                # NON-match, and the early `return` means only the first
                # filter is ever tested — confirm intended semantics.
                code.write(' if %s > len(%s): return 0\n' %
                           (len(value), obj_value))
                code.write(' return (%s <> obj_value.upper()[:%s])\n' %
                           (test_value.upper(), len(value)))
            elif operator in ['<>', '<', '<=', '=', '>=', '>']:
                if operator == '=':
                    operator = '=='
                code.write(' return (object.%s %s %s)\n' %
                           (attribute, operator, repr(value)))
            else:
                raise UnknownOperator('Unrecognized operator: %s' %
                                      (operator))
        return code.get_value()

    def filters_to_sql(self, filters):
        """ Converts a list of filters into the SQL statement which
        will retrieve matching records from the database.
        """
        wheres = []
        for filter in filters:
            attribute, operator, value = filter
            field = self.table.fields.find_attribute(attribute)
            if operator.upper() == 'LIKE':
                # NOTE(review): `field_name` is undefined here — this raises
                # NameError for LIKE filters; probably meant field.field_name.
                wheres.append('upper(' + field_name + ') LIKE ' +
                              field.attr_to_field(value.upper() + '%'))
            else:
                wheres.append(field.field_name + operator +
                              field.attr_to_field(value))
        where = ' WHERE ' + string.join(wheres, ' AND ')
        return self.table.select + where

    def get_all(self):
        """ Returns a set of all objects managed by this data manager.

        If the data manager's cache proves sufficient to cache all
        objects, the cache will subsequently be considered preloaded,
        i.e., subsequent calls to get_by_keys() will be served directly
        from the cache, bypassing expensive database accesses.  See the
        preload() function for more information on preloading.
        """
        if self.cache.filled == 1:
            self.all_cached = 0
        if self.all_cached == 0:
            #print 'Loading all of ' + self.table.name + ' into cache.'
            set = self.get_sql(self.table.select)
            # Only mark preloaded when the cache absorbed every row
            # without evicting.
            if self.cache.filled == 0:
                self.all_cached = 1
            return set
        return self.get_cached()

    def synch(self):
        """ Synchronize objects in the object cache with the database.

        Objects which have been deleted in the database are removed
        from the object cache.  Objects which are out of synch with
        their database record have their attribute set to match
        the data in the database.
        """
        #print 'Synchronizing ' + self.table.name + ' with database'
        last_synched = self.last_synched
        # Remember this, because we're about to overwrite it.
        self.last_synched = now_string()
        # Delete any newly deleted objects.
        sql = 'SELECT identifier FROM deleted WHERE table_name=' + wsq(
            self.table.name) + ' AND deleted >= ' + wsq(last_synched)
        cursor = db.select(sql)
        while (1):
            row = cursor.fetchone()
            if row == None:
                break
            # Load keys for the deleted object
            object = self.new_object()
            if len(self.table.key_list) == 1:
                field = self.table.fields[self.table.key_list[0]]
                value = field.field_to_attr(row[0])
                setattr(object, field.attribute, value)
            else:
                # Composite keys are stored space-separated in `identifier`.
                values = row[0].split()
                for key in self.table.key_list:
                    field = self.table.fields[key]
                    value = field.field_to_attr(values[field.index])
                    setattr(object, field.attribute, value)
            object.key = self.table.get_key(object)
            #print 'Deleting from ' + self.table.name + ' cache: ' + str(value)
            self.cache.delete(object)
            # FIXME: Delete the object from all data sets which contain it!
        # Update any newly updated objects.
        sql = self.table.select + ' WHERE updated >= ' + wsq(last_synched)
        cursor = db.select(sql)
        while (1):
            row = cursor.fetchone()
            if row == None:
                break
            key = self.table.get_row_key(row)
            if self.cache.has_key(key):
                object = self.cache[key]
                self.table.load_row(object, row)
                #print 'Updating in ' + self.table.name + ' cache: ' + str(object.key)
            else:
                object = self.row_to_object(row)
                self.cache.add(object)
                #print 'Adding in ' + self.table.name + ' cache: ' + str(object.key)
            # FIXME: Add the object to all data sets whose filters it matches.

    def get_cached(self):
        """ Returns a dataset containing all objects in the object cache. """
        #print 'Pulling ' + self.table.name + ' from cache.'
        dataset = self.new_dataset()
        for key in self.cache.keys():
            dataset[key] = self.cache[key]
        return dataset

    def get_sql(self, sql):
        """ Accepts a SQL statement, instantiates the corresponding objects
        from the database, and stores those objects in the data cache
        if possible.
        """
        #print 'Cache miss, loading: ' + self.table.name
        dataset = self.new_dataset()
        cursor = db.select(sql)
        while (1):
            row = cursor.fetchone()
            if row == None:
                break
            object = self.row_to_object(row)
            dataset[object.key] = object
            self.cache.add(object)
        return dataset

    def set_object_class(self, object_class):
        # Class used to instantiate individual row objects.
        self.object_class = object_class

    def set_dataset_class(self, dataset_class):
        # Class used to instantiate result collections.
        self.dataset_class = dataset_class

    def new_object(self):
        # Build a fresh object with every field set to its default.
        # NOTE(review): self.dms is not assigned in __init__ — presumably
        # set externally by the owning registry; confirm before relying on it.
        object = self.object_class(self.dms, self)
        for key in self.table.fields.keys():
            field = self.table.fields[key]
            setattr(object, field.attribute, field.get_default())
        object.changed = 0
        object.in_database = 0
        return object

    def new_dataset(self):
        return self.dataset_class(self)

    def row_to_object(self, row):
        # Hydrate a single database row into a new object.
        object = self.new_object()
        self.table.load_row(object, row)
        return object

    def add(self, object):
        self.save(object)

    def save(self, object):
        """ INSERT the object if it is new, otherwise UPDATE its record,
        then refresh the cache entry and clear the dirty flag.
        """
        object.key = self.table.get_key(
            object)  # New objects need their key calculated.
        # NOTE(review): this skips only unchanged *new* objects; an
        # unchanged object already in the database still issues an UPDATE.
        if object.changed == 0 and object.in_database == 0:
            return
        if object.in_database == 0:
            field_list = []
            value_list = []
            for key in self.table.field_list:
                field = self.table.fields[key]
                if field.data_type == 'created':
                    # The database is responsible for setting the timestamp.
                    continue
                if field.data_type == 'sequence':
                    # When inserting, always increment the value.
                    new_id = db.next_id(self.name, field.field_name)
                    setattr(object, field.attribute, new_id)
                value = field.attr_to_field(getattr(object, field.attribute))
                field_list.append(field.field_name)
                value_list.append(value)
            sql = 'INSERT INTO %s (%s) VALUES (%s)' % (
                self.table.name, string.join(
                    field_list, ', '), string.join(value_list, ', '))
        else:
            update_list = []
            where_list = []
            for key in self.table.field_list:
                field = self.table.fields[key]
                if field.data_type == 'created':
                    continue
                if field.data_type == 'updated':
                    value = wsq(now_string())
                else:
                    value = field.attr_to_field(
                        getattr(object, field.attribute))
                update_list.append(field.field_name + '=' + value)
                if field.key_field == 1:
                    where_list.append(field.field_name + '=' + value)
            sql = 'UPDATE %s SET %s WHERE %s' % (
                self.table.name, string.join(
                    update_list, ', '), string.join(where_list, ' AND '))
        # print sql
        db.runsql(sql)
        db.commit()
        self.cache.add(object)
        object.in_database = 1
        object.changed = 0

    def delete(self, object):
        """ Remove the object's row, and record the deletion in the
        `deleted` journal table so other caches can synch().
        """
        if object.in_database == 0:
            return
        self.cache.delete(object)
        wheres = []
        for key in self.table.key_list:
            data_field = self.table.fields[key]
            value = data_field.attr_to_field(
                getattr(object, data_field.attribute))
            wheres.append(data_field.field_name + '=' + value)
        where = ' WHERE ' + string.join(wheres, ' AND ')
        sql = 'DELETE FROM %s %s' % (self.table.name, where)
        db.runsql(sql)
        db.commit()
        sql = 'INSERT INTO deleted (table_name, identifier) VALUES (%s, %s)' % (
            wsq(self.table.name), wsq(str(object.key)))
        db.runsql(sql)
        db.commit()

    def delete_by_keys(self, filters):
        # Delete every object matching the filters, one at a time.
        dataset = self.get_by_keys(filters)
        for key in dataset.keys():
            object = dataset[key]
            self.delete(object)

    def clear(self, dataset):
        # Delete every object contained in the given dataset.
        for key in dataset.keys():
            self.delete(dataset[key])
class Resolver:
    """Resolves ADF intrinsic parameter strings into concrete values.

    Supports importing CloudFormation stack outputs, uploading files to
    S3, and fetching SSM Parameter Store values, writing the results
    into ``stage_parameters`` (guided by ``comparison_parameters``).
    """

    def __init__(self, parameter_store, stage_parameters,
                 comparison_parameters):
        self.parameter_store = parameter_store
        self.stage_parameters = stage_parameters
        self.comparison_parameters = comparison_parameters
        self.sts = STS()
        # Per-run memoization of resolved values keyed by the raw
        # intrinsic string (or 'region/name' for SSM lookups).
        self.cache = Cache()

    @staticmethod
    def _is_optional(value):
        """A trailing '?' marks a parameter lookup as optional."""
        return value.endswith('?')

    def fetch_stack_output(self, value, key, optional=False):  # pylint: disable=too-many-statements
        """Resolve an ``import:account:region:stack:output_key`` string.

        Stores the resolved output into stage_parameters under the
        parent of ``key``. Raises ValueError for malformed import
        strings; missing outputs raise unless the lookup is optional.
        Returns True on completion.
        """
        partition = get_partition(DEFAULT_REGION)
        try:
            [_, account_id, region, stack_name, output_key] = (
                str(value).split(':'))
        except ValueError as error:
            raise ValueError(
                f"{value} is not a valid import string. Syntax should be "
                "import:account_id:region:stack_name:output_key") from error
        if Resolver._is_optional(output_key):
            LOGGER.info("Parameter %s is considered optional", output_key)
            optional = True
        output_key = output_key[:-1] if optional else output_key
        try:
            role = self.sts.assume_cross_account_role(
                f'arn:{partition}:iam::{account_id}:role/'
                'adf-readonly-automation-role',
                'importer')
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=os.environ["AWS_REGION"],
                role=role,
                stack_name=stack_name,
                account_id=account_id)
            # Serve from cache when possible, otherwise hit CloudFormation.
            stack_output = self.cache.check(
                value) or cloudformation.get_stack_output(output_key)
            if stack_output:
                LOGGER.info("Stack output value is %s", stack_output)
                self.cache.add(value, stack_output)
        except ClientError:
            if not optional:
                raise
            stack_output = ""
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if optional:
                self.stage_parameters[parent_key][key] = stack_output
            else:
                if not stack_output:
                    raise Exception(
                        f"No Stack Output found on {account_id} in {region} "
                        f"with stack name {stack_name} and "
                        f"output key {output_key}")
                self.stage_parameters[parent_key][key] = stack_output
        except IndexError as error:
            # No parent key found: the key lives at the top level.
            if stack_output:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = stack_output
            else:
                raise Exception(
                    "Could not determine the structure of the file in order "
                    "to import from CloudFormation",
                ) from error
        return True

    def upload(self, value, key, file_name):
        """Resolve an ``upload:[region:]style:path`` string by uploading
        the file to the regional ADF bucket and storing the returned
        S3 reference into stage_parameters. Returns True.
        """
        if not any(item in value for item in S3.supported_path_styles()):
            # Bug fix: the two adjacent literals previously concatenated to
            # "...path styleto use..." — a space was missing.
            raise Exception(
                'When uploading to S3 you need to specify a path style '
                'to use for the returned value to be used. '
                f'Supported path styles include: {S3.supported_path_styles()}'
            ) from None
        if str(value).count(':') > 2:
            [_, region, style, value] = value.split(':')
        else:
            [_, style, value] = value.split(':')
            region = DEFAULT_REGION
        bucket_name = self.parameter_store.fetch_parameter(
            f'/cross_region/s3_regional_bucket/{region}')
        client = S3(region, bucket_name)
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
        except IndexError:
            if self.stage_parameters.get(key):
                # Bug fix: dropped the redundant `.format(value, file_name)`
                # that was chained onto an already-interpolated f-string.
                self.stage_parameters[key] = client.put_object(
                    f"adf-upload/{value}/{file_name}",
                    str(value),
                    style,
                    True  # pre-check
                )
            return True
        self.stage_parameters[parent_key][key] = client.put_object(
            f"adf-upload/{value}/{file_name}",
            str(value),
            style,
            True  # pre-check
        )
        return True

    @staticmethod
    def determine_parent_key(d, target_key, parent_key=None):
        """Yield the parent key(s) under which ``target_key`` is found
        anywhere in the (possibly nested) dict ``d``.
        """
        for key, value in d.items():
            if key == target_key:
                yield parent_key
            if isinstance(value, dict):
                for result in Resolver.determine_parent_key(
                        value, target_key, key):
                    yield result

    def fetch_parameter_store_value(self, value, key, optional=False):  # pylint: disable=too-many-statements
        """Resolve a ``resolve:[region:]/param/name[?]`` string from SSM
        Parameter Store into stage_parameters. Returns True.
        """
        if self._is_optional(value):
            LOGGER.info("Parameter %s is considered optional", value)
            optional = True
        if str(value).count(':') > 1:
            [_, region, value] = value.split(':')
        else:
            [_, value] = value.split(':')
            region = DEFAULT_REGION
        value = value[:-1] if optional else value
        client = ParameterStore(region, boto3)
        try:
            parameter = self.cache.check(
                f'{region}/{value}') or client.fetch_parameter(value)
        except ParameterNotFoundError:
            if optional:
                LOGGER.info("Parameter %s not found, returning empty string",
                            value)
                parameter = ""
            else:
                raise
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if parameter:
                self.cache.add(f'{region}/{value}', parameter)
                self.stage_parameters[parent_key][key] = parameter
        except IndexError as error:
            if parameter:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = parameter
            else:
                LOGGER.error(
                    "Parameter was not found, unable to fetch it from parameter store"
                )
                raise Exception(
                    "Parameter was not found, unable to fetch it from parameter store"
                ) from error
        return True

    def update(self, key):
        """Back-fill ``key`` in stage_parameters from
        comparison_parameters where it is missing.
        """
        for k, _ in self.comparison_parameters.items():
            if not self.stage_parameters.get(
                    k) and not self.stage_parameters.get(k, {}).get(key):
                self.stage_parameters[k] = self.comparison_parameters[k]
            if key not in self.stage_parameters[
                    k] and self.comparison_parameters.get(k, {}).get(key):
                self.stage_parameters[k][key] = self.comparison_parameters[k][
                    key]
class Player:
    """Console music player that drives an external ``mpg123 -R`` process.

    Playback state lives in self.info (persisted via Storage); song
    metadata in self.songs. Playing modes: 0 ordered, 1 ordered loop,
    2 single-song loop, 3 shuffle, 4 shuffle loop.
    """

    def __init__(self):
        self.config = Config()
        self.ui = Ui()
        # Handle of the spawned mpg123 subprocess (None until playback).
        self.popen_handler = None
        # flag stop, prevent thread start
        self.playing_flag = False
        self.pause_flag = False
        # Track length / elapsed bookkeeping parsed from mpg123 @F frames.
        self.process_length = 0
        self.process_location = 0
        self.process_first = False
        self.storage = Storage()
        self.info = self.storage.database["player_info"]
        self.songs = self.storage.database["songs"]
        self.playing_id = -1
        self.cache = Cache()
        self.notifier = self.config.get_item("notifier")
        self.mpg123_parameters = self.config.get_item("mpg123_parameters")
        # Invoked when the playlist is exhausted.
        self.end_callback = None
        # Invoked whenever next_idx()/prev_idx() changes the current song.
        self.playing_song_changed_callback = None

    def popen_recall(self, onExit, popenArgs):
        """
        Runs the given args in subprocess.Popen, and then calls the function
        onExit when the subprocess completes. onExit is a callable object, and
        popenArgs is a lists/tuple of args that would give to subprocess.Popen.
        """

        def runInThread(onExit, arg):
            # Spawn mpg123 in remote-control mode and feed it commands
            # over stdin; parse its status lines from stdout.
            para = ['mpg123', '-R']
            para[1:1] = self.mpg123_parameters
            self.popen_handler = subprocess.Popen(para,
                                                  stdin=subprocess.PIPE,
                                                  stdout=subprocess.PIPE,
                                                  stderr=subprocess.PIPE)
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
            if arg:
                # "L <url>" loads and starts playing the track.
                self.popen_handler.stdin.write("L " + arg + "\n")
            self.process_first = True
            while True:
                if self.playing_flag is False:
                    break
                try:
                    strout = self.popen_handler.stdout.readline()
                except IOError:
                    break
                if re.match("^\@F.*$", strout):
                    # @F frame info: field 4 holds seconds; the first @F
                    # reports total length, later ones count down remaining.
                    process_data = strout.split(" ")
                    process_location = float(process_data[4])
                    if self.process_first:
                        self.process_length = process_location
                        self.process_first = False
                        self.process_location = 0
                    else:
                        self.process_location = self.process_length - process_location  # NOQA
                    continue
                elif strout[:2] == '@E':
                    # mpg123 reported an error on the stream:
                    # get a alternative url from new api
                    sid = popenArgs['song_id']
                    new_url = NetEase().songs_detail_new_api([sid])[0]['url']
                    if new_url is None:
                        log.warning(('Song {} is unavailable '
                                     'due to copyright issue').format(sid))
                        break
                    log.error(
                        'Song {} is not compatible with old api.'.format(sid))
                    self.popen_handler.stdin.write("\nL " + new_url + "\n")
                    self.popen_handler.stdout.readline()
                elif strout == "@P 0\n":
                    # Track finished: quit and kill the subprocess.
                    self.popen_handler.stdin.write("Q\n")
                    self.popen_handler.kill()
                    break
            if self.playing_flag:
                # Natural end of track: advance and chain into onExit
                # (which is recall(), starting the next song).
                self.next_idx()
                onExit()
            return

        def getLyric():
            # Fetch and cache lyrics for the current song (idempotent).
            if 'lyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]["lyric"] = []
            if len(self.songs[str(self.playing_id)]["lyric"]) > 0:
                return
            netease = NetEase()
            lyric = netease.song_lyric(self.playing_id)
            if lyric == [] or lyric == '未找到歌词':
                return
            lyric = lyric.split('\n')
            self.songs[str(self.playing_id)]["lyric"] = lyric
            return

        def gettLyric():
            # Fetch and cache the translated lyrics (idempotent).
            if 'tlyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]["tlyric"] = []
            if len(self.songs[str(self.playing_id)]["tlyric"]) > 0:
                return
            netease = NetEase()
            tlyric = netease.song_tlyric(self.playing_id)
            if tlyric == [] or tlyric == '未找到歌词翻译':
                return
            tlyric = tlyric.split('\n')
            self.songs[str(self.playing_id)]["tlyric"] = tlyric
            return

        def cacheSong(song_id, song_name, artist, song_url):
            # Download the song in the background; record the local path
            # on completion so future plays use the cached file.
            def cacheExit(song_id, path):
                self.songs[str(song_id)]['cache'] = path

            self.cache.add(song_id, song_name, artist, song_url, cacheExit)
            self.cache.start_download()

        # Prefer the locally cached file when it exists.
        if 'cache' in popenArgs.keys() and os.path.isfile(popenArgs['cache']):
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['cache']))
        else:
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['mp3_url']))
        cache_thread = threading.Thread(
            target=cacheSong,
            args=(popenArgs['song_id'], popenArgs['song_name'],
                  popenArgs['artist'], popenArgs['mp3_url']))
        cache_thread.start()
        thread.start()
        lyric_download_thread = threading.Thread(target=getLyric, args=())
        lyric_download_thread.start()
        tlyric_download_thread = threading.Thread(target=gettLyric, args=())
        tlyric_download_thread.start()
        # returns immediately after the thread starts
        return thread

    def get_playing_id(self):
        return self.playing_id

    def recall(self):
        """(Re)start playback of the song at the current index; also used
        as the on-exit callback so playback chains to the next track."""
        if self.info["idx"] >= len(
                self.info["player_list"]) and self.end_callback is not None:
            self.end_callback()
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.info["idx"] = 0
            self.stop()
            return
        self.playing_flag = True
        self.pause_flag = False
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        if self.notifier:
            self.ui.notify("Now playing", item['song_name'],
                           item['album_name'], item['artist'])
        self.playing_id = item['song_id']
        self.popen_recall(self.recall, item)

    def generate_shuffle_playing_list(self):
        # Rebuild playing_list as a shuffled permutation of player_list
        # indices and reset the shuffle cursor.
        del self.info["playing_list"][:]
        for i in range(0, len(self.info["player_list"])):
            self.info["playing_list"].append(i)
        random.shuffle(self.info["playing_list"])
        self.info["ridx"] = 0

    def new_player_list(self, type, title, datalist, offset):
        """Replace the current playlist with ``datalist``, starting at
        ``offset``; merges song metadata into the songs database."""
        self.info["player_list_type"] = type
        self.info["player_list_title"] = title
        self.info["idx"] = offset
        del self.info["player_list"][:]
        del self.info["playing_list"][:]
        self.info["ridx"] = 0
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                database_song = self.songs[str(song["song_id"])]
                if (database_song["song_name"] != song["song_name"] or
                        database_song["quality"] != song["quality"]):
                    self.songs[str(song["song_id"])] = song

    def append_songs(self, datalist):
        """Append ``datalist`` to the current playlist, preserving any
        recorded local-cache path when refreshing stale metadata."""
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                database_song = self.songs[str(song["song_id"])]
                if database_song["song_name"] != song["song_name"] or \
                        database_song["quality"] != song["quality"] or \
                        database_song["mp3_url"] != song["mp3_url"]:
                    if "cache" in self.songs[str(song["song_id"])].keys():
                        song["cache"] = self.songs[str(
                            song["song_id"])]["cache"]
                    self.songs[str(song["song_id"])] = song
        # Reshuffle when in a shuffle mode so new songs are included.
        # NOTE(review): `and` binds tighter than `or` here — confirm the
        # intended grouping is (len>0 and mode==3) or mode==4.
        if len(datalist) > 0 and self.info["playing_mode"] == 3 or self.info[
                "playing_mode"] == 4:
            self.generate_shuffle_playing_list()

    def play_and_pause(self, idx):
        # if same playlists && idx --> same song :: pause/resume it
        if self.info["idx"] == idx:
            if self.pause_flag:
                self.resume()
            else:
                self.pause()
        else:
            self.info["idx"] = idx
            # if it's playing
            if self.playing_flag:
                self.switch()
            # start new play
            else:
                self.recall()

    # play another
    def switch(self):
        self.stop()
        # wait process be killed
        time.sleep(0.1)
        self.recall()

    def stop(self):
        """Ask mpg123 to quit, then kill the subprocess; best-effort."""
        if self.playing_flag and self.popen_handler:
            self.playing_flag = False
            try:
                self.popen_handler.stdin.write("Q\n")
            except:
                pass
            try:
                self.popen_handler.kill()
            except:
                return

    def pause(self):
        """Toggle mpg123's pause state on ('P' command)."""
        if not self.playing_flag and not self.popen_handler:
            return
        self.pause_flag = True
        try:
            self.popen_handler.stdin.write("P\n")
        except:
            self.switch()
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time(),
                               pause=True)

    def resume(self):
        """Toggle mpg123's pause state off ('P' command) and refresh UI."""
        self.pause_flag = False
        try:
            self.popen_handler.stdin.write("P\n")
        except:
            self.switch()
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']

    def next_idx(self):
        """Advance the current index according to the playing mode."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        # Playing mode. 0 is ordered. 1 is orderde loop.
        # 2 is single song loop. 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self.info["idx"] += 1
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] + 1) % len(
                self.info["player_list"])
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3:
            # Shuffle: if the cursor is exhausted or desynchronized,
            # reshuffle and move the current song to the front so it is
            # not immediately repeated.
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            self.info["ridx"] += 1
            if self.info["ridx"] >= len(self.info["playing_list"]):
                # Mode 3 does not loop: park idx out of range to stop.
                self.info["idx"] = len(self.info["playing_list"])
            else:
                self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        elif self.info["playing_mode"] == 4:
            # Shuffle loop: same resynchronization as mode 3, but the
            # cursor wraps around instead of stopping.
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            self.info["ridx"] = (self.info["ridx"] + 1) % len(
                self.info["player_list"])
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] += 1
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def next(self):
        self.stop()
        time.sleep(0.01)
        self.next_idx()
        self.recall()

    def prev_idx(self):
        """Move the current index backwards according to the playing mode."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        # Playing mode. 0 is ordered. 1 is orderde loop.
        # 2 is single song loop. 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self.info["idx"] -= 1
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] - 1) % len(
                self.info["player_list"])
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
            self.info["ridx"] -= 1
            if self.info["ridx"] < 0:
                self.info["ridx"] = 0
                return
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        elif self.info["playing_mode"] == 4:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
            self.info["ridx"] = (self.info["ridx"] - 1) % len(
                self.info["player_list"])
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] -= 1
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def prev(self):
        self.stop()
        time.sleep(0.01)
        self.prev_idx()
        self.recall()

    def shuffle(self):
        # Switch to shuffle mode (3) and start from a fresh permutation.
        self.stop()
        time.sleep(0.01)
        self.info["playing_mode"] = 3
        self.generate_shuffle_playing_list()
        self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        self.recall()

    def volume_up(self):
        # Raise the volume (clamped to 100) and push it to mpg123.
        self.info["playing_volume"] = self.info["playing_volume"] + 7
        if (self.info["playing_volume"] > 100):
            self.info["playing_volume"] = 100
        if not self.playing_flag:
            return
        try:
            self.popen_handler.stdin.write(
                "V " + str(self.info["playing_volume"]) + "\n")
        except:
            self.switch()

    def volume_down(self):
        # Lower the volume (clamped to 0) and push it to mpg123.
        self.info["playing_volume"] = self.info["playing_volume"] - 7
        if (self.info["playing_volume"] < 0):
            self.info["playing_volume"] = 0
        if not self.playing_flag:
            return
        try:
            self.popen_handler.stdin.write(
                "V " + str(self.info["playing_volume"]) + "\n")
        except:
            self.switch()

    def update_size(self):
        # Redraw the play info after a terminal resize; best-effort.
        try:
            self.ui.update_size()
            item = self.songs[self.info["player_list"][self.info["idx"]]]
            if self.playing_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time())
            if self.pause_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time(),
                                       pause=True)
        except:
            pass

    def cacheSong1time(self, song_id, song_name, artist, song_url):
        """Download a single song, disabling the cache again once the
        download completes."""
        def cacheExit(song_id, path):
            self.songs[str(song_id)]['cache'] = path
            self.cache.enable = False

        self.cache.enable = True
        self.cache.add(song_id, song_name, artist, song_url, cacheExit)
        self.cache.start_download()
def deleteTest():
    """Exercise Cache deletion: single-key expire, expireAll, LRU ordering
    after deletions, forward/reverse iteration agreement, and re-insertion
    after deletes.  Prints a diagnostic for every failed check."""
    c = Cache()
    c.add(1, "one")
    c.add('h', "letter h")
    c.add('12', "twelve")
    c.expire(1)
    if c.get(1) is not None:
        print("deletion of 1 failed")
        print(c.getCacheValues())
    c.expireAll()
    y = c.getCacheValues()
    for entry in y:
        if entry:
            print('failed to delete all.')
            break
    else:
        # for/else: runs only when no non-empty entry was found.
        print("Successfully deleted all entries.")
    # We cache some numbers, note that 0 will be the "least recently used"
    # member of the cache.
    for i in range(0, 10):
        c.add(i, i)
    # We remove the tail
    c.expire(0)
    # We remove the head
    c.expire(9)
    # We remove a number in the middle.
    c.expire(5)
    values = c.getCacheValues()[1]
    # BUG FIX: 5 was expired above, so it must be excluded from the expected
    # MRU-to-LRU ordering.  The old `list(range(8, 0, -1))` wrongly included
    # 5, making this comparison always fail.
    expectedValues = [x for x in range(8, 0, -1) if x != 5]
    if values != expectedValues:
        print('values and expected values dont match')
        print('cache ', values)
        print('expected ', expectedValues)
    # Reverse iteration, reversed again, must equal forward order.
    reversedValues = [x.key for x in c.reverse_iterate()]
    reversedValues.reverse()
    if values != reversedValues:
        print("Forward and reverse don't match")
        print('cache forward', values)
        print('cache reverse ', reversedValues)
    else:
        print("Tested forward and reverse values and they match.")
    # make sure we can add in new things still.
    c.add(1, "one again")
    c.add(11, "eleven")
    if c.get(1) is None or c.get(11) is None:
        print("adding new values after deletion failed")
    else:
        print("Successfully able to add new values after deletion.")
class Player:
    """mpg123-backed music player driving a remote-control ('mpg123 -R')
    subprocess; tracks playlist state in Storage and caches songs/lyrics
    on background threads."""

    def __init__(self):
        self.config = Config()
        self.ui = Ui()
        self.popen_handler = None
        # flag stop, prevent thread start
        self.playing_flag = False
        self.pause_flag = False
        self.process_length = 0
        self.process_location = 0
        self.process_first = False
        self.storage = Storage()
        self.info = self.storage.database["player_info"]
        self.songs = self.storage.database["songs"]
        self.playing_id = -1
        self.cache = Cache()
        self.mpg123_parameters = self.config.get_item("mpg123_parameters")

    def popen_recall(self, onExit, popenArgs):
        """
        Runs the given args in a subprocess.Popen, and then calls the function
        onExit when the subprocess completes.
        onExit is a callable object, and popenArgs is a lists/tuple of args
        that would give to subprocess.Popen.
        Also spawns background threads to cache the song file and fetch its
        lyric.  Returns the playback thread immediately.
        """
        def runInThread(onExit, popenArgs):
            # Drive mpg123 in remote-control mode over stdin/stdout.
            para = ['mpg123', '-R']
            para[1:1] = self.mpg123_parameters
            self.popen_handler = subprocess.Popen(para,
                                                  stdin=subprocess.PIPE,
                                                  stdout=subprocess.PIPE,
                                                  stderr=subprocess.PIPE)
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
            self.popen_handler.stdin.write("L " + popenArgs + "\n")
            self.process_first = True
            while (True):
                if self.playing_flag == False:
                    break
                try:
                    strout = self.popen_handler.stdout.readline()
                except IOError:
                    break
                # "@F ..." frame lines carry the remaining-seconds field.
                if re.match("^\@F.*$", strout):
                    process_data = strout.split(" ")
                    process_location = float(process_data[4])
                    if self.process_first:
                        # First frame reports the full track length.
                        self.process_length = process_location
                        self.process_first = False
                        self.process_location = 0
                    else:
                        self.process_location = self.process_length - process_location
                    continue
                # "@P 0" means playback finished.
                if strout == "@P 0\n":
                    self.popen_handler.stdin.write("Q\n")
                    self.popen_handler.kill()
                    break
            if self.playing_flag:
                self.next_idx()
                onExit()
            return

        def getLyric():
            # Fetch the lyric once; cached in the songs database afterwards.
            if 'lyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]["lyric"] = []
            if len(self.songs[str(self.playing_id)]["lyric"]) > 0:
                return
            netease = NetEase()
            lyric = netease.song_lyric(self.playing_id)
            if (not lyric == []) or lyric == '未找到歌词':
                lyric = lyric.split('\n')
            self.songs[str(self.playing_id)]["lyric"] = lyric
            return

        def cacheSong(song_id, song_url):
            def cacheExit(song_id, path):
                self.songs[str(song_id)]['cache'] = path

            self.cache.add(song_id, song_url, cacheExit)
            self.cache.start_download()

        # Prefer the locally cached file when it exists.
        if 'cache' in popenArgs.keys() and os.path.isfile(popenArgs['cache']):
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['cache']))
        else:
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['mp3_url']))
        cache_thread = threading.Thread(target=cacheSong,
                                        args=(popenArgs['song_id'],
                                              popenArgs['mp3_url']))
        cache_thread.start()
        thread.start()
        lyric_download_thread = threading.Thread(target=getLyric, args=())
        lyric_download_thread.start()
        # returns immediately after the thread starts
        return thread

    def recall(self):
        """(Re)start playback of the current song; also used as the onExit
        callback so the next song plays automatically."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        self.playing_flag = True
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']
        self.popen_recall(self.recall, item)

    def generate_shuffle_playing_list(self):
        """Rebuild playing_list as a shuffled permutation of player_list
        indices and reset the shuffle cursor."""
        del self.info["playing_list"][:]
        for i in range(0, len(self.info["player_list"])):
            self.info["playing_list"].append(i)
        random.shuffle(self.info["playing_list"])
        self.info["ridx"] = 0

    def new_player_list(self, type, title, datalist, offset):
        """Replace the whole playlist with `datalist`, starting at `offset`;
        updates the songs database for changed entries."""
        self.info["player_list_type"] = type
        self.info["player_list_title"] = title
        self.info["idx"] = offset
        del self.info["player_list"][:]
        del self.info["playing_list"][:]
        self.info["ridx"] = 0
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                database_song = self.songs[str(song["song_id"])]
                if (database_song["song_name"] != song["song_name"]
                        or database_song["quality"] != song["quality"]):
                    self.songs[str(song["song_id"])] = song

    def play_and_pause(self, idx):
        """Toggle pause when `idx` is the current song; otherwise jump to it."""
        # if same playlists && idx --> same song :: pause/resume it
        if self.info["idx"] == idx:
            if self.pause_flag:
                self.resume()
            else:
                self.pause()
        else:
            self.info["idx"] = idx
            # if it's playing
            if self.playing_flag:
                self.switch()
            # start new play
            else:
                self.recall()

    # play another
    def switch(self):
        """Stop the current song and start the one at the current index."""
        self.stop()
        # wait process be killed
        time.sleep(0.1)
        self.recall()

    def stop(self):
        """Stop playback and terminate the mpg123 process."""
        if self.playing_flag and self.popen_handler:
            self.playing_flag = False
            # BUG FIX: "Q" write and kill() used to share one try block, so
            # a failed stdin write (broken pipe) skipped kill() and leaked
            # the mpg123 process.  Attempt both independently.
            try:
                self.popen_handler.stdin.write("Q\n")
            except:
                pass
            try:
                self.popen_handler.kill()
            except:
                return

    def pause(self):
        """Pause playback by SIGSTOPping the player process."""
        if not self.playing_flag and not self.popen_handler:
            return
        self.pause_flag = True
        os.kill(self.popen_handler.pid, signal.SIGSTOP)
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'],
                               item['artist'],
                               item['album_name'],
                               item['quality'],
                               time.time(),
                               pause=True)

    def resume(self):
        """Resume a paused player process via SIGCONT."""
        self.pause_flag = False
        os.kill(self.popen_handler.pid, signal.SIGCONT)
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']

    def next_idx(self):
        """Advance the current index according to the playing mode."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        # Playing mode. 0 is ordered. 1 is orderde loop. 2 is single song loop.
        # 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self.info["idx"] += 1
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] + 1) % len(
                self.info["player_list"])
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3:
            # Keep the shuffle order consistent with the current song: after
            # regenerating, swap the current song to position 0.
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            self.info["ridx"] += 1
            if self.info["ridx"] >= len(self.info["playing_list"]):
                # End of the shuffle order: stop in single-random mode.
                self.stop()
                return
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        elif self.info["playing_mode"] == 4:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            # Random loop: wrap around instead of stopping.
            self.info["ridx"] = (self.info["ridx"] + 1) % len(
                self.info["player_list"])
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] += 1

    def next(self):
        """Skip to the next song."""
        self.stop()
        time.sleep(0.01)
        self.next_idx()
        self.recall()

    def prev_idx(self):
        """Move the current index one song backwards (mirror of next_idx)."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        # Playing mode. 0 is ordered. 1 is orderde loop. 2 is single song loop.
        # 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self.info["idx"] -= 1
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] - 1) % len(
                self.info["player_list"])
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
            self.info["ridx"] -= 1
            if self.info["ridx"] < 0:
                self.info["ridx"] = 0
                return
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        elif self.info["playing_mode"] == 4:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
            self.info["ridx"] = (self.info["ridx"] - 1) % len(
                self.info["player_list"])
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] -= 1

    def prev(self):
        """Skip to the previous song."""
        self.stop()
        time.sleep(0.01)
        self.prev_idx()
        self.recall()

    def shuffle(self):
        """Switch to single-random mode with a fresh shuffle order."""
        self.stop()
        time.sleep(0.01)
        self.info["playing_mode"] = 3
        self.generate_shuffle_playing_list()
        self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        self.recall()

    def volume_up(self):
        """Raise the volume by 7 (capped at 100) and push it to mpg123."""
        self.info["playing_volume"] = self.info["playing_volume"] + 7
        if (self.info["playing_volume"] > 100):
            self.info["playing_volume"] = 100
        if not self.playing_flag:
            return
        try:
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
        except:
            self.switch()

    def volume_down(self):
        """Lower the volume by 7 (floored at 0) and push it to mpg123."""
        self.info["playing_volume"] = self.info["playing_volume"] - 7
        if (self.info["playing_volume"] < 0):
            self.info["playing_volume"] = 0
        if not self.playing_flag:
            return
        try:
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
        except:
            self.switch()

    def update_size(self):
        """Redraw playing info after a terminal resize; best-effort."""
        try:
            self.ui.update_size()
            item = self.songs[self.info["player_list"][self.info["idx"]]]
            if self.playing_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time())
            if self.pause_flag:
                self.ui.build_playinfo(item['song_name'],
                                       item['artist'],
                                       item['album_name'],
                                       item['quality'],
                                       time.time(),
                                       pause=True)
        except:
            pass
class Player:
    """mpg123-backed music player (later revision: adds end_callback,
    get_playing_id and append_songs, and the hardened stop())."""

    def __init__(self):
        self.config = Config()
        self.ui = Ui()
        self.popen_handler = None
        # flag stop, prevent thread start
        self.playing_flag = False
        self.pause_flag = False
        self.process_length = 0
        self.process_location = 0
        self.process_first = False
        self.storage = Storage()
        self.info = self.storage.database["player_info"]
        self.songs = self.storage.database["songs"]
        self.playing_id = -1
        self.cache = Cache()
        self.mpg123_parameters = self.config.get_item("mpg123_parameters")
        # Invoked when playback runs off the end of the playlist.
        self.end_callback = None

    def popen_recall(self, onExit, popenArgs):
        """
        Runs the given args in a subprocess.Popen, and then calls the function
        onExit when the subprocess completes.
        onExit is a callable object, and popenArgs is a lists/tuple of args
        that would give to subprocess.Popen.
        Also spawns background threads to cache the song file and fetch its
        lyric.  Returns the playback thread immediately.
        """
        def runInThread(onExit, popenArgs):
            # Drive mpg123 in remote-control mode over stdin/stdout.
            para = ['mpg123', '-R']
            para[1:1] = self.mpg123_parameters
            self.popen_handler = subprocess.Popen(para,
                                                  stdin=subprocess.PIPE,
                                                  stdout=subprocess.PIPE,
                                                  stderr=subprocess.PIPE)
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
            self.popen_handler.stdin.write("L " + popenArgs + "\n")
            self.process_first = True
            while (True):
                if self.playing_flag == False:
                    break
                try:
                    strout = self.popen_handler.stdout.readline()
                except IOError:
                    break
                # "@F ..." frame lines carry the remaining-seconds field.
                if re.match("^\@F.*$", strout):
                    process_data = strout.split(" ")
                    process_location = float(process_data[4])
                    if self.process_first:
                        self.process_length = process_location
                        self.process_first = False
                        self.process_location = 0
                    else:
                        self.process_location = self.process_length - process_location
                    continue
                # "@P 0" means playback finished.
                if strout == "@P 0\n":
                    self.popen_handler.stdin.write("Q\n")
                    self.popen_handler.kill()
                    break
            if self.playing_flag:
                self.next_idx()
                onExit()
            return

        def getLyric():
            if 'lyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]["lyric"] = []
            if len(self.songs[str(self.playing_id)]["lyric"]) > 0:
                return
            netease = NetEase()
            lyric = netease.song_lyric(self.playing_id)
            if (not lyric == []) or lyric == '未找到歌词':
                lyric = lyric.split('\n')
            self.songs[str(self.playing_id)]["lyric"] = lyric
            return

        def cacheSong(song_id, song_name, artist, song_url):
            def cacheExit(song_id, path):
                self.songs[str(song_id)]['cache'] = path

            self.cache.add(song_id, song_name, artist, song_url, cacheExit)
            self.cache.start_download()

        # Prefer the locally cached file when it exists.
        if 'cache' in popenArgs.keys() and os.path.isfile(popenArgs['cache']):
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['cache']))
        else:
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['mp3_url']))
        cache_thread = threading.Thread(target=cacheSong,
                                        args=(popenArgs['song_id'],
                                              popenArgs['song_name'],
                                              popenArgs['artist'],
                                              popenArgs['mp3_url']))
        cache_thread.start()
        thread.start()
        lyric_download_thread = threading.Thread(target=getLyric, args=())
        lyric_download_thread.start()
        # returns immediately after the thread starts
        return thread

    def get_playing_id(self):
        """Return the song_id of the currently playing song."""
        return self.playing_id

    def recall(self):
        """(Re)start playback of the current song; fires end_callback when
        the index runs off the end of the playlist."""
        if self.info["idx"] >= len(
                self.info["player_list"]) and self.end_callback != None:
            self.end_callback()
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            # Reset to the start so the next play begins at song 0.
            self.info["idx"] = 0
            self.stop()
            return
        self.playing_flag = True
        self.pause_flag = False
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']
        self.popen_recall(self.recall, item)

    def generate_shuffle_playing_list(self):
        """Rebuild playing_list as a shuffled permutation of player_list
        indices and reset the shuffle cursor."""
        del self.info["playing_list"][:]
        for i in range(0, len(self.info["player_list"])):
            self.info["playing_list"].append(i)
        random.shuffle(self.info["playing_list"])
        self.info["ridx"] = 0

    def new_player_list(self, type, title, datalist, offset):
        """Replace the whole playlist with `datalist`, starting at `offset`."""
        self.info["player_list_type"] = type
        self.info["player_list_title"] = title
        self.info["idx"] = offset
        del self.info["player_list"][:]
        del self.info["playing_list"][:]
        self.info["ridx"] = 0
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                database_song = self.songs[str(song["song_id"])]
                if (database_song["song_name"] != song["song_name"]
                        or database_song["quality"] != song["quality"]):
                    self.songs[str(song["song_id"])] = song

    def append_songs(self, datalist):
        """Append `datalist` to the current playlist, updating the songs
        database, and refresh the shuffle order in random modes."""
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                database_song = self.songs[str(song["song_id"])]
                if (database_song["song_name"] != song["song_name"]
                        or database_song["quality"] != song["quality"]):
                    self.songs[str(song["song_id"])] = song
        # BUG FIX: the old condition parsed as
        # (len(datalist) > 0 and mode == 3) or mode == 4, so an EMPTY
        # datalist still regenerated the shuffle list in mode 4.
        if len(datalist) > 0 and (self.info["playing_mode"] == 3
                                  or self.info["playing_mode"] == 4):
            self.generate_shuffle_playing_list()

    def play_and_pause(self, idx):
        """Toggle pause when `idx` is the current song; otherwise jump to it."""
        # if same playlists && idx --> same song :: pause/resume it
        if self.info["idx"] == idx:
            if self.pause_flag:
                self.resume()
            else:
                self.pause()
        else:
            self.info["idx"] = idx
            # if it's playing
            if self.playing_flag:
                self.switch()
            # start new play
            else:
                self.recall()

    # play another
    def switch(self):
        """Stop the current song and start the one at the current index."""
        self.stop()
        # wait process be killed
        time.sleep(0.1)
        self.recall()

    def stop(self):
        """Stop playback; write "Q" and kill() are attempted independently
        so a broken pipe cannot leak the mpg123 process."""
        if self.playing_flag and self.popen_handler:
            self.playing_flag = False
            try:
                self.popen_handler.stdin.write("Q\n")
            except:
                pass
            try:
                self.popen_handler.kill()
            except:
                return

    def pause(self):
        """Pause playback by SIGSTOPping the player process."""
        if not self.playing_flag and not self.popen_handler:
            return
        self.pause_flag = True
        os.kill(self.popen_handler.pid, signal.SIGSTOP)
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'],
                               item['artist'],
                               item['album_name'],
                               item['quality'],
                               time.time(),
                               pause=True)

    def resume(self):
        """Resume a paused player process via SIGCONT."""
        self.pause_flag = False
        os.kill(self.popen_handler.pid, signal.SIGCONT)
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']

    def next_idx(self):
        """Advance the current index according to the playing mode."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        # Playing mode. 0 is ordered. 1 is orderde loop. 2 is single song loop.
        # 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self.info["idx"] += 1
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] + 1) % len(
                self.info["player_list"])
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3:
            # Keep the shuffle order consistent with the current song: after
            # regenerating, swap the current song to position 0.
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            self.info["ridx"] += 1
            if self.info["ridx"] >= len(self.info["playing_list"]):
                # Out of songs: park idx past the end so recall() can detect
                # playlist exhaustion and fire end_callback.
                self.info["idx"] = len(self.info["playing_list"])
            else:
                self.info["idx"] = self.info["playing_list"][
                    self.info["ridx"]]
        elif self.info["playing_mode"] == 4:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                try:
                    now_songs = self.info["playing_list"].index(
                        self.info["idx"])
                    temp = self.info["playing_list"][0]
                    self.info["playing_list"][0] = self.info["playing_list"][
                        now_songs]
                    self.info["playing_list"][now_songs] = temp
                except:
                    self.generate_shuffle_playing_list()
            # Random loop: wrap around instead of stopping.
            self.info["ridx"] = (self.info["ridx"] + 1) % len(
                self.info["player_list"])
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] += 1

    def next(self):
        """Skip to the next song."""
        self.stop()
        time.sleep(0.01)
        self.next_idx()
        self.recall()

    def prev_idx(self):
        """Move the current index one song backwards (mirror of next_idx)."""
        if self.info["idx"] < 0 or self.info["idx"] >= len(
                self.info["player_list"]):
            self.stop()
            return
        # Playing mode. 0 is ordered. 1 is orderde loop. 2 is single song loop.
        # 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self.info["idx"] -= 1
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] - 1) % len(
                self.info["player_list"])
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
            self.info["ridx"] -= 1
            if self.info["ridx"] < 0:
                self.info["ridx"] = 0
                return
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        elif self.info["playing_mode"] == 4:
            if self.info["ridx"] >= len(self.info["playing_list"]):
                self.generate_shuffle_playing_list()
            elif self.info["playing_list"][
                    self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
            self.info["ridx"] = (self.info["ridx"] - 1) % len(
                self.info["player_list"])
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] -= 1

    def prev(self):
        """Skip to the previous song."""
        self.stop()
        time.sleep(0.01)
        self.prev_idx()
        self.recall()

    def shuffle(self):
        """Switch to single-random mode with a fresh shuffle order."""
        self.stop()
        time.sleep(0.01)
        self.info["playing_mode"] = 3
        self.generate_shuffle_playing_list()
        self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        self.recall()

    def volume_up(self):
        """Raise the volume by 7 (capped at 100) and push it to mpg123."""
        self.info["playing_volume"] = self.info["playing_volume"] + 7
        if (self.info["playing_volume"] > 100):
            self.info["playing_volume"] = 100
        if not self.playing_flag:
            return
        try:
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
        except:
            self.switch()

    def volume_down(self):
        """Lower the volume by 7 (floored at 0) and push it to mpg123."""
        self.info["playing_volume"] = self.info["playing_volume"] - 7
        if (self.info["playing_volume"] < 0):
            self.info["playing_volume"] = 0
        if not self.playing_flag:
            return
        try:
            self.popen_handler.stdin.write("V " +
                                           str(self.info["playing_volume"]) +
                                           "\n")
        except:
            self.switch()

    def update_size(self):
        """Redraw playing info after a terminal resize; best-effort."""
        try:
            self.ui.update_size()
            item = self.songs[self.info["player_list"][self.info["idx"]]]
            if self.playing_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time())
            if self.pause_flag:
                self.ui.build_playinfo(item['song_name'],
                                       item['artist'],
                                       item['album_name'],
                                       item['quality'],
                                       time.time(),
                                       pause=True)
        except:
            pass
class Search_APIs(object):
    """Python 2 wrapper around Google/Bing/Moz search plus a page fetcher,
    with per-search-type JSON-file caches (related, backlink, keyword).
    NOTE(review): API keys are empty placeholders; the constructor exits
    unless they are filled in."""

    def __init__(self, data_dir, fetcher):
        google_api_key = ""
        if not google_api_key:
            print "Error! google_api_key is missing"
            sys.exit(1)
        google_cse_id = ""  # Google custome search engine id
        if not google_cse_id:
            print "Error! google_cse_id is missing"
            sys.exit(1)
        self.google = Google_Search(google_api_key, google_cse_id)
        self.google_delay = 1  # 5QPS limit: https://developers.google.com/webmaster-tools/search-console-api-original/v3/limits
        bing_api_key = ""
        if not bing_api_key:
            print "Error! bing_api_key is missing"
            sys.exit(1)
        self.bing = Bing_Search(bing_api_key)
        self.bing_delay = 1
        # Setting cache for related search
        related_cache_file = data_dir + "/related_search.json"
        self.related_cache = Cache(related_cache_file)
        print "Loaded ", self.related_cache.length(
        ), " queries from related search cache"
        # Setting cache for backlink search
        access_id = ""
        if not access_id:
            print "Error! access_id is missing"
            sys.exit(1)
        secret_key = ""
        if not secret_key:
            print "Error! secret_key is missing"
            sys.exit(1)
        self.moz = Moz_Search(access_id, secret_key)
        backlink_cache_file = data_dir + "/backlink_search.json"
        self.backlink_cache = Cache(backlink_cache_file)
        print "Loaded ", self.backlink_cache.length(
        ), " queries from backlink search cache"
        self.moz_delay = 1
        # Setting cache for keyword search
        keyword_cache_file = data_dir + "/keyword_search.json"
        self.keyword_cache = Cache(keyword_cache_file)
        print "Loaded ", self.keyword_cache.length(
        ), " queries from keyword search cache"
        # Setting cache for forward search
        #self.fetcher = Fetcher(data_dir, "/forward_search.json")
        self.fetcher = fetcher
        self.link_extractor = Link_Extractor()
        self.k = 10  # Number of keywords selected in each extraction
        self.max_urls = 10  # maximum number of urls to extract from each pages
        self.keywords = set()  # Keywords extracted from relevant sites

    def set_max_keywords(self, max_kw):
        # Number of keywords returned per _extract_keywords call.
        self.k = max_kw

    def _extract_keywords(self, sites, k=10):
        """
        Extract top k most frequent keywords. Skip ones that were selected.
        Counts unigrams and bigrams over the cleaned page text, then keeps
        the k most common words not previously returned (tracked in
        self.keywords).
        """
        stop = stopwords.words('english')
        counter = Counter()
        for site in sites:
            for p in site:
                text = p.get_text('meta')
                text = URLUtility.clean_text(text)
                words = nltk.word_tokenize(text)
                # Drop stopwords and very short tokens.
                words = [
                    word for word in words
                    if word not in stop and len(word) > 2
                ]
                bigram_words = [
                    words[i] + ' ' + words[i + 1]
                    for i in xrange(len(words) - 1)
                ]
                counter += Counter(words + bigram_words)
        # Get the topk words
        """
        counter = [(counter[w], w) for w in counter if counter[w]>1] # convert to array
        heapq.heapify(counter)
        topk = heapq.nlargest(k, counter)
        return [w[1] for w in topk]
        """
        # Over-fetch so previously-selected keywords can be skipped.
        top_words = counter.most_common(k + len(self.keywords))
        result = []  # list of keywords to return
        i = 0
        while len(result) < k and i < len(top_words):
            if top_words[i][0] not in self.keywords:
                result.append(top_words[i][0])
                self.keywords.add(top_words[i][0])
            i += 1
        print "  List of selected keywords: ", result
        return result

    def search(self, sites, searchop, seed_keyword="", max_results=50):
        """
        Dispatch to one search strategy and return the union of found urls.
        Args:
            max_results: Maximum number of results to return in Bing/Google search
            search: str - potential values: 'rl', 'kw', 'fw', 'bl'
        """
        #sites = self.fetcher.fetch_sites(urls)
        results = set()
        if searchop == 'rl':
            # Related search, one query per site host.
            for w in sites:
                print "  Running related search..."
                urls = self.search_related(w.get_host(), max_results)
                results.update(urls)
        elif searchop == 'bl':
            """
            for w in sites:
                print "  Search backlinks..."
                urls = self.search_backward_forward(w.get_host())
                results.update(urls)
            """
            urls = self.search_backward_forward_batch(sites)
            results.update(urls)
        elif searchop == 'fw':
            #urls = [w.get_url() for w in sites]
            print "  Forward search...", len(sites), " urls"
            urls = self.search_forward_sites(sites)
            results.update(urls)
        # Run keyword search
        elif searchop == 'kw':
            print "  Searching by keyword"
            keywords = self._extract_keywords(sites, self.k)
            for keyword in keywords:
                if seed_keyword:
                    keyword = seed_keyword + ' ' + keyword
                urls = self.search_keywords(keyword, max_results, se='bing')
                results.update(urls)
        print "  Found ", len(results), " urls"
        return results

    def search_backward_forward(self, url):
        """
        Search related pages using backlink search and forward search
        Returns:
            - list of urls (potentially duplicated)
        """
        t = time.time()
        backlinks = self.search_backward(url)
        print "Backlink search time: ", time.time() - t
        t = time.time()
        fwlinks = self.search_forward(backlinks)
        print "Forward search time: ", time.time() - t
        return backlinks + fwlinks

    def search_backward_forward_batch(self, sites):
        """
        Search related pages using backlink search and forward search
        Parameters:
            - sites: list of Website objects
        Returns:
            - list of urls (potentially duplicated)
        """
        t = time.time()
        backlinks = set()
        for site in sites:
            backlinks.update(self.search_backward(site.get_host()))
        backlinks = list(backlinks)
        print "Backlink search time: ", time.time() - t
        t = time.time()
        fwlinks = self.search_forward(backlinks)
        print "Forward search time: ", time.time() - t
        return backlinks + fwlinks

    def search_backward(self, url):
        """
        Search backlinks using MOZ APIs
        Returns:
            - list of urls
        """
        if self.backlink_cache.contains(url):
            results = self.backlink_cache.get(url)
            print "hit backlink query: ", url
        else:
            #time.sleep(self.moz_delay)
            results = self.moz.search_backlinks(url)
            self.backlink_cache.add(url, results)
        print "Backlink Search - Query: ", url, " - Number of results: ", len(
            results)
        return results

    def search_keywords(self, keyword, max_results, se='google'):
        """
        Search relevant pages by keyword using Google
        Args:
            max_results: maximum number of results to return
        """
        urls = []
        if self.keyword_cache.contains(keyword):
            urls = self.keyword_cache.get(keyword)
            print "hit keyword query: ", keyword
        else:
            # Rate-limit before hitting the live API.
            if se == 'google':
                time.sleep(self.google_delay)
                urls = self.google.search(keyword, max_results)
            else:  # default: 'bing'
                time.sleep(self.bing_delay)
                urls = self.bing.search(keyword, max_results)
            self.keyword_cache.add(keyword, urls)
        """
        if 'items' in results:
            for item in results['items']:
                urls.append(url_normalize(item['link']))
        """
        print "Keyword Search - Query: ", keyword, " - Number of results: ", len(
            urls)
        return urls

    def search_forward_sites(self, sites, insite=False):
        """
        Fetch the pages and extract external links.
        Args
            - sites: list of Website objects
            - insite: False if extracting links outside the host.
        """
        outlinks = set()
        for site in sites:
            for page in site:
                if insite:
                    links = self.link_extractor.extract_insite_links(
                        page.get_url(), page.get_html())
                else:
                    links = self.link_extractor.extract_external_links(
                        page.get_url(), page.get_html())
                # Cap links taken from each page to avoid pollution.
                links = self.select_subset(links)
                outlinks.update(links)
        print "Forward Search ", " - Number of results: ", len(outlinks)
        return list(outlinks)

    def search_forward(self, urls, insite=False):
        """
        Fetch the pages and extract external links
        Args
            - urls: list of urls
            - insite: False if extracting links outside the host.
        """
        sites = self.fetcher.fetch_sites(urls, allow_fetch_later=True)
        outlinks = set()
        for site in sites:
            for page in site:
                if insite:
                    links = self.link_extractor.extract_insite_links(
                        page.get_url(), page.get_html())
                else:
                    links = self.link_extractor.extract_external_links(
                        page.get_url(), page.get_html())
                links = self.select_subset(links)
                outlinks.update(links)
        print "Forward Search ", " - Number of results: ", len(outlinks)
        return list(outlinks)

    def select_subset(self, urls):
        """
        Each page might contain thousand of external urls which pollute
        the results, so we only keep a fixed number of links from each page
        How this works:
            - Pick one url in each site
            - If not yet reaching max, select random urls
        Returns:
            - list of urls
        """
        if len(urls) <= self.max_urls:
            return urls
        results = []
        """
        cur = urls
        while len(results)<self.max_urls:
            sites = set()
            next = []
            for url in cur:
                site = URLUtility.get_host(url)
                if site not in sites:
                    sites.add(site)
                    results.append(url)
                else:
                    next.append(url)
                if len(results) == self.max_urls:
                    break
            cur = next
        """
        # One url per distinct host, up to max_urls.
        sites = set()
        for url in urls:
            site = URLUtility.get_host(url)
            if site not in sites:
                sites.add(site)
                results.append(url)
            if len(results) == self.max_urls:
                break
        return results

    def search_related(self, url, k):
        """
        Return list of related urls using Google related search
        """
        query = "related:" + url
        urls = []
        if self.related_cache.contains(query):
            urls = self.related_cache.get(query)
            print "hit related query: ", query
        else:
            time.sleep(self.google_delay)
            urls = self.google.search(query, k)
            self.related_cache.add(query, urls)
        """
        urls = []
        if 'items' in results:
            for item in results['items']:
                urls.append(url_normalize(item['link']))
        """
        print "Related Search - Query: ", url, " - Number of results: ", len(
            urls)
        return urls
class Player(object):
    """NetEase cloud-music player driving an mpg123 subprocess in remote
    (-R) mode. Playback state (playlist, index, mode, volume) lives in
    self.storage.database["player_info"]; song metadata in ["songs"].
    """

    # Playback modes (change_mode cycles through these, modulo 5).
    MODE_ORDERED = 0
    MODE_ORDERED_LOOP = 1
    MODE_SINGLE_LOOP = 2
    MODE_RANDOM = 3
    MODE_RANDOM_LOOP = 4

    def __init__(self):
        self.config = Config()
        # self.ui = Ui()
        # Handle of the running mpg123 subprocess (None when stopped).
        self.popen_handler = None
        # flag stop, prevent thread start
        self.playing_flag = False
        # Set by refresh_urls() when song urls were re-fetched (note: the
        # attribute name is misspelled throughout — kept for compatibility).
        self.refrese_url_flag = False
        self.process_length = 0
        self.process_location = 0
        self.storage = Storage()
        self.cache = Cache()
        self.end_callback = None
        self.playing_song_changed_callback = None
        self.api = NetEase()

    @property
    def info(self):
        # Persistent player state dict.
        return self.storage.database["player_info"]

    @property
    def songs(self):
        # song_id (str) -> song metadata dict.
        return self.storage.database["songs"]

    @property
    def index(self):
        return self.info["idx"]

    @property
    def list(self):
        # NOTE: shadows the builtin `list` inside this class's namespace.
        return self.info["player_list"]

    @property
    def order(self):
        return self.info["playing_order"]

    @property
    def mode(self):
        return self.info["playing_mode"]

    @property
    def is_ordered_mode(self):
        return self.mode == Player.MODE_ORDERED

    @property
    def is_ordered_loop_mode(self):
        return self.mode == Player.MODE_ORDERED_LOOP

    @property
    def is_single_loop_mode(self):
        return self.mode == Player.MODE_SINGLE_LOOP

    @property
    def is_random_mode(self):
        return self.mode == Player.MODE_RANDOM

    @property
    def is_random_loop_mode(self):
        return self.mode == Player.MODE_RANDOM_LOOP

    @property
    def config_notifier(self):
        return self.config.get("notifier")

    @property
    def config_mpg123(self):
        return self.config.get("mpg123_parameters")

    @property
    def current_song(self):
        """Metadata dict of the song at the current index, or {}."""
        if not self.songs:
            return {}
        if not self.is_index_valid:
            return {}
        song_id = self.list[self.index]
        return self.songs.get(song_id, {})

    @property
    def playing_id(self):
        # Raises KeyError when current_song is {} — callers rely on a valid index.
        return self.current_song["song_id"]

    @property
    def playing_name(self):
        return self.current_song["song_name"]

    @property
    def is_empty(self):
        return len(self.list) == 0

    @property
    def is_index_valid(self):
        return 0 <= self.index < len(self.list)

    def notify_playing(self):
        """Desktop-notify the current song, if notifications are enabled."""
        if not self.current_song:
            return
        if not self.config_notifier:
            return
        song = self.current_song
        notify("正在播放: {}\n{}-{}".format(song["song_name"], song["artist"],
                                        song["album_name"]))

    def notify_copyright_issue(self):
        """Log and notify that the current song is region/copyright blocked."""
        log.warning("Song {} is unavailable due to copyright issue.".format(
            self.playing_id))
        notify("版权限制,无法播放此歌曲")

    def change_mode(self, step=1):
        # Cycle through the 5 playback modes.
        self.info["playing_mode"] = (self.info["playing_mode"] + step) % 5

    def build_playinfo(self):
        # UI refresh is currently disabled (self.ui is commented out in
        # __init__), so this is effectively a no-op guard.
        if not self.current_song:
            return
        # self.ui.build_playinfo(
        #     self.current_song["song_name"],
        #     self.current_song["artist"],
        #     self.current_song["album_name"],
        #     self.current_song["quality"],
        #     time.time(),
        #     pause=not self.playing_flag,
        # )

    def add_songs(self, songs):
        """Append songs to the playlist, merging metadata for known ids."""
        for song in songs:
            song_id = str(song["song_id"])
            self.info["player_list"].append(song_id)
            if song_id in self.songs:
                self.songs[song_id].update(song)
            else:
                self.songs[song_id] = song

    def refresh_urls(self):
        """Re-fetch mp3 urls for the whole playlist (urls expire server-side)."""
        songs = self.api.dig_info(self.list, "refresh_urls")
        if songs:
            for song in songs:
                song_id = str(song["song_id"])
                if song_id in self.songs:
                    self.songs[song_id]["mp3_url"] = song["mp3_url"]
                    self.songs[song_id]["expires"] = song["expires"]
                    self.songs[song_id]["get_time"] = song["get_time"]
                else:
                    self.songs[song_id] = song
            self.refrese_url_flag = True

    def stop(self):
        """Quit and kill the mpg123 subprocess, clearing playing state."""
        if not self.popen_handler:
            return
        self.playing_flag = False
        self.popen_handler.stdin.write(b"Q\n")
        self.popen_handler.stdin.flush()
        self.popen_handler.kill()
        self.popen_handler = None
        # wait process to be killed
        time.sleep(0.01)

    def tune_volume(self, up=0):
        """Adjust volume by `up` (clamped to 0..100) and push it to mpg123."""
        if not self.popen_handler:
            return
        new_volume = self.info["playing_volume"] + up
        if new_volume > 100:
            new_volume = 100
        elif new_volume < 0:
            new_volume = 0
        self.info["playing_volume"] = new_volume
        self.popen_handler.stdin.write("V {}\n".format(
            self.info["playing_volume"]).encode())
        self.popen_handler.stdin.flush()

    def switch(self):
        """Toggle pause/resume via mpg123's "P" command."""
        if not self.popen_handler:
            return
        self.playing_flag = not self.playing_flag
        self.popen_handler.stdin.write(b"P\n")
        self.popen_handler.stdin.flush()
        self.build_playinfo()

    def run_mpg123(self, on_exit, url, expires=-1, get_time=-1):
        """Spawn mpg123 -R, load `url`, and pump its stdout until the track
        ends or errors; then advance/stop accordingly.

        Args:
            on_exit: callback (currently unused inside the loop).
            url: media url or local cache path to load.
            expires/get_time: url validity info; -1 means "not known".
        """
        para = ["mpg123", "-R"] + self.config_mpg123
        self.popen_handler = subprocess.Popen(para,
                                              stdin=subprocess.PIPE,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
        self.tune_volume()
        self.popen_handler.stdin.write(b"L " + url.encode("utf-8") + b"\n")
        self.popen_handler.stdin.flush()
        endless_loop_cnt = 0
        while True:
            if not self.popen_handler:
                break
            strout = self.popen_handler.stdout.readline().decode(
                "utf-8").strip()
            if strout[:2] == "@F":
                # playing, update progress: out[3]=elapsed, out[4]=remaining.
                out = strout.split(" ")
                self.process_location = int(float(out[3]))
                self.process_length = int(float(out[3]) + float(out[4]))
            elif strout[:2] == "@E":
                self.playing_flag = True
                if (expires >= 0 and get_time >= 0
                        and time.time() - expires - get_time >= 0):
                    # The url has expired — refresh all playlist urls.
                    self.refresh_urls()
                else:
                    # error, stop song and move to next
                    self.notify_copyright_issue()
                break
            elif strout == "@P 0":
                # end, moving to next
                self.playing_flag = True
                break
            elif strout == "":
                endless_loop_cnt += 1
                # mpg123 sometimes keeps emitting empty lines after playback
                # without exiting, burning CPU — bail out after 100 of them.
                if endless_loop_cnt > 100:
                    log.warning(
                        "mpg123 error, halt, endless loop and high cpu use, then we kill it"
                    )
                    break
        if self.playing_flag:
            if self.refrese_url_flag:
                # Urls were refreshed: restart the same song with the new url.
                self.stop()
                self.replay()
                self.refrese_url_flag = False
            else:
                self.next()
        else:
            self.stop()

    def download_lyric(self, is_transalted=False):
        """Fetch and cache the lyric (or translated lyric) of the current song."""
        key = "lyric" if not is_transalted else "tlyric"
        if key not in self.songs[str(self.playing_id)]:
            self.songs[str(self.playing_id)][key] = []
        if len(self.songs[str(self.playing_id)][key]) > 0:
            return
        if not is_transalted:
            lyric = self.api.song_lyric(self.playing_id)
        else:
            lyric = self.api.song_tlyric(self.playing_id)
        self.songs[str(self.playing_id)][key] = lyric

    def download_song(self, song_id, song_name, artist, url):
        """Queue the song for background download into the local cache."""
        def write_path(song_id, path):
            # Record the cached file path once the download finishes.
            self.songs[str(song_id)]["cache"] = path

        self.cache.add(song_id, song_name, artist, url, write_path)
        self.cache.start_download()

    def start_playing(self, on_exit, args):
        """Start playback of `args` (a song metadata dict) in a worker thread,
        plus background threads for caching and lyric download.

        Returns immediately after the playback thread starts; returns the
        playback thread object.
        """
        # log.debug("%s,%s,%s" % (args['song_id'], args['song_name'], args['mp3_url']))
        if "cache" in args.keys() and os.path.isfile(args["cache"]):
            # Play the locally cached file.
            thread = threading.Thread(target=self.run_mpg123,
                                      args=(on_exit, args["cache"]))
        else:
            # Prefer a freshly fetched url from the new API.
            new_url = NetEase().songs_url([args["song_id"]])[0]["url"]
            if not new_url:
                # Fall back to the old url and hand it to mpg123.
                new_url = args["mp3_url"]
            thread = threading.Thread(
                target=self.run_mpg123,
                args=(on_exit, new_url, args["expires"], args["get_time"]),
            )
        cache_thread = threading.Thread(
            target=self.download_song,
            args=(
                args["song_id"],
                args["song_name"],
                args["artist"],
                args["mp3_url"],
            ),
        )
        cache_thread.start()
        thread.start()
        lyric_download_thread = threading.Thread(target=self.download_lyric)
        lyric_download_thread.start()
        tlyric_download_thread = threading.Thread(target=self.download_lyric,
                                                  args=(True, ))
        tlyric_download_thread.start()
        # returns immediately after the thread starts
        return thread

    def replay(self):
        """(Re)start playback of the current song from the beginning."""
        if not self.is_index_valid:
            self.stop()
            if self.end_callback:
                log.debug("Callback")
                self.end_callback()
            return
        if not self.current_song:
            return
        self.stop()
        self.playing_flag = True
        self.build_playinfo()
        self.notify_playing()
        self.start_playing(lambda: 0, self.current_song)

    def shuffle_order(self):
        """Regenerate the random playing order in place and reset its cursor."""
        del self.order[:]
        self.order.extend(list(range(0, len(self.list))))
        random.shuffle(self.order)
        self.info["random_index"] = 0

    def new_player_list(self, type, title, datalist, offset):
        """Replace the playlist with `datalist` (index intentionally kept)."""
        self.info["player_list_type"] = type
        self.info["player_list_title"] = title
        # self.info['idx'] = offset
        self.info["player_list"] = []
        self.info["playing_order"] = []
        self.info["random_index"] = 0
        self.songs.clear()
        self.add_songs(datalist)

    def append_songs(self, datalist):
        self.add_songs(datalist)

    def play_or_pause(self, idx, switch_flag):
        """Play the song at `idx`, or toggle pause when it is already current.

        switch_flag is True when called from the playlist view (or when the
        current list type is not one of "songs"/"djchannels"/"fmsongs").
        """
        if self.is_empty:
            return
        # Same "list index" and "playing index" --> same song: pause/resume it.
        if self.index == idx and switch_flag:
            if not self.popen_handler:
                self.stop()
                self.replay()
            else:
                self.switch()
        else:
            self.info["idx"] = idx
            self.stop()
            self.replay()

    def _swap_song(self):
        # Move the current song to the front of the shuffled order so it is
        # kept when the order is regenerated.
        now_songs = self.order.index(self.index)
        self.order[0], self.order[now_songs] = self.order[
            now_songs], self.order[0]

    def _need_to_shuffle(self):
        """True when the shuffled order is exhausted or no longer matches
        the current index (e.g. the playlist changed)."""
        playing_order = self.order
        random_index = self.info["random_index"]
        if (random_index >= len(playing_order)
                or playing_order[random_index] != self.index):
            return True
        else:
            return False

    def next_idx(self):
        """Advance self.info["idx"] according to the playback mode."""
        if not self.is_index_valid:
            return self.stop()
        playlist_len = len(self.list)
        if self.mode == Player.MODE_ORDERED:
            # make sure self.index will not over
            if self.info["idx"] < playlist_len:
                self.info["idx"] += 1
        elif self.mode == Player.MODE_ORDERED_LOOP:
            self.info["idx"] = (self.index + 1) % playlist_len
        elif self.mode == Player.MODE_SINGLE_LOOP:
            self.info["idx"] = self.info["idx"]
        else:
            # Random modes.
            playing_order_len = len(self.order)
            if self._need_to_shuffle():
                self.shuffle_order()
                # When you regenerate playing list
                # you should keep previous song same.
                self._swap_song()
                playing_order_len = len(self.order)
            self.info["random_index"] += 1
            # Out of border
            if self.mode == Player.MODE_RANDOM_LOOP:
                self.info["random_index"] %= playing_order_len
            # Random but not loop, out of border, stop playing.
            if self.info["random_index"] >= playing_order_len:
                self.info["idx"] = playlist_len
            else:
                self.info["idx"] = self.order[self.info["random_index"]]
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def next(self):
        self.stop()
        self.next_idx()
        self.replay()

    def prev_idx(self):
        """Step self.info["idx"] backwards according to the playback mode."""
        if not self.is_index_valid:
            self.stop()
            return
        playlist_len = len(self.list)
        if self.mode == Player.MODE_ORDERED:
            if self.info["idx"] > 0:
                self.info["idx"] -= 1
        elif self.mode == Player.MODE_ORDERED_LOOP:
            self.info["idx"] = (self.info["idx"] - 1) % playlist_len
        elif self.mode == Player.MODE_SINGLE_LOOP:
            self.info["idx"] = self.info["idx"]
        else:
            playing_order_len = len(self.order)
            if self._need_to_shuffle():
                self.shuffle_order()
                playing_order_len = len(self.order)
            self.info["random_index"] -= 1
            if self.info["random_index"] < 0:
                if self.mode == Player.MODE_RANDOM:
                    self.info["random_index"] = 0
                else:
                    self.info["random_index"] %= playing_order_len
            self.info["idx"] = self.order[self.info["random_index"]]
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def prev(self):
        self.stop()
        self.prev_idx()
        self.replay()

    def shuffle(self):
        """Switch to random mode, reshuffle, and restart playback."""
        self.stop()
        self.info["playing_mode"] = Player.MODE_RANDOM
        self.shuffle_order()
        self.info["idx"] = self.info["playing_order"][
            self.info["random_index"]]
        self.replay()

    def volume_up(self):
        self.tune_volume(5)

    def volume_down(self):
        self.tune_volume(-5)

    def update_size(self):
        # NOTE(review): self.ui is commented out in __init__, so this will
        # raise AttributeError if called — confirm against callers.
        self.ui.update_size()
        self.build_playinfo()

    def cache_song(self, song_id, song_name, artist, song_url):
        """Force-download one song to the cache (re-enabling the cache first)."""
        def on_exit(song_id, path):
            self.songs[str(song_id)]["cache"] = path

        # Toggle to reset any previously disabled cache state.
        self.cache.enable = False
        self.cache.enable = True
        self.cache.add(song_id, song_name, artist, song_url, on_exit)
        self.cache.start_download()
class Bing_Search(object):
    """Client for the Bing Web Search v7 REST API with optional disk cache.

    Results for a query are cached (query -> list of urls) so repeated
    queries do not hit the API again. Urls ending in blocked extensions
    (.pdf/.doc/.xls) are filtered out.
    """

    def __init__(self, api_key, data_dir=None):
        # Optional on-disk cache of query -> list of result urls.
        self.cache = None
        if data_dir:
            cache_file = data_dir + "/bing.json"
            self.cache = Cache(cache_file)
        # File extensions we never want in the result set.
        self.stopext = set([".pdf", ".doc", ".xls"])
        self.headers = {'Ocp-Apim-Subscription-Key': api_key}

    def is_valid(self, url):
        """Return False for urls shorter than 4 chars or ending in a blocked extension."""
        if len(url) < 4 or url[-4:] in self.stopext:
            return False
        return True

    def search(self, query_term, count=10):
        """Search Bing for `query_term` and return up to `count` valid urls.

        Reference:
        https://docs.microsoft.com/en-us/rest/api/cognitiveservices/bing-web-api-v5-reference#query-parameters

        Args:
            count: number of search results to request. If count is greater
                than 50 (the per-request maximum), paging is used: each page
                requests up to 50 results and advances `offset` by the page
                size.
        """
        if self.cache and self.cache.contains(query_term):
            urls = self.cache.get(query_term)
            return [url for url in urls if self.is_valid(url)]
        urls = []
        offset = 0
        while count > 0:
            page_size = min(count, 50)
            params = urllib.urlencode({
                # Request parameters
                'q': query_term,
                'count': str(page_size),
                'offset': str(offset),
                'mkt': 'en-us',
                'safesearch': 'Moderate'})
            try:
                conn = httplib.HTTPSConnection('api.cognitive.microsoft.com')
                conn.request("GET", "/bing/v7.0/search?%s" % params, "{body}",
                             self.headers)
                response = conn.getresponse()
                data = response.read()
                obj = json.loads(data)
                if 'webPages' in obj:
                    webPages = obj['webPages']
                    values = webPages['value']
                    for value in values:
                        if self.is_valid(value['url']):
                            url = URLUtility.normalize(value['url'])
                            if url:
                                urls.append(url)
                conn.close()
            # Best-effort paging: log and continue to the next page rather
            # than aborting the whole search. Narrowed from a bare `except:`
            # so KeyboardInterrupt/SystemExit still propagate.
            except Exception:
                traceback.print_exc()
            count -= page_size
            # BUG FIX: `offset` is a result index, not a page number — it
            # must advance by the number of results requested, otherwise
            # successive pages overlap (previously `offset += 1`).
            offset += page_size
        if self.cache:
            self.cache.add(query_term, urls)
        return urls

    def search_site(self, keyword, url, k=10):
        """Search inside a given website using the "keyword site:url" operator.

        Parameters:
            keyword: keyword used to search.
            url: top level domain to restrict results to.

        Returns:
            list of urls.
        """
        keyword = keyword + " site:" + url
        return self.search(keyword, k)
class Player:
    """Legacy (Python 2) NetEase player: drives an mpg123 subprocess in
    remote (-R) mode via text-mode pipes, with curses UI updates and
    background threads for caching and lyric download.
    """

    def __init__(self):
        self.config = Config()
        self.ui = Ui()
        # Handle of the running mpg123 subprocess (None when stopped).
        self.popen_handler = None
        # flag stop, prevent thread start
        self.playing_flag = False
        self.pause_flag = False
        self.process_length = 0
        self.process_location = 0
        # True right after loading a track, so the first @F frame message
        # is interpreted as the total track length.
        self.process_first = False
        self.storage = Storage()
        self.info = self.storage.database["player_info"]
        self.songs = self.storage.database["songs"]
        self.playing_id = -1
        self.cache = Cache()
        self.notifier = self.config.get_item("notifier")
        self.mpg123_parameters = self.config.get_item("mpg123_parameters")
        self.end_callback = None
        self.playing_song_changed_callback = None

    def popen_recall(self, onExit, popenArgs):
        """
        Runs the given args in subprocess.Popen, and then calls the function
        onExit when the subprocess completes.
        onExit is a callable object, and popenArgs is a lists/tuple of args
        that would give to subprocess.Popen.
        """

        def runInThread(onExit, arg):
            # Worker: spawn mpg123, load `arg` (url or file path) and pump
            # its stdout until the track finishes or playback is stopped.
            para = ['mpg123', '-R']
            para[1:1] = self.mpg123_parameters
            self.popen_handler = subprocess.Popen(para,
                                                  stdin=subprocess.PIPE,
                                                  stdout=subprocess.PIPE,
                                                  stderr=subprocess.PIPE)
            self.popen_handler.stdin.write("V " + str(self.info[
                "playing_volume"]) + "\n")
            if arg:
                self.popen_handler.stdin.write("L " + arg + "\n")
            else:
                # Nothing to play: skip to the next track immediately.
                self.next_idx()
                onExit()
                return
            self.process_first = True
            while True:
                if self.playing_flag is False:
                    break
                strout = self.popen_handler.stdout.readline()
                if re.match("^\@F.*$", strout):
                    # Frame message: field 4 is the remaining time; the
                    # first @F after load carries the full track length.
                    process_data = strout.split(" ")
                    process_location = float(process_data[4])
                    if self.process_first:
                        self.process_length = process_location
                        self.process_first = False
                        self.process_location = 0
                    else:
                        self.process_location = self.process_length - process_location  # NOQA
                    continue
                elif strout[:2] == '@E':
                    # get a alternative url from new api
                    sid = popenArgs['song_id']
                    new_url = NetEase().songs_detail_new_api([sid])[0]['url']
                    if new_url is None:
                        log.warning(('Song {} is unavailable '
                                     'due to copyright issue').format(sid))
                        break
                    log.error('Song {} is not compatible with old api.'.format(
                        sid))
                    self.popen_handler.stdin.write("\nL " + new_url + "\n")
                    self.popen_handler.stdout.readline()
                elif strout == "@P 0\n":
                    # Track finished: quit and kill the subprocess.
                    self.popen_handler.stdin.write("Q\n")
                    self.popen_handler.kill()
                    break
            if self.playing_flag:
                self.next_idx()
                onExit()
            return

        def getLyric():
            # Fetch and cache the lyric of the current song (no-op if cached).
            if 'lyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]["lyric"] = []
            if len(self.songs[str(self.playing_id)]["lyric"]) > 0:
                return
            netease = NetEase()
            lyric = netease.song_lyric(self.playing_id)
            # The API returns this literal string when no lyric was found.
            if lyric == [] or lyric == '未找到歌词':
                return
            lyric = lyric.split('\n')
            self.songs[str(self.playing_id)]["lyric"] = lyric
            return

        def gettLyric():
            # Fetch and cache the translated lyric (no-op if cached).
            if 'tlyric' not in self.songs[str(self.playing_id)].keys():
                self.songs[str(self.playing_id)]["tlyric"] = []
            if len(self.songs[str(self.playing_id)]["tlyric"]) > 0:
                return
            netease = NetEase()
            tlyric = netease.song_tlyric(self.playing_id)
            if tlyric == [] or tlyric == '未找到歌词翻译':
                return
            tlyric = tlyric.split('\n')
            self.songs[str(self.playing_id)]["tlyric"] = tlyric
            return

        def cacheSong(song_id, song_name, artist, song_url):
            # Queue a background download of the song into the local cache.
            def cacheExit(song_id, path):
                self.songs[str(song_id)]['cache'] = path

            self.cache.add(song_id, song_name, artist, song_url, cacheExit)
            self.cache.start_download()

        # Prefer the locally cached file when it exists on disk.
        if 'cache' in popenArgs.keys() and os.path.isfile(popenArgs['cache']):
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['cache']))
        else:
            thread = threading.Thread(target=runInThread,
                                      args=(onExit, popenArgs['mp3_url']))
        cache_thread = threading.Thread(
            target=cacheSong,
            args=(popenArgs['song_id'], popenArgs['song_name'], popenArgs[
                'artist'], popenArgs['mp3_url']))
        cache_thread.start()
        thread.start()
        lyric_download_thread = threading.Thread(target=getLyric, args=())
        lyric_download_thread.start()
        tlyric_download_thread = threading.Thread(target=gettLyric, args=())
        tlyric_download_thread.start()
        # returns immediately after the thread starts
        return thread

    def get_playing_id(self):
        return self.playing_id

    def recall(self):
        """(Re)start playback of the song at the current index; also used as
        the end-of-song callback passed to popen_recall."""
        if self.info["idx"] >= len(self.info[
                "player_list"]) and self.end_callback is not None:
            self.end_callback()
        if self.info["idx"] < 0 or self.info["idx"] >= len(self.info[
                "player_list"]):
            self.info["idx"] = 0
            self.stop()
            return
        self.playing_flag = True
        self.pause_flag = False
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        if self.notifier:
            self.ui.notify("Now playing", item['song_name'],
                           item['album_name'], item['artist'])
        self.playing_id = item['song_id']
        self.popen_recall(self.recall, item)

    def generate_shuffle_playing_list(self):
        """Regenerate the random playing order and reset its cursor."""
        del self.info["playing_list"][:]
        for i in range(0, len(self.info["player_list"])):
            self.info["playing_list"].append(i)
        random.shuffle(self.info["playing_list"])
        self.info["ridx"] = 0

    def new_player_list(self, type, title, datalist, offset):
        """Replace the playlist with `datalist`, starting at `offset`."""
        self.info["player_list_type"] = type
        self.info["player_list_title"] = title
        self.info["idx"] = offset
        del self.info["player_list"][:]
        del self.info["playing_list"][:]
        self.info["ridx"] = 0
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                # Refresh stale metadata when name or quality changed.
                database_song = self.songs[str(song["song_id"])]
                if (database_song["song_name"] != song["song_name"] or
                        database_song["quality"] != song["quality"]):
                    self.songs[str(song["song_id"])] = song

    def append_songs(self, datalist):
        """Append songs to the playlist, preserving any cached file paths."""
        for song in datalist:
            self.info["player_list"].append(str(song["song_id"]))
            if str(song["song_id"]) not in self.songs.keys():
                self.songs[str(song["song_id"])] = song
            else:
                database_song = self.songs[str(song["song_id"])]
                if database_song["song_name"] != song["song_name"] or \
                        database_song["quality"] != song["quality"] or \
                        database_song["mp3_url"] != song["mp3_url"]:
                    if "cache" in self.songs[str(song["song_id"])].keys():
                        song["cache"] = self.songs[str(song["song_id"])][
                            "cache"]
                    self.songs[str(song["song_id"])] = song
        # NOTE(review): due to operator precedence this reads as
        # (len(datalist) > 0 and mode == 3) or mode == 4 — i.e. in mode 4
        # the list is reshuffled even for an empty datalist. Confirm intent.
        if len(datalist) > 0 and self.info["playing_mode"] == 3 or self.info[
                "playing_mode"] == 4:
            self.generate_shuffle_playing_list()

    def play_and_pause(self, idx):
        """Play the song at `idx`, or toggle pause if it is already current."""
        # if same playlists && idx --> same song :: pause/resume it
        if self.info["idx"] == idx:
            if self.pause_flag:
                self.resume()
            else:
                self.pause()
        else:
            self.info["idx"] = idx
            # if it's playing
            if self.playing_flag:
                self.switch()
            # start new play
            else:
                self.recall()

    # play another
    def switch(self):
        self.stop()
        # wait process be killed
        time.sleep(0.1)
        self.recall()

    def stop(self):
        """Quit and kill the mpg123 subprocess if currently playing."""
        if self.playing_flag and self.popen_handler:
            self.playing_flag = False
            self.popen_handler.stdin.write("Q\n")
            try:
                self.popen_handler.kill()
            except OSError as e:
                log.error(e)
                return

    def pause(self):
        """Pause playback ("P" toggles pause in mpg123 remote mode)."""
        if not self.playing_flag and not self.popen_handler:
            return
        self.pause_flag = True
        self.popen_handler.stdin.write("P\n")
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time(), pause=True)

    def resume(self):
        """Resume paused playback."""
        self.pause_flag = False
        self.popen_handler.stdin.write("P\n")
        item = self.songs[self.info["player_list"][self.info["idx"]]]
        self.ui.build_playinfo(item['song_name'], item['artist'],
                               item['album_name'], item['quality'],
                               time.time())
        self.playing_id = item['song_id']

    def _swap_song(self):
        # Move the current song to the front of the shuffled order so it is
        # kept when the order is regenerated.
        plist = self.info["playing_list"]
        now_songs = plist.index(self.info["idx"])
        plist[0], plist[now_songs] = plist[now_songs], plist[0]

    def _is_idx_valid(self):
        return 0 <= self.info["idx"] < len(self.info["player_list"])

    def _inc_idx(self):
        if self.info["idx"] < len(self.info["player_list"]) - 1:
            self.info["idx"] += 1

    def _dec_idx(self):
        if self.info["idx"] > 0:
            self.info["idx"] -= 1

    def next_idx(self):
        """Advance self.info["idx"] according to the playing mode."""
        if not self._is_idx_valid():
            self.stop()
            return
        playlist_len = len(self.info["player_list"])
        playinglist_len = len(self.info["playing_list"])
        # Playing mode. 0 is ordered. 1 is orderde loop.
        # 2 is single song loop. 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self._inc_idx()
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] + 1) % playlist_len
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3 or self.info["playing_mode"] == 4:
            # First is out of border. Second is change playlist.
            if self.info["ridx"] >= playinglist_len or self.info[
                    "playing_list"][self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                playinglist_len = len(self.info["playing_list"])
                # When you regenerate playing list, you should keep
                # previous song same.
                try:
                    self._swap_song()
                except Exception as e:
                    log.error(e)
            self.info["ridx"] += 1
            # Out of border
            if self.info["playing_mode"] == 4:
                self.info["ridx"] %= playinglist_len
            if self.info["ridx"] >= playinglist_len:
                # Random but not loop: out of border means stop playing.
                self.info["idx"] = playlist_len
            else:
                self.info["idx"] = self.info["playing_list"][self.info[
                    "ridx"]]
        else:
            self.info["idx"] += 1
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def next(self):
        self.stop()
        time.sleep(0.01)
        self.next_idx()
        self.recall()

    def prev_idx(self):
        """Step self.info["idx"] backwards according to the playing mode."""
        if not self._is_idx_valid():
            self.stop()
            return
        playlist_len = len(self.info["player_list"])
        playinglist_len = len(self.info["playing_list"])
        # Playing mode. 0 is ordered. 1 is orderde loop.
        # 2 is single song loop. 3 is single random. 4 is random loop
        if self.info["playing_mode"] == 0:
            self._dec_idx()
        elif self.info["playing_mode"] == 1:
            self.info["idx"] = (self.info["idx"] - 1) % playlist_len
        elif self.info["playing_mode"] == 2:
            self.info["idx"] = self.info["idx"]
        elif self.info["playing_mode"] == 3 or self.info["playing_mode"] == 4:
            if self.info["ridx"] >= len(self.info["playing_list"]) or \
                    self.info["playing_list"][self.info["ridx"]] != self.info["idx"]:
                self.generate_shuffle_playing_list()
                playinglist_len = len(self.info["playing_list"])
            self.info["ridx"] -= 1
            if self.info["ridx"] < 0:
                if self.info["playing_mode"] == 3:
                    self.info["ridx"] = 0
                else:
                    self.info["ridx"] %= playinglist_len
            self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        else:
            self.info["idx"] -= 1
        if self.playing_song_changed_callback is not None:
            self.playing_song_changed_callback()

    def prev(self):
        self.stop()
        time.sleep(0.01)
        self.prev_idx()
        self.recall()

    def shuffle(self):
        """Switch to random mode (3), reshuffle, and restart playback."""
        self.stop()
        time.sleep(0.01)
        self.info["playing_mode"] = 3
        self.generate_shuffle_playing_list()
        self.info["idx"] = self.info["playing_list"][self.info["ridx"]]
        self.recall()

    def volume_up(self):
        # Bump volume by 7, clamped to 100, and push to mpg123 if playing.
        self.info["playing_volume"] = self.info["playing_volume"] + 7
        if (self.info["playing_volume"] > 100):
            self.info["playing_volume"] = 100
        if not self.playing_flag:
            return
        self.popen_handler.stdin.write("V " + str(self.info[
            "playing_volume"]) + "\n")

    def volume_down(self):
        # Lower volume by 7, clamped to 0, and push to mpg123 if playing.
        self.info["playing_volume"] = self.info["playing_volume"] - 7
        if (self.info["playing_volume"] < 0):
            self.info["playing_volume"] = 0
        if not self.playing_flag:
            return
        self.popen_handler.stdin.write("V " + str(self.info[
            "playing_volume"]) + "\n")

    def update_size(self):
        """Redraw the UI after a terminal resize (best effort)."""
        try:
            self.ui.update_size()
            item = self.songs[self.info["player_list"][self.info["idx"]]]
            if self.playing_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time())
            if self.pause_flag:
                self.ui.build_playinfo(item['song_name'], item['artist'],
                                       item['album_name'], item['quality'],
                                       time.time(), pause=True)
        except Exception as e:
            log.error(e)
            pass

    def cacheSong1time(self, song_id, song_name, artist, song_url):
        """Force-download one song to the cache (re-enabling the cache first)."""
        def cacheExit(song_id, path):
            self.songs[str(song_id)]['cache'] = path

        # Toggle to reset any previously disabled cache state.
        self.cache.enable = False
        self.cache.enable = True
        self.cache.add(song_id, song_name, artist, song_url, cacheExit)
        self.cache.start_download()
# Visualization fragment: overlays steering-angle text and graphs onto a
# frame. NOTE(review): depends on names defined earlier in the file
# (textsize, visual_frame, steering_angle, steering_angle_pred, font,
# truth_wheel_cpy, pred_wheel_cpy, pred_cache, truth_cache, error_cache,
# angle_graph, line_graph) — this chunk starts mid-scope.
offX = textsize[0] // 2  # half the rendered text width, to centre it
offY = textsize[1] // 2  # half the rendered text height
# Draw the ground-truth angle centred over the predicted wheel image.
cv2.putText(visual_frame, str(steering_angle),
            (700 + truth_wheel_cpy.shape[1] + 100 +
             (pred_wheel_cpy.shape[1] // 2) - offX,
             100 + (pred_wheel_cpy.shape[0] // 2) + offY), font, 0.7,
            (0, 0, 0), 2)
cv2.putText(visual_frame, 'Prediction', (740, 340), font, 0.7,
            (255, 255, 255), 2)
cv2.putText(visual_frame, 'Truth', (1070, 340), font, 0.7, (255, 255, 255),
            2)
# Record the latest values for the rolling angle/error plots.
pred_cache.add([steering_angle_pred])
truth_cache.add([steering_angle])
ang_graph = angle_graph(truth_cache.get_all_index(0),
                        pred_cache.get_all_index(0), (400, 300),
                        ['Angle', '-25', '25'], ['Time', '0', '100'])
# Blit the angle graph into the frame at (y=380, x=30).
visual_frame[380:380 + ang_graph.shape[0],
             30:30 + ang_graph.shape[1]] = ang_graph
# Absolute prediction error, scaled to [0, 1] for plotting.
error = abs(steering_angle - steering_angle_pred) / 100
error_cache.add([error])
acc_graph = line_graph(error_cache.get_all_index(0), error_cache.mean(0),
                       100, (400, 300), ['Time', '0', '100'],
                       ['Degrees of Error', '0', '100'])
class Resolver:
    """Resolves intrinsic values ("import:", "resolve:", "upload:") found in
    deployment-map parameter files: CloudFormation stack outputs, SSM
    Parameter Store values, and S3 uploads. Results are written into
    self.stage_parameters under the key's parent section.
    """

    def __init__(self, parameter_store, stage_parameters,
                 comparison_parameters):
        self.parameter_store = parameter_store
        # Parameters being built for the current stage (mutated in place).
        self.stage_parameters = stage_parameters
        # Reference structure used to locate a key's parent section.
        self.comparison_parameters = comparison_parameters
        self.sts = STS()
        self.cache = Cache()

    @staticmethod
    def _is_optional(value):
        # A trailing '?' marks the lookup as optional (missing -> "").
        return value.endswith('?')

    def fetch_stack_output(self, value, key, optional=False):  # pylint: disable=too-many-statements
        """Resolve `value` ("import:account:region:stack:output_key") into a
        CloudFormation stack output and store it under `key`.

        Raises:
            ValueError: when `value` is not a 5-part import string.
            ClientError: re-raised when the lookup fails and not optional.
            Exception: when the output is empty (non-optional) or the
                parameter file structure cannot be determined.
        """
        try:
            [_, account_id, region, stack_name, output_key] = str(
                value).split(':')
        except ValueError:
            raise ValueError(
                "{0} is not a valid import string."
                "syntax should be import:account_id:region:stack_name:output_key"
                .format(str(value)))
        if Resolver._is_optional(output_key):
            LOGGER.info("Parameter %s is considered optional", output_key)
            optional = True
        # Strip the trailing '?' marker before the real lookup.
        output_key = output_key[:-1] if optional else output_key
        try:
            # Assume the read-only automation role in the target account.
            role = self.sts.assume_cross_account_role(
                'arn:aws:iam::{0}:role/{1}'.format(
                    account_id, 'adf-readonly-automation-role'), 'importer')
            cloudformation = CloudFormation(
                region=region,
                deployment_account_region=os.environ["AWS_REGION"],
                role=role,
                stack_name=stack_name,
                account_id=account_id)
            # Cache hit avoids a describe-stacks call for repeated imports.
            stack_output = self.cache.check(
                value) or cloudformation.get_stack_output(output_key)
            if stack_output:
                LOGGER.info("Stack output value is %s", stack_output)
                self.cache.add(value, stack_output)
        except ClientError:
            if not optional:
                raise
            stack_output = ""
            pass
        try:
            # Locate the section (e.g. top-level group) this key belongs to.
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if optional:
                self.stage_parameters[parent_key][key] = stack_output
            else:
                if not stack_output:
                    raise Exception(
                        "No Stack Output found on {account_id} in {region} "
                        "with stack name {stack} and output key "
                        "{output_key}".format(
                            account_id=account_id,
                            region=region,
                            stack=stack_name,
                            output_key=output_key,
                        ))
                self.stage_parameters[parent_key][key] = stack_output
        except IndexError:
            # No parent section found: fall back to a top-level key.
            if stack_output:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = stack_output
            else:
                raise Exception(
                    "Could not determine the structure of the file in order to import from CloudFormation"
                )
        return True

    def upload(self, value, key, file_name):
        """Resolve an "upload:" intrinsic: put the referenced file into the
        regional S3 bucket and store the resulting url/key under `key`.

        Raises:
            Exception: when no S3 pathing style is specified in `value`.
        """
        if not any(item in value
                   for item in ['path', 'virtual-hosted', 's3-key-only']):
            raise Exception(
                'When uploading to S3 you need to specify a '
                'pathing style for the response either path or virtual-hosted, '
                'read more: https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html'
            ) from None
        # Optional region segment: upload:region:style:value vs upload:style:value.
        if str(value).count(':') > 2:
            [_, region, style, value] = value.split(':')
        else:
            [_, style, value] = value.split(':')
            region = DEFAULT_REGION
        bucket_name = self.parameter_store.fetch_parameter(
            '/cross_region/s3_regional_bucket/{0}'.format(region))
        client = S3(region, bucket_name)
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
        except IndexError:
            # No parent section: store the result at the top level instead.
            if self.stage_parameters.get(key):
                self.stage_parameters[key] = client.put_object(
                    "adf-upload/{0}/{1}".format(value, file_name),
                    "{0}".format(value), style, True  #pre-check
                )
            return True
        self.stage_parameters[parent_key][key] = client.put_object(
            "adf-upload/{0}/{1}".format(value, file_name),
            "{0}".format(value), style, True  #pre-check
        )
        return True

    @staticmethod
    def determine_parent_key(d, target_key, parent_key=None):
        """Recursively yield the parent key(s) under which `target_key`
        appears in nested dict `d` (yields None for a top-level match)."""
        for key, value in d.items():
            if key == target_key:
                yield parent_key
            if isinstance(value, dict):
                for result in Resolver.determine_parent_key(
                        value, target_key, key):
                    yield result

    def fetch_parameter_store_value(self, value, key, optional=False):  # pylint: disable=too-many-statements
        """Resolve a "resolve:[region:]name" intrinsic from SSM Parameter
        Store (cached per region/name) and store it under `key`.

        Raises:
            ParameterNotFoundError: re-raised when not optional.
            Exception: when the parameter file structure cannot be determined.
        """
        if self._is_optional(value):
            LOGGER.info("Parameter %s is considered optional", value)
            optional = True
        if str(value).count(':') > 1:
            [_, region, value] = value.split(':')
        else:
            [_, value] = value.split(':')
            region = DEFAULT_REGION
        # Strip the trailing '?' marker before the real lookup.
        value = value[:-1] if optional else value
        client = ParameterStore(region, boto3)
        try:
            parameter = self.cache.check('{0}/{1}'.format(
                region, value)) or client.fetch_parameter(value)
        except ParameterNotFoundError:
            if optional:
                LOGGER.info("Parameter %s not found, returning empty string",
                            value)
                parameter = ""
            else:
                raise
        try:
            parent_key = list(
                Resolver.determine_parent_key(self.comparison_parameters,
                                              key))[0]
            if parameter:
                self.cache.add('{0}/{1}'.format(region, value), parameter)
                self.stage_parameters[parent_key][key] = parameter
        except IndexError as error:
            if parameter:
                if self.stage_parameters.get(key):
                    self.stage_parameters[key] = parameter
            else:
                LOGGER.error(
                    "Parameter was not found, unable to fetch it from parameter store"
                )
                raise Exception(
                    "Parameter was not found, unable to fetch it from parameter store"
                ) from error
        return True

    def update(self, key):
        """Back-fill `key` in every stage-parameter section from the
        comparison (base) parameters where it is missing."""
        for k, _ in self.comparison_parameters.items():
            if not self.stage_parameters.get(
                    k) and not self.stage_parameters.get(k, {}).get(key):
                self.stage_parameters[k] = self.comparison_parameters[k]
            if key not in self.stage_parameters[
                    k] and self.comparison_parameters.get(k, {}).get(key):
                self.stage_parameters[k][key] = self.comparison_parameters[k][
                    key]
class DataManager(DataTable):
    """
    This class provides high level access to a database table.  It
    loads and saves data from the table into an object or a DataSet,
    or a subclass thereof.  The class of objects and data sets that
    are returned can be set using the set_object() and set_dataset()
    functions.

    You can request a single object (row), or you can specify a set
    of criteria, which are then translated into a WHERE clause.

    NOTE(review): this module is Python 2 (`<>`, print statements,
    `string.join`, `has_key`, `im_func.func_code`); do not port
    piecemeal — the dynamic code patching in get_cached_by_keys()
    relies on Python 2 method internals.
    """

    def __init__(self, table_name, field_dictionary):
        DataTable.__init__(self, table_name, field_dictionary)
        # Kept both as base class state and as a composed attribute; the
        # methods below consistently go through self.table.
        self.table = DataTable(table_name, field_dictionary)
        self.reset_cache()

    def reset_cache(self):
        """
        Initialize this data manager's object cache.  Any contents in
        the cache are discarded.
        """
        self.cache = Cache()
        # all_cached == 1 means every row of the table is known to be in the
        # cache, so keyed queries can be answered without touching the DB.
        self.all_cached = 0
        # Timestamp string of the last synch(); '' forces a full refresh.
        self.last_synched = ''

    def preload(self):
        """
        Handles a client request to preload all objects from the
        database, fully populating the local cache and preparing
        for subsequent client requests.

        Preloading can result in significant performance benefits,
        because data managers who have preloaded their cache can fill
        more client requests from cache, avoiding expensive database
        accesses.

        Not all requests for preloading are honored.  To be preloaded,
        a datamanager's cache size must be set to CACHE_UNLIMITED.
        Also, only one call is honored.  Subsequent calls are silently
        ignored.  This makes it safe and efficient for clients to
        request preloading, without needing to know whether or not the
        data manager has already been preloaded.
        """
        if self.all_cached==1: return

        # FIXME: Also try to preload caches that are not unlimited,
        # but are large enough to hold all existing objects.
        # Use a SQL COUNT(*) function to make this determination,
        # so we don't waste lots of time attempting to preload
        # a cache that cannot be preloaded.

        # A bounded cache could evict entries mid-preload, so only an
        # unlimited cache is eligible.
        if self.cache.size <> CACHE_UNLIMITED: return

        #print 'Preloading ' + self.table.name
        self.get_all()
        #i18n_table = self.table.name + '_i18n'
        #if self.dms.has_key(i18n_table):
        #    i18n_dm = self.dms[i18n_table]
        #    i18n_dm.preload()

    def get_by_id(self, id):
        """
        Returns an individual object whose primary key field matches
        the specified id.  If the object is in the cache, the cached
        object is returned.

        Objects which have more than one primary key field cannot be
        reliably retrieved using this function.  In this event, only
        the first matching object will be returned.
        """
        object = self.cache.get_by_key(id)
        if object==None:
            data_field = self.table.id_field()
            sql = self.table.select + ' WHERE ' + data_field.field_name + '=' + data_field.attr_to_field(id)
            cursor = db.select(sql)
            row = cursor.fetchone()
            # No matching row: implicitly returns None.
            if row==None:
                return
            # row_to_object() also adds nothing to the cache here; caching
            # happens on bulk loads via get_sql()/save().
            object = self.row_to_object(row)
        return object

    def get_by_keys(self, filters):
        """
        Returns all objects which match the supplied filters.

        filters is a list of (attribute, operator, value) triples; see
        filters_to_sql() for the supported operators.
        """
        # A cache that has overflowed (filled) no longer holds every row,
        # so demote ourselves from the preloaded state before deciding.
        if self.cache.filled==1:
            self.all_cached = 0
        if self.all_cached==1:
            return self.get_cached_by_keys(filters)
        else:
            sql = self.filters_to_sql(filters)
            return self.get_sql(sql)

    def get_cached_by_keys(self, filters):
        """
        This private function fills keyed requests directly from the
        object cache.  No checking is performed to determine whether
        the cache contains all objects which fit the request.
        Therefore, this function should only be called by data managers
        whose caches are preloaded.  See the preload() function for
        more information on preloading.
        """
        # NOTE(review): sql is computed but never used in this method.
        sql = self.filters_to_sql(filters)
        function_text = self.filters_to_function(filters)
        # Debug prints left in; presumably this path was still being
        # developed — confirm before shipping.
        print 'Function text: '
        print function_text
        code = compile(function_text, '<string>', 'exec')
        print 'Code: ' + str(code)
        print 'Code has %s arguments.' % code.co_argcount
        # HACK: splice the generated code object into the placeholder
        # method so filter() below can call it per cache key.  This is
        # Python 2 only (im_func/func_code).
        self.test_object_filters.im_func.func_code = code
        #print 'Method code: ' + str(self.test_object_filters.im_func.func_code)
        good_keys = filter(self.test_object_filters, self.cache.keys())
        print 'Good keys: ' + str(good_keys)
        dataset = self.new_dataset()
        for key in good_keys:
            dataset[key] = self.cache[key]
        return dataset

    def test_object_filters(self, key):
        # Placeholder: its code object is overwritten at runtime by
        # get_cached_by_keys() with a function generated from the filters.
        return 1

    def filters_to_function(self, filters):
        """
        Converts a list of filters into a Python function which tests
        an object to see if it matches the filters.  Precompiling
        filter tests speeds up key filtering enormously.

        The generated function accepts a single parameter, "key".
        It retrieves the object with that key in the object cache
        and tests for a match.  If the object matches all the filters,
        the generated function returns 1.  Otherwise, it returns 0.
        """
        code = WOStringIO()
        code.write('def test_cached_object(key):\n')
        code.write('    object = self.cache[key]\n')
        for filter in filters:  # NOTE(review): shadows the filter() builtin.
            attribute, operator, value = filter
            test_value = repr(value)
            code.write('    obj_value = object.%s\n' % (attribute))
            if operator.upper()=='LIKE':
                # FIXME(review): `obj_value` here is evaluated at GENERATION
                # time, where no such name exists — this raises NameError as
                # soon as a LIKE filter is used.  It was presumably meant to
                # be the literal string 'obj_value' inside the template.
                # Also note the comparison below returns non-zero on
                # MISmatch (`<>`), which inverts the documented contract —
                # confirm intended semantics before fixing.
                code.write('    if %s > len(%s): return 0\n' % (len(value), obj_value))
                code.write('    return (%s <> obj_value.upper()[:%s])\n' % (test_value.upper(), len(value)))
            elif operator in ['<>', '<', '<=', '=', '>=', '>']:
                if operator=='=':
                    operator = '=='
                # NOTE(review): each filter emits an unconditional `return`,
                # so only the FIRST filter is ever tested; multiple filters
                # are not ANDed as the docstring implies — confirm.
                code.write('    return (object.%s %s %s)\n' % (attribute, operator, repr(value)))
            else:
                raise UnknownOperator('Unrecognized operator: %s' % (operator))
        return code.get_value()

    def filters_to_sql(self, filters):
        """
        Converts a list of filters into the SQL statement which will
        retrieve matching records from the database.
        """
        wheres = []
        for filter in filters:  # NOTE(review): shadows the filter() builtin.
            attribute, operator, value = filter
            field = self.table.fields.find_attribute(attribute)
            if operator.upper()=='LIKE':
                # FIXME(review): `field_name` is undefined here — almost
                # certainly should be `field.field_name`.  Any LIKE filter
                # currently raises NameError.
                wheres.append('upper(' + field_name + ') LIKE ' + field.attr_to_field(value.upper() + '%'))
            else:
                # NOTE: values are escaped by attr_to_field, but this is
                # still string-built SQL; keep filter values trusted.
                wheres.append(field.field_name + operator + field.attr_to_field(value))
        where = ' WHERE ' + string.join(wheres, ' AND ')
        return self.table.select + where

    def get_all(self):
        """
        Returns a set of all objects managed by this data manager.
        If the data manager's cache proves sufficient to cache all
        objects, the cache will subsequently be considered preloaded,
        i.e., subsequent calls to get_by_keys() will be served directly
        from the cache, bypassing expensive database accesses.  See the
        preload() function for more information on preloading.
        """
        # An overflowed cache can no longer be considered complete.
        if self.cache.filled==1:
            self.all_cached = 0
        if self.all_cached==0:
            #print 'Loading all of ' + self.table.name + ' into cache.'
            set = self.get_sql(self.table.select)  # NOTE: shadows set builtin.
            # If everything fit without the cache overflowing, we are now
            # fully preloaded.
            if self.cache.filled==0:
                self.all_cached = 1
            return set
        return self.get_cached()

    def synch(self):
        """
        Synchronize objects in the object cache with the database.
        Objects which have been deleted in the database are removed
        from the object cache.  Objects which are out of synch with
        their database record have their attribute set to match the
        data in the database.

        Relies on a `deleted` audit table (table_name, identifier,
        deleted timestamp) and an `updated` timestamp column on the
        managed table.
        """
        #print 'Synchronizing ' + self.table.name + ' with database'
        last_synched = self.last_synched  # Remember this, because we're about to overwrite it.
        self.last_synched = now_string()

        # Delete any newly deleted objects.
        sql = 'SELECT identifier FROM deleted WHERE table_name=' + wsq(self.table.name) + ' AND deleted >= ' + wsq(last_synched)
        cursor = db.select(sql)
        while (1):
            row = cursor.fetchone()
            if row==None: break

            # Load keys for the deleted object.  A throwaway object is
            # built carrying only the key attributes so cache.delete()
            # can locate the entry.
            object = self.new_object()
            if len(self.table.key_list)==1:
                field = self.table.fields[self.table.key_list[0]]
                value = field.field_to_attr(row[0])
                setattr(object, field.attribute, value)
            else:
                # Composite keys are stored space-separated in `identifier`.
                values = row[0].split()
                for key in self.table.key_list:
                    field = self.table.fields[key]
                    value = field.field_to_attr(values[field.index])
                    setattr(object, field.attribute, value)
            object.key = self.table.get_key(object)
            #print 'Deleting from ' + self.table.name + ' cache: ' + str(value)
            self.cache.delete(object)
            # FIXME: Delete the object from all data sets which contain it!

        # Update any newly updated objects.
        sql = self.table.select + ' WHERE updated >= ' + wsq(last_synched)
        cursor = db.select(sql)
        while (1):
            row = cursor.fetchone()
            if row==None: break
            key = self.table.get_row_key(row)
            if self.cache.has_key(key):
                # Refresh the existing cached object in place so references
                # held by clients see the new data.
                object = self.cache[key]
                self.table.load_row(object, row)
                #print 'Updating in ' + self.table.name + ' cache: ' + str(object.key)
            else:
                object = self.row_to_object(row)
                self.cache.add(object)
                #print 'Adding in ' + self.table.name + ' cache: ' + str(object.key)
            # FIXME: Add the object to all data sets whose filters it matches.

    def get_cached(self):
        """
        Returns a dataset containing all objects in the object cache.
        """
        #print 'Pulling ' + self.table.name + ' from cache.'
        dataset = self.new_dataset()
        for key in self.cache.keys():
            dataset[key] = self.cache[key]
        return dataset

    def get_sql(self, sql):
        """
        Accepts a SQL statement, instantiates the corresponding objects
        from the database, and stores those objects in the data cache
        if possible.
        """
        #print 'Cache miss, loading: ' + self.table.name
        dataset = self.new_dataset()
        cursor = db.select(sql)
        while (1):
            row = cursor.fetchone()
            if row==None: break
            object = self.row_to_object(row)
            dataset[object.key] = object
            # cache.add is assumed to silently drop entries once the cache
            # is full (setting cache.filled) — confirm against Cache impl.
            self.cache.add(object)
        return dataset

    def set_object_class(self, object_class):
        # Class used by new_object(); must accept (dms, data_manager).
        self.object_class = object_class

    def set_dataset_class(self, dataset_class):
        # Class used by new_dataset(); must accept (data_manager).
        self.dataset_class = dataset_class

    def new_object(self):
        """
        Instantiate an empty object of the managed class with every
        attribute set to its field default, marked unchanged and not
        yet present in the database.
        """
        object = self.object_class(self.dms, self)
        for key in self.table.fields.keys():
            field = self.table.fields[key]
            setattr(object, field.attribute, field.get_default())
        object.changed = 0
        object.in_database = 0
        return object

    def new_dataset(self):
        # Fresh, empty dataset bound to this manager.
        return self.dataset_class(self)

    def row_to_object(self, row):
        # Build an object and populate it from a fetched row.
        object = self.new_object()
        self.table.load_row(object, row)
        return object

    def add(self, object):
        # Alias for save(); kept for API symmetry with delete().
        self.save(object)

    def save(self, object):
        """
        Persist the object: INSERT when it is not yet in the database,
        otherwise UPDATE keyed on its key fields.  Commits immediately
        and refreshes the cache entry.
        """
        object.key = self.table.get_key(object)  # New objects need their key calculated.
        # NOTE(review): this skips brand-new objects whose attributes were
        # never touched (new_object() sets changed=0, in_database=0), so an
        # all-defaults object is silently not inserted — confirm intended.
        if object.changed==0 and object.in_database==0:
            return
        if object.in_database==0:
            field_list = []
            value_list = []
            for key in self.table.field_list:
                field = self.table.fields[key]
                if field.data_type=='created':
                    # The database is responsible for setting the timestamp.
                    continue
                if field.data_type=='sequence':
                    # When inserting, always increment the value.
                    # NOTE(review): uses self.name here but self.table.name
                    # elsewhere — confirm both refer to the same table name.
                    new_id = db.next_id(self.name, field.field_name)
                    setattr(object, field.attribute, new_id)
                value = field.attr_to_field(getattr(object, field.attribute))
                field_list.append(field.field_name)
                value_list.append(value)
            sql = 'INSERT INTO %s (%s) VALUES (%s)' % (self.table.name, string.join(field_list, ', '), string.join(value_list, ', '))
        else:
            update_list = []
            where_list = []
            for key in self.table.field_list:
                field = self.table.fields[key]
                if field.data_type=='created': continue
                if field.data_type=='updated':
                    # Touch the updated column so synch() picks this row up.
                    value = wsq(now_string())
                else:
                    value = field.attr_to_field(getattr(object, field.attribute))
                update_list.append(field.field_name + '=' + value)
                if field.key_field==1:
                    where_list.append(field.field_name + '=' + value)
            sql = 'UPDATE %s SET %s WHERE %s' % (self.table.name, string.join(update_list, ', '), string.join(where_list, ' AND '))
        # print sql
        db.runsql(sql)
        db.commit()
        self.cache.add(object)
        object.in_database = 1
        object.changed = 0

    def delete(self, object):
        """
        Remove the object from the cache and the database, and record
        the deletion in the `deleted` audit table so other processes'
        synch() calls can evict it from their caches.
        """
        if object.in_database==0: return
        self.cache.delete(object)
        wheres = []
        for key in self.table.key_list:
            data_field = self.table.fields[key]
            value = data_field.attr_to_field(getattr(object, data_field.attribute))
            wheres.append(data_field.field_name + '=' + value)
        where = ' WHERE ' + string.join(wheres, ' AND ')
        sql = 'DELETE FROM %s %s' % (self.table.name, where)
        db.runsql(sql)
        db.commit()
        # Audit the deletion; str(object.key) presumably matches the
        # space-separated composite format synch() parses — confirm.
        sql = 'INSERT INTO deleted (table_name, identifier) VALUES (%s, %s)' % (wsq(self.table.name), wsq(str(object.key)))
        db.runsql(sql)
        db.commit()

    def delete_by_keys(self, filters):
        """
        Delete every object matching the supplied filters.
        """
        dataset = self.get_by_keys(filters)
        for key in dataset.keys():
            object = dataset[key]
            self.delete(object)

    def clear(self, dataset):
        # Delete every object contained in the given dataset.
        for key in dataset.keys():
            self.delete(dataset[key])