def handle(self, *args, **options): masters = ReplicaSet.get_what_master().transinstance_set.all() what_torrent_ids = WhatTorrent.objects.all().values_list('id', flat=True) start = 0 page_size = 128 while start < len(what_torrent_ids): print 'Updating objects {0}-{1}/{2}'.format( start, start + page_size, len(what_torrent_ids)) bulk = WhatTorrent.objects.defer('torrent_file').in_bulk( what_torrent_ids[start:start + page_size]) start += page_size trans_torrents = { t.what_torrent_id: t for t in TransTorrent.objects.filter( instance__in=masters, what_torrent__in=bulk.values()) } for what_torrent in bulk.itervalues(): trans_torrent = trans_torrents.get(what_torrent.id) if trans_torrent is not None and trans_torrent.torrent_done == 1: try: WhatFileMetadataCache.get_metadata_batch( what_torrent, trans_torrent, True) except Exception as ex: print 'Failed updating torrent {0}: {1}'.format( what_torrent.id, ex)
def get_playlist_files(playlist):
    """Resolve a playlist identifier of the form 'what/<torrent id>'.

    Returns a (playlist_name, items) tuple for recognized identifiers.
    Any other identifier falls through and yields an implicit None,
    matching the original contract.
    """
    prefix = 'what/'
    if playlist.startswith(prefix):
        what_id = int(playlist[len(prefix):])
        what_torrent = WhatTorrent.objects.get(id=what_id)
        trans_torrent = what_torrent.master_trans_torrent
        items = WhatFileMetadataCache.get_metadata_batch(
            what_torrent, trans_torrent, False)
        playlist_name = ' - '.join(
            [what_torrent.joined_artists, what_torrent.info_title])
        return playlist_name, items
def get_playlist_files(playlist):
    """Return (playlist_name, cache entries) for a 'what/<id>' playlist.

    Non-'what/' identifiers return None, exactly as the original's
    implicit fall-through did.
    """
    if not playlist.startswith('what/'):
        return None
    torrent_id = int(playlist[len('what/'):])
    torrent = WhatTorrent.objects.get(id=torrent_id)
    master_torrent = torrent.master_trans_torrent
    entries = WhatFileMetadataCache.get_metadata_batch(
        torrent, master_torrent, False)
    title = torrent.joined_artists + ' - ' + torrent.info_title
    return title, entries
def handle(self, *args, **options): masters = ReplicaSet.get_what_master().transinstance_set.all() what_torrent_ids = WhatTorrent.objects.all().values_list("id", flat=True) start = 0 page_size = 128 while start < len(what_torrent_ids): print "Updating objects {0}-{1}/{2}".format(start, start + page_size, len(what_torrent_ids)) bulk = WhatTorrent.objects.defer("torrent_file").in_bulk(what_torrent_ids[start : start + page_size]) start += page_size trans_torrents = { t.what_torrent_id: t for t in TransTorrent.objects.filter(instance__in=masters, what_torrent__in=bulk.values()) } for what_torrent in bulk.itervalues(): trans_torrent = trans_torrents.get(what_torrent.id) if trans_torrent is not None and trans_torrent.torrent_done == 1: try: WhatFileMetadataCache.get_metadata_batch(what_torrent, trans_torrent, True) except Exception as ex: print "Failed updating torrent {0}: {1}".format(what_torrent.id, ex)
def get_torrent_group_have(what_trans_torrents, sync_torrents=False):
    """Summarize how much of a torrent group we have locally.

    what_trans_torrents -- iterable of (WhatTorrent, TransTorrent-or-None)
        pairs for one group; TransTorrent may be None when the torrent is
        not loaded on any instance.
    sync_torrents -- when True, refresh each TransTorrent's state from the
        client before reading its progress.

    Returns a dict: {'have': True, 'duration': ..., 'playlist': [...]} when
    a fully-downloaded torrent exists; {'have': <progress float>} when only
    a partial download exists; {'have': False} when nothing is loaded.
    """
    what_torrents = [w_t[0] for w_t in what_trans_torrents]
    # Index the trans side by WhatTorrent id for O(1) lookups below.
    trans_torrents = {w_t[0].id: w_t[1] for w_t in what_trans_torrents}
    # Candidate order matters: sort_filter_torrents ranks preferred torrents
    # first, and the first fully-done candidate wins outright.
    torrents = sort_filter_torrents(what_torrents)
    torrent = None
    # Pass 1: take the first candidate that is 100% downloaded.
    for candidate in torrents:
        trans_torrent = trans_torrents.get(candidate.id)
        if trans_torrent is None:
            continue
        if trans_torrent.torrent_done == 1:
            torrent = candidate
            break
    if torrent is None:
        # Pass 2: no finished torrent — pick the one with the most progress,
        # optionally syncing live state from the client first.
        for candidate in torrents:
            trans_torrent = trans_torrents.get(candidate.id)
            if trans_torrent is None:
                continue
            if sync_torrents:
                # Side effect: refreshes trans_torrent.torrent_done in place,
                # so read it only after the sync.
                trans_torrent.sync_t_torrent()
            done = trans_torrent.torrent_done
            if torrent is None or done > trans_torrents[torrent.id].torrent_done:
                torrent = candidate
    if torrent:
        trans_torrent = trans_torrents[torrent.id]
        if trans_torrent.torrent_done == 1:
            # Fully downloaded: build a playable file list from cached metadata.
            cache_entries = WhatFileMetadataCache.get_metadata_batch(torrent, trans_torrent, False)
            duration = sum(c.duration for c in cache_entries)
            return {
                'have': True,
                'duration': duration,
                'playlist': [
                    {
                        # '#<index>' fragment identifies the file within the torrent.
                        'id': 'what/' + str(torrent.id) + '#' + str(i),
                        'url': reverse('player.views.get_file') + '?path=' + urlquote(
                            entry.path, ''),
                        'metadata': entry.easy
                    }
                    for i, entry in enumerate(cache_entries)
                ],
            }
        else:
            # Partial download: report the progress value itself
            # (presumably a 0..1 fraction — confirm against TransTorrent).
            return {
                'have': trans_torrent.torrent_done
            }
    return {
        'have': False
    }