def test_get_master(self):
    """Both zone masters resolve with the expected zone and the name 'master'."""
    expectations = [
        (ReplicaSet.get_what_master(), ReplicaSet.ZONE_WHAT),
        (ReplicaSet.get_bibliotik_master(), ReplicaSet.ZONE_BIBLIOTIK),
    ]
    for master, expected_zone in expectations:
        self.assertEqual(master.zone, expected_zone)
        self.assertEqual(master.name, 'master')
def handle(self, *args, **options):
    # Management command: migrate what.cd torrents (exported to a JSONL file)
    # into the redacted.ch ("PTH") replica set.
    print 'Initiating what client...'
    what = get_what_client(dummy_request, True)
    index_response = what.request('index')
    print 'Status:', index_response['status']
    print 'Scanning replica sets...'
    # The legacy what.cd replica set must be gone before migration starts.
    try:
        ReplicaSet.objects.get(zone='what.cd')
        raise Exception('Please delete your what.cd replica set now')
    except ReplicaSet.DoesNotExist:
        pass
    # The destination replica set must exist and have at least one instance.
    try:
        pth_replica_set = ReplicaSet.get_what_master()
        if pth_replica_set.transinstance_set.count() < 1:
            raise ReplicaSet.DoesNotExist()
    except ReplicaSet.DoesNotExist:
        raise Exception('Please get your PTH replica set ready')
    print 'Scanning locations...'
    # First pass over the export: build the old-location -> new-location map,
    # prompting the operator for any path not already stored in the DB.
    location_mapping = {}
    with open('what_manager2_torrents.jsonl', 'rb') as torrents_input:
        for line in torrents_input:
            data = ujson.loads(line)
            location_path = data['location']['path']
            if location_path not in location_mapping:
                try:
                    new_location = DownloadLocationEquivalent.objects.get(
                        old_location=location_path).new_location
                except DownloadLocationEquivalent.DoesNotExist:
                    new_location = raw_input(
                        'Enter the new location to map to {}: '.format(
                            location_path))
                    # Persist the answer so the operator is asked only once.
                    DownloadLocationEquivalent.objects.create(
                        old_location=location_path,
                        new_location=new_location,
                    )
                location_mapping[location_path] = new_location
    print 'Location mappings:'
    # Every mapped target must exist as a DownloadLocation in the new zone.
    for old_location, new_location in location_mapping.items():
        try:
            DownloadLocation.objects.get(zone='redacted.ch', path=new_location)
        except DownloadLocation.DoesNotExist:
            raise Exception(
                'Please create the {} location in the DB in zone redacted.ch'
                .format(new_location))
        print old_location, '=', new_location
    # Second pass: run one migration job per exported torrent.
    with open('what_manager2_torrents.jsonl', 'rb') as torrents_input:
        for line in torrents_input:
            data = ujson.loads(line)
            migration_job = TorrentMigrationJob(
                what, location_mapping, data,
                flac_only=options['flac_only'])
            migration_job.process()
def error_torrents(request):
    """Render the partial listing every torrent that reported a transmission error."""
    what_instances = ReplicaSet.get_what_master().transinstance_set.all()
    bib_instances = ReplicaSet.get_bibliotik_master().transinstance_set.all()
    torrents = []
    torrents += list(
        TransTorrent.objects.filter(instance__in=what_instances)
        .exclude(torrent_error=0).prefetch_related('what_torrent'))
    torrents += list(
        BibliotikTransTorrent.objects.filter(instance__in=bib_instances)
        .exclude(torrent_error=0).prefetch_related('bibliotik_torrent'))
    return render(request, 'home/part_ui/error_torrents.html',
                  {'torrents': torrents})
def files_sync(self): check_running() print 'Running initial rsync...' self.call_rsyncs() print 'Iterating instances...' current_torrents = {} best_instance = None for instance in ReplicaSet.get_bibliotik_master( ).transinstance_set.all(): t_torrents = instance.get_t_torrents( TransTorrentBase.sync_t_arguments) if best_instance is None or len(t_torrents) < best_instance[0]: best_instance = (len(t_torrents), instance) for t_torrent in t_torrents: part = t_torrent.downloadDir.rpartition('/') current_torrents[int(part[2])] = { 'download_dir': part[0], } new_torrents = {} print 'Iterating locations...' for location in DownloadLocation.objects.filter( zone=ReplicaSet.ZONE_BIBLIOTIK): for i in os.listdir(location.path): torrent_id = int(i) if torrent_id not in current_torrents: new_torrents[torrent_id] = { 'id': torrent_id, 'location': location, } else: del current_torrents[torrent_id] to_add = list() for batch_number, batch in enumerate( chunks(new_torrents.itervalues(), 100)): print 'Requests status for batch {0}...'.format(batch_number) batch_status = torrents_status(unicode(i['id']) for i in batch) for row in batch_status: if row['status'] == 'downloaded': to_add.append(new_torrents[row['id']]) print 'Running second rsync...' self.call_rsyncs() preferred_instance = best_instance[1] for row in to_add: print 'Downloading torrent {0}'.format(row['id']) torrent_file = get_torrent(row['id']) print 'Adding torrent {0}'.format(row['id']) t_torrent = preferred_instance.client.add_torrent( base64.b64encode(torrent_file), download_dir=os.path.join(str(row['location'].path), str(row['id'])), paused=False) monitor_torrent(preferred_instance.client, t_torrent.id) print 'Completed.'
def handle(self, *args, **options): masters = ReplicaSet.get_what_master().transinstance_set.all() what_torrent_ids = WhatTorrent.objects.all().values_list('id', flat=True) start = 0 page_size = 128 while start < len(what_torrent_ids): print 'Updating objects {0}-{1}/{2}'.format( start, start + page_size, len(what_torrent_ids)) bulk = WhatTorrent.objects.defer('torrent_file').in_bulk( what_torrent_ids[start:start + page_size]) start += page_size trans_torrents = { t.what_torrent_id: t for t in TransTorrent.objects.filter( instance__in=masters, what_torrent__in=bulk.values()) } for what_torrent in bulk.itervalues(): trans_torrent = trans_torrents.get(what_torrent.id) if trans_torrent is not None and trans_torrent.torrent_done == 1: try: WhatFileMetadataCache.get_metadata_batch( what_torrent, trans_torrent, True) except Exception as ex: print 'Failed updating torrent {0}: {1}'.format( what_torrent.id, ex)
def download_zip(request, what_id):
    """Stream a what torrent's data files to the user as a zip download.

    Looks the torrent up on every instance of the what master replica set,
    walks its data directory, and hands the (archive name, disk path) pairs to
    download_zip_handler.
    """
    t_torrent = None
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        try:
            t_torrent = TransTorrent.objects.get(instance=instance,
                                                 what_torrent_id=what_id)
        except TransTorrent.DoesNotExist:
            pass
    if not t_torrent:
        return HttpResponse('Could not find that torrent.')
    torrent_file = [f for f in os.listdir(t_torrent.path) if '.torrent' in f]
    if len(torrent_file) == 1:
        torrent_file = os.path.splitext(torrent_file[0])[0]
    else:
        return HttpResponse('Not one .torrent in dir: ' + t_torrent.path)
    target_dir = os.path.join(t_torrent.path,
                              t_torrent.torrent_name).encode('utf-8')
    torrent_files = []
    for root, dirs, files in os.walk(target_dir):
        # root is a byte path (target_dir was utf-8 encoded above), so the
        # relative path is already utf-8 bytes.
        # FIX: re-encoding it with .encode('utf-8') implicitly ascii-decoded
        # the bytes first and raised UnicodeDecodeError for non-ASCII names.
        rel_path = root.replace(target_dir, '')
        if rel_path.startswith('/') or rel_path.startswith('\\'):
            rel_path = rel_path[1:]
        for filename in files:
            torrent_files.append((os.path.join(rel_path, filename),
                                  os.path.join(root, filename)))
    download_filename = '[{0}] {1}.zip'.format(what_id, torrent_file)
    response = download_zip_handler(download_filename, torrent_files)
    LogEntry.add(request.user, u'action', u'Downloaded {0} - {1}'.format(
        t_torrent, filesizeformat(response['Content-Length'])
    ))
    return response
def sync_replicas(request):
    """Sync every non-master replica set's DB, then push replicas to the master."""
    start_time = time.time()
    part_start_time = time.time()
    master = ReplicaSet.get_what_master()

    def _fail(message_template, ex):
        # Log the failure and build the JSON error payload for the caller.
        tb = traceback.format_exc()
        LogEntry.add(request.user, u"error", message_template.format(ex), tb)
        return {"success": False, "error": unicode(ex), "traceback": tb}

    try:
        for replica_set in ReplicaSet.objects.all():
            if replica_set.id != master.id:
                trans_sync.sync_all_instances_db(request, replica_set)
        replicas_dbs_time = time.time() - part_start_time
    except Exception as ex:
        return _fail(u"Error syncing replicas DB: {0}", ex)
    try:
        trans_sync.sync_all_replicas_to_master()
    except Exception as ex:
        return _fail(u"Error running replica sync: {0}", ex)
    time_taken = time.time() - start_time
    LogEntry.add(
        request.user, u"info",
        u"Completed replica sync in {0:.3f}s. DB in {1:.3f}s.".format(
            time_taken, replicas_dbs_time),
    )
    return {"success": True}
def move_torrent_to_location(request): what_id = int(request.GET['id']) new_location = DownloadLocation.objects.get(zone=ReplicaSet.ZONE_WHAT, path=request.GET['path']) what_torrent = WhatTorrent.objects.get(id=what_id) trans_torrent = TransTorrent.objects.get( instance__in=ReplicaSet.get_what_master().transinstance_set.all(), what_torrent=what_torrent) if trans_torrent.location.id == new_location.id: raise Exception('Torrent is already there.') print 'Source is', trans_torrent.location.path print 'Destination is', new_location.path print 'Instance is', trans_torrent.instance.name print 'Size is', trans_torrent.torrent_size print 'Name is', trans_torrent.torrent_name client = trans_torrent.instance.client client.stop_torrent(trans_torrent.torrent_id) source_path = os.path.join(trans_torrent.location.path, unicode(what_torrent.id)) shutil.move(source_path, new_location.path) client.move_torrent_data(trans_torrent.torrent_id, os.path.join(new_location.path, unicode(what_torrent.id))) trans_torrent.location = new_location trans_torrent.save() client.verify_torrent(trans_torrent.torrent_id) client.start_torrent(trans_torrent.torrent_id) return { 'success': True }
def download_bibliotik_zip(request, bibliotik_id):
    """Stream a bibliotik torrent's data files to the user as a zip download."""
    b_torrent = None
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        try:
            b_torrent = BibliotikTransTorrent.objects.get(
                instance=instance, bibliotik_torrent_id=bibliotik_id)
        except BibliotikTransTorrent.DoesNotExist:
            pass
    if not b_torrent:
        return HttpResponse('Could not find that torrent.')
    torrent_files = []
    for root, dirs, files in os.walk(b_torrent.path):
        assert root.find(b_torrent.path) != -1
        prefix = root.replace(b_torrent.path, '')
        if prefix.startswith('/') or prefix.startswith('\\'):
            prefix = prefix[1:]
        for name in files:
            arc_name = os.path.join(prefix.encode('utf-8'), name)
            torrent_files.append((arc_name, os.path.join(root, name)))
    download_filename = u'[{0}] {1}.zip'.format(bibliotik_id,
                                                b_torrent.torrent_name)
    response = download_zip_handler(download_filename, torrent_files)
    LogEntry.add(request.user, u'action', u'Downloaded {0} - {1}'
                 .format(b_torrent, filesizeformat(response['Content-Length'])))
    return response
def refresh_oldest_torrent(request):
    """Re-fetch metadata for the least recently retrieved bibliotik torrent."""
    bibliotik_id = request.GET['bibliotik_id']
    bibliotik_client = BibliotikClient(bibliotik_id)
    # order_by('retrieved') ascending: index 0 is the stalest row.
    oldest = BibliotikTorrent.objects.defer('torrent_file').order_by('retrieved')[0]
    oldest_id = oldest.id
    try:
        oldest.import_bibliotik_data(bibliotik_client)
    except Exception:
        # The fetch failed. If we still seed it, report the error; otherwise
        # the torrent is gone from the tracker and we drop our record.
        try:
            BibliotikTransTorrent.objects.get(
                instance__in=ReplicaSet.get_bibliotik_master().transinstance_set.all(),
                bibliotik_torrent=oldest)
            return {
                'success': False,
                'id': oldest_id,
                'status': 'request error',
            }
        except BibliotikTransTorrent.DoesNotExist:
            oldest.delete()
            return {
                'success': True,
                'id': oldest_id,
                'status': 'deleted',
            }
    old_retrieved = oldest.retrieved
    oldest.retrieved = timezone.now()
    oldest.save()
    return {
        'success': True,
        'id': oldest_id,
        'status': 'refreshed',
        'retrieved': unicode(old_retrieved),
    }
def handle(self, *args, **options):
    # Load a pre-downloaded torrent (data directory + .torrent file) into WM.
    if not self.check_args(args):
        print u'Pass the torrent data directory as a first argument, ' \
              u'a path to the .torrent file as a second.'
        return
    self.data_path, self.torrent_path = [wm_unicode(i) for i in args]
    with open(wm_str(self.torrent_path), 'rb') as f:
        self.torrent_info = bencode.bdecode(f.read())
    if options['base_dir']:
        # The given path is the parent dir; descend into the torrent's own
        # directory (the "name" field of the torrent's info dict).
        self.data_path = os.path.join(
            self.data_path, wm_unicode(self.torrent_info['info']['name']))
    print u'Checking to see if torrent is already loaded into WM..'
    masters = list(ReplicaSet.get_what_master().transinstance_set.all())
    try:
        TransTorrent.objects.get(instance__in=masters,
                                 info_hash=self.info_hash)
        print u'Torrent already added to WM. Skipping...'
        return False
    except TransTorrent.DoesNotExist:
        pass
    self.what_torrent = WhatTorrent.get_or_create(self.pseudo_request,
                                                  info_hash=self.info_hash)
    # check_files() verifies the on-disk data; bail out if it does not match.
    if not self.check_files():
        return
    self.move_files()
    print 'Adding torrent to WM...'
    manage_torrent.add_torrent(self.pseudo_request, self.trans_instance,
                               self.download_location, self.what_torrent.id)
    print 'Done!'
def download_torrent_group(request, group_id):
    """Add every torrent selected from a torrent group to the what master."""
    if not request.user.has_perm('home.add_whattorrent'):
        return {
            'success': False,
            'error': 'You don\'t have permission to add torrents. Talk to the administrator.',
        }
    try:
        torrent_group = WhatTorrentGroup.objects.get(id=group_id)
    except WhatTorrentGroup.DoesNotExist:
        torrent_group = WhatTorrentGroup.update_from_what(
            get_what_client(request), group_id)
    # A cached group without its torrent listing still needs a refresh.
    if torrent_group.torrents_json is None:
        torrent_group = WhatTorrentGroup.update_from_what(
            get_what_client(request), group_id)
    ids = get_ids_to_download(torrent_group)
    try:
        instance = ReplicaSet.get_what_master().get_preferred_instance()
        download_location = DownloadLocation.get_what_preferred()
        for torrent_id in ids:
            add_torrent(request, instance, download_location, torrent_id)
    except Exception as ex:
        return {'success': False,
                'error': unicode(ex),
                'traceback': traceback.format_exc()}
    return {'success': True, 'added': len(ids)}
def move_torrent_to_location(request):
    # Move a torrent's data directory to a different download location, then
    # point transmission at the new path and re-verify the data.
    what_id = int(request.GET["id"])
    new_location = DownloadLocation.objects.get(zone=ReplicaSet.ZONE_WHAT,
                                                path=request.GET["path"])
    what_torrent = WhatTorrent.objects.get(id=what_id)
    trans_torrent = TransTorrent.objects.get(
        instance__in=ReplicaSet.get_what_master().transinstance_set.all(),
        what_torrent=what_torrent
    )
    if trans_torrent.location.id == new_location.id:
        raise Exception("Torrent is already there.")
    print "Source is", trans_torrent.location.path
    print "Destination is", new_location.path
    print "Instance is", trans_torrent.instance.name
    print "Size is", trans_torrent.torrent_size
    print "Name is", trans_torrent.torrent_name
    client = trans_torrent.instance.client
    # Stop seeding before touching files on disk.
    client.stop_torrent(trans_torrent.torrent_id)
    source_path = os.path.join(trans_torrent.location.path,
                               unicode(what_torrent.id))
    shutil.move(source_path, new_location.path)
    client.move_torrent_data(trans_torrent.torrent_id,
                             os.path.join(new_location.path,
                                          unicode(what_torrent.id)))
    trans_torrent.location = new_location
    trans_torrent.save()
    # Re-check the moved data, then resume seeding.
    client.verify_torrent(trans_torrent.torrent_id)
    client.start_torrent(trans_torrent.torrent_id)
    return {"success": True}
def download_bibliotik_zip(request, bibliotik_id):
    # Stream a bibliotik torrent's data files to the user as a zip download.
    # Locate the torrent on any instance of the bibliotik master replica set.
    b_torrent = None
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        try:
            b_torrent = BibliotikTransTorrent.objects.get(
                instance=instance, bibliotik_torrent_id=bibliotik_id)
        except BibliotikTransTorrent.DoesNotExist:
            pass
    if not b_torrent:
        return HttpResponse('Could not find that torrent.')
    torrent_files = []
    # NOTE: os.walk yields (root, dirs, files); the dirs element is bound to
    # the name rel_path here and immediately overwritten in the inner loop.
    for root, rel_path, files in os.walk(b_torrent.path):
        for file in files:
            assert root.find(b_torrent.path) != -1
            # Archive entries are the path relative to the torrent root.
            rel_path = root.replace(b_torrent.path, '')
            if rel_path.startswith('/') or rel_path.startswith('\\'):
                rel_path = rel_path[1:]
            rel_path = os.path.join(rel_path.encode('utf-8'), file)
            torrent_files.append((rel_path, os.path.join(root, file)))
    download_filename = u'[{0}] {1}.zip'.format(bibliotik_id,
                                                b_torrent.torrent_name)
    response = download_zip_handler(download_filename, torrent_files)
    LogEntry.add(
        request.user, u'action', u'Downloaded {0} - {1}'.format(
            b_torrent, filesizeformat(response['Content-Length'])))
    return response
def update_freeleech(request):
    """Scan the tracker's freeleech list and add every torrent not yet downloaded."""
    start_time = time.time()
    added = 0
    total_bytes = 0
    total_torrents = 0
    try:
        master = ReplicaSet.get_what_master()
        what_client = get_what_client(request)
        for what_id, what_group, what_torrent in what_client.get_free_torrent_ids():
            total_bytes += what_torrent['size']
            total_torrents += 1
            if WhatTorrent.is_downloaded(request, what_id=what_id):
                continue
            freeleech_add_torrent(request, master, what_id)
            added += 1
        log_type = u'action' if added > 0 else u'info'
        # Only the designated host emails, and only for sizeable batches.
        if added >= FREELEECH_EMAIL_THRESHOLD and socket.gethostname() == FREELEECH_HOSTNAME:
            send_freeleech_email(u'Added {0} freeleech torrents'.format(added))
        time_taken = time.time() - start_time
        LogEntry.add(request.user, log_type,
                     u'Successfully updated freeleech in {0:.3f}s. '
                     u'{1} added. {2} / {3} torrents total.'.format(
                         time_taken, added, filesizeformat(total_bytes),
                         total_torrents))
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error updating freeleech: {0}({1})'.format(
                         type(ex).__name__, unicode(ex)), tb)
    return {'success': True, 'added': added}
def add_torrent(request):
    # AJAX endpoint: add a single what torrent by id to the preferred instance.
    if not request.user.has_perm("home.add_whattorrent"):
        return {"success": False,
                "error": "You don't have permission to add torrents. Talk to the administrator."}
    try:
        if "dir" in request.POST:
            download_location = DownloadLocation.objects.get(
                zone=ReplicaSet.ZONE_WHAT, path=request.POST["dir"])
        else:
            download_location = DownloadLocation.get_what_preferred()
    except DownloadLocation.DoesNotExist:
        return {"success": False, "error": u"Download location does not exist."}
    if download_location.free_space_percent < MIN_FREE_DISK_SPACE:
        LogEntry.add(request.user, u"error",
                     u"Failed to add torrent. Not enough disk space.")
        return {"success": False, "error": u"Not enough free space on disk."}
    try:
        what_id = int(request.POST["id"])
    except (ValueError, MultiValueDictKeyError):
        return {"success": False, "error": u"Invalid id"}
    instance = ReplicaSet.get_what_master().get_preferred_instance()
    try:
        if WhatTorrent.is_downloaded(request, what_id=what_id):
            # NOTE(review): filter(...)[0] raises an uncaught IndexError if
            # is_downloaded is true but no TransTorrent row exists — confirm.
            m_torrent = TransTorrent.objects.filter(what_torrent_id=what_id)[0]
            raise TorrentAlreadyAddedException()
        m_torrent = manage_torrent.add_torrent(request, instance,
                                               download_location, what_id, True)
        m_torrent.what_torrent.added_by = request.user
        m_torrent.what_torrent.save()
    except TorrentAlreadyAddedException:
        LogEntry.add(request.user, u"info",
                     u"Tried adding what_id={0}, already added.".format(what_id))
        what_torrent = WhatTorrent.get_or_none(request, what_id=what_id)
        result = {
            "success": False,
            "error_code": u"already_added",
            "error": u"Already added.",
            "torrent_id": m_torrent.what_torrent_id,
        }
        # info_category_id == 1 appears to mean music; artist/title are only
        # attached for that category.
        if m_torrent.what_torrent.info_category_id == 1:
            result["artist"] = what_torrent.info_artist if what_torrent else "<<< Unable to find torrent >>>"
            result["title"] = what_torrent.info_title if what_torrent else "<<< Unable to find torrent >>>"
        return result
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u"error",
                     u"Tried adding what_id={0}. Error: {1}".format(
                         what_id, unicode(ex)), tb)
        return {"success": False, "error": unicode(ex), "traceback": tb}
    tags = request.POST.get("tags")
    if tags:
        m_torrent.what_torrent.tags = tags
        m_torrent.what_torrent.save()
    LogEntry.add(request.user, u"action",
                 u"Added {0} to {1}".format(m_torrent, m_torrent.instance))
    result = {"success": True}
    if m_torrent.what_torrent.info_category_id == 1:
        # NOTE(review): the trailing commas make these 1-tuples, unlike the
        # plain strings in the already-added branch above — confirm intended.
        result["artist"] = (m_torrent.what_torrent.info_artist,)
        result["title"] = (m_torrent.what_torrent.info_title,)
    return result
def delete_torrent(request, what_id):
    """Remove a torrent from transmission, the database, and disk."""
    t_torrent = None
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        try:
            t_torrent = TransTorrent.objects.get(instance=instance,
                                                 what_torrent_id=what_id)
        except TransTorrent.DoesNotExist:
            pass
    if not t_torrent:
        return HttpResponse('Could not find that torrent.')
    # Capture the path now; the model rows are gone before rmtree runs.
    path = wm_str(t_torrent.path)
    WhatTorrent.objects.get(info_hash=t_torrent.info_hash).delete()
    t_torrent.instance.client.remove_torrent(t_torrent.info_hash)
    try:
        shutil.rmtree(path, onerror=attemptFixPermissions)
        return redirect('home.views.torrents')
    except OSError as e:
        if e.errno != errno.EPERM:  # Anything but "Operation not permitted"
            raise e
        return HttpResponse(
            'Error removing folder "{}". Permission denied. Please remove folder '
            'manually. The torrent and database entry has been successfully removed '
            'from Transmission and WM.'.format(path))
def remove_transmission_dupes(request):
    """List (and with ?remove, delete) torrents loaded on more than one instance."""
    dupes = defaultdict(list)
    should_remove = 'remove' in request.GET
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        for m_torrent in instance.transtorrent_set.all():
            entries = dupes[m_torrent.what_torrent_id]
            entries.append(instance.name + '/' + str(m_torrent.torrent_id))
            # Everything after the first sighting is a duplicate.
            if len(entries) > 1 and should_remove:
                instance.client.remove_torrent(m_torrent.torrent_id)
    return list(i for i in dupes.iteritems() if len(i[1]) > 1)
def add_bibliotik_torrent(torrent_id, instance=None, location=None, bibliotik_client=None, add_to_client=True): bibliotik_torrent = BibliotikTorrent.get_or_create(bibliotik_client, torrent_id) if not instance: instance = ReplicaSet.get_bibliotik_master().get_preferred_instance() if not location: location = DownloadLocation.get_bibliotik_preferred() with LockModelTables(BibliotikTransTorrent, TransInstance): try: existing_one = BibliotikTransTorrent.objects.get( info_hash=bibliotik_torrent.info_hash) raise TorrentAlreadyAddedException( u'Already added (instance={0}, new_instance={1}, info_hash={2}).' .format(instance, existing_one.instance, bibliotik_torrent.info_hash)) except BibliotikTransTorrent.DoesNotExist: pass download_dir = os.path.join(location.path, unicode(bibliotik_torrent.id)) def create_b_torrent(): new_b_torrent = BibliotikTransTorrent( instance=instance, location=location, bibliotik_torrent=bibliotik_torrent, info_hash=bibliotik_torrent.info_hash, ) new_b_torrent.save() return new_b_torrent if add_to_client: with transaction.atomic(): b_torrent = create_b_torrent() t_torrent = instance.client.add_torrent( base64.b64encode(bibliotik_torrent.torrent_file), download_dir=download_dir, paused=False) t_torrent = instance.client.get_torrent( t_torrent.id, arguments=BibliotikTransTorrent.sync_t_arguments) if not os.path.exists(download_dir): os.mkdir(download_dir) if not os.stat(download_dir).st_mode & 0777 == 0777: os.chmod(download_dir, 0777) norm_t_torrent(t_torrent) b_torrent.sync_t_torrent(t_torrent) else:
def downloading(request):
    """Render the partial listing every torrent that is still downloading."""
    what_qs = TransTorrent.objects.filter(
        instance__in=ReplicaSet.get_what_master().transinstance_set.all(),
        torrent_done__lt=1).prefetch_related('what_torrent')
    bib_qs = BibliotikTransTorrent.objects.filter(
        instance__in=ReplicaSet.get_bibliotik_master().transinstance_set.all(),
        torrent_done__lt=1).prefetch_related('bibliotik_torrent')
    in_progress = []
    for torrent in list(what_qs) + list(bib_qs):
        # Pull fresh progress numbers from transmission before rendering.
        torrent.sync_t_torrent()
        in_progress.append(torrent)
    in_progress.sort(key=lambda t: t.torrent_date_added)
    return render(request, 'home/part_ui/downloading.html',
                  {'torrents': in_progress})
def files_sync(self):
    # Sync bibliotik torrent data directories into transmission: rsync files,
    # diff disk contents against what each instance already tracks, then add
    # the fully-downloaded newcomers to the least loaded instance.
    check_running()
    print 'Running initial rsync...'
    self.call_rsyncs()
    print 'Iterating instances...'
    current_torrents = {}
    # (torrent_count, instance) for the instance carrying the fewest torrents.
    best_instance = None
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        t_torrents = instance.get_t_torrents(TransTorrentBase.sync_t_arguments)
        if best_instance is None or len(t_torrents) < best_instance[0]:
            best_instance = (len(t_torrents), instance)
        for t_torrent in t_torrents:
            # Download dirs are laid out as <location>/<torrent_id>.
            part = t_torrent.downloadDir.rpartition('/')
            current_torrents[int(part[2])] = {
                'download_dir': part[0],
            }
    new_torrents = {}
    print 'Iterating locations...'
    # Anything on disk that no instance knows about is a candidate to add.
    for location in DownloadLocation.objects.filter(zone=ReplicaSet.ZONE_BIBLIOTIK):
        for i in os.listdir(location.path):
            torrent_id = int(i)
            if torrent_id not in current_torrents:
                new_torrents[torrent_id] = {
                    'id': torrent_id,
                    'location': location,
                }
            else:
                del current_torrents[torrent_id]
    to_add = list()
    for batch_number, batch in enumerate(chunks(new_torrents.itervalues(), 100)):
        print 'Requests status for batch {0}...'.format(batch_number)
        batch_status = torrents_status(unicode(i['id']) for i in batch)
        for row in batch_status:
            if row['status'] == 'downloaded':
                to_add.append(new_torrents[row['id']])
    print 'Running second rsync...'
    self.call_rsyncs()
    # NOTE(review): best_instance is None when the master replica set has no
    # instances, making this subscript raise TypeError — confirm upstream
    # guarantees at least one instance.
    preferred_instance = best_instance[1]
    for row in to_add:
        print 'Downloading torrent {0}'.format(row['id'])
        torrent_file = get_torrent(row['id'])
        print 'Adding torrent {0}'.format(row['id'])
        t_torrent = preferred_instance.client.add_torrent(
            base64.b64encode(torrent_file),
            download_dir=os.path.join(str(row['location'].path), str(row['id'])),
            paused=False
        )
        monitor_torrent(preferred_instance.client, t_torrent.id)
    print 'Completed.'
def run_load_balance(request):
    """Move `count` random not-yet-uploaded torrents off the named source instance."""
    torrent_count = int(request.GET["count"])
    source_instance = request.GET["source"]
    instance = TransInstance.objects.get(name=source_instance)
    for _ in xrange(torrent_count):
        candidate = choice(instance.transtorrent_set.filter(torrent_uploaded=0))
        # Re-evaluate the preferred instance each move: moving a torrent
        # changes the balance.
        candidate = manage_torrent.move_torrent(
            candidate, ReplicaSet.get_what_master().get_preferred_instance())
    return {"success": True}
def __init__(self):
    # Prepare command state: resolve the preferred master instance and the
    # preferred download location up front.
    super(Command, self).__init__()
    # Stub request object; downstream helpers only need an attribute holder.
    self.pseudo_request = lambda: None
    self.trans_instance = ReplicaSet.get_what_master().get_preferred_instance()
    self.download_location = DownloadLocation.get_what_preferred()
    # The rest of the state is populated later, in handle().
    self.data_path = None
    self.torrent_path = None
    self.torrent_info = None
    self.info_hash = None
    self.dest_path = None
    self.what_torrent = None
def torrent_stats(request):
    """Render the stats partial: the what master replica set plus current buffer."""
    try:
        what_buffer = WhatUserSnapshot.get_last().buffer_105
    except WhatUserSnapshot.DoesNotExist:
        # No snapshot yet — show a zero buffer rather than failing.
        what_buffer = 0
    context = {
        'master': ReplicaSet.get_what_master(),
        'buffer': what_buffer,
    }
    return render(request, 'home/part_ui/torrent_stats.html', context)
def run_load_balance(request):
    # Move `count` randomly chosen, not-yet-uploaded torrents from the named
    # source instance to the master's currently preferred instance.
    torrent_count = int(request.GET['count'])
    source_instance = request.GET['source']
    instance = TransInstance.objects.get(name=source_instance)
    for i in xrange(torrent_count):
        t = choice(instance.transtorrent_set.filter(torrent_uploaded=0))
        # The preferred instance is re-evaluated on every iteration —
        # presumably because each move shifts the balance; TODO confirm.
        t = manage_torrent.move_torrent(
            t, ReplicaSet.get_what_master().get_preferred_instance())
    return {
        'success': True
    }
def recently_downloaded(request):
    """Render the most recently completed torrents across both zones."""
    count = 40
    recent = []
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        done = instance.transtorrent_set.filter(torrent_done=1) \
            .order_by('-torrent_date_added')[:count]
        for t in done:
            # Attach the playlist identifier the template links to.
            t.playlist_name = 'what/{0}'.format(t.what_torrent_id)
        recent.extend(done)
    for instance in ReplicaSet.get_bibliotik_master().transinstance_set.all():
        recent.extend(
            instance.bibliotiktranstorrent_set.filter(torrent_done=1)
            .order_by('-torrent_date_added')[:count])
    recent.sort(key=lambda lt: lt.torrent_date_added, reverse=True)
    data = {
        'token': get_user_token(request.user),
        'torrents': recent[:count],
    }
    return render(request, 'home/part_ui/recently_downloaded.html', data)
def update_freeleech(request):
    # Scan the tracker's freeleech listing and auto-add every torrent that is
    # not already downloaded, spreading new ones across locations with space.
    start_time = time.time()
    added = 0
    total_bytes = 0
    total_torrents = 0
    try:
        master = ReplicaSet.get_what_master()
        what_client = get_what_client(request)
        for what_id, what_group, what_torrent in what_client.get_free_torrent_ids():
            total_bytes += what_torrent['size']
            total_torrents += 1
            if not WhatTorrent.is_downloaded(request, what_id=what_id):
                # Pick a random location that still has enough free space.
                download_locations = DownloadLocation.objects.filter(
                    zone=ReplicaSet.ZONE_WHAT)
                download_locations = [l for l in download_locations
                                      if l.free_space_percent >= MIN_FREE_DISK_SPACE]
                if len(download_locations) == 0:
                    LogEntry.add(request.user, u'error',
                                 u'Unable to update freeleech: not enough space on disk.')
                    return {
                        'success': False,
                        'error': u'Not enough free space on disk.'
                    }
                download_location = choice(download_locations)
                instance = master.get_preferred_instance()
                m_torrent = manage_torrent.add_torrent(
                    request, instance, download_location, what_id, True)
                m_torrent.what_torrent.tags = 'seed'
                m_torrent.what_torrent.added_by = request.user
                m_torrent.what_torrent.save()
                added += 1
                LogEntry.add(request.user, u'action',
                             u'Added freeleech {0} to {1} - {2}'.format(
                                 m_torrent, m_torrent.instance,
                                 download_location.path))
        log_type = u'action' if added > 0 else u'info'
        # Only the designated host emails, and only for sizeable batches.
        if added >= FREELEECH_EMAIL_THRESHOLD and socket.gethostname() == FREELEECH_HOSTNAME:
            send_freeleech_email(u'Added {0} freeleech torrents'.format(added))
        time_taken = time.time() - start_time
        LogEntry.add(
            request.user, log_type,
            u'Successfully updated freeleech in {0:.3f}s. {1} added. {2} / {3} torrents total.'.format(
                time_taken, added, filesizeformat(total_bytes), total_torrents))
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error updating freeleech: {0}({1})'.format(
                         type(ex).__name__, unicode(ex)), tb)
    return {
        'success': True,
        'added': added
    }
def move_to_dest_add(request, book_upload): location = DownloadLocation.get_what_preferred() dest_path = os.path.join(location.path, str(book_upload.what_torrent_id)) book_path = os.path.join(dest_path, book_upload.target_filename) if not os.path.exists(dest_path): os.mkdir(dest_path) os.chmod(dest_path, 0777) shutil.copyfile(book_upload.book_data.storage.path(book_upload.book_data), book_path) os.chmod(book_path, 0777) manage_torrent.add_torrent( request, ReplicaSet.get_what_master().get_preferred_instance(), location, book_upload.what_torrent_id)
def handle(self, *args, **options):
    # Management command: migrate what.cd torrents (exported to a JSONL file)
    # into the redacted.ch ("PTH") replica set.
    print 'Initiating what client...'
    what = get_what_client(dummy_request, True)
    index_response = what.request('index')
    print 'Status:', index_response['status']
    print 'Scanning replica sets...'
    # The legacy what.cd replica set must be gone before migration starts.
    try:
        ReplicaSet.objects.get(zone='what.cd')
        raise Exception('Please delete your what.cd replica set now')
    except ReplicaSet.DoesNotExist:
        pass
    # The destination replica set must exist and have at least one instance.
    try:
        pth_replica_set = ReplicaSet.get_what_master()
        if pth_replica_set.transinstance_set.count() < 1:
            raise ReplicaSet.DoesNotExist()
    except ReplicaSet.DoesNotExist:
        raise Exception('Please get your PTH replica set ready')
    print 'Scanning locations...'
    # First pass over the export: build the old-location -> new-location map,
    # prompting the operator for any path not already stored in the DB.
    location_mapping = {}
    with open('what_manager2_torrents.jsonl', 'rb') as torrents_input:
        for line in torrents_input:
            data = ujson.loads(line)
            location_path = data['location']['path']
            if location_path not in location_mapping:
                try:
                    new_location = DownloadLocationEquivalent.objects.get(
                        old_location=location_path).new_location
                except DownloadLocationEquivalent.DoesNotExist:
                    new_location = raw_input(
                        'Enter the new location to map to {}: '.format(location_path))
                    # Persist the answer so the operator is asked only once.
                    DownloadLocationEquivalent.objects.create(
                        old_location=location_path,
                        new_location=new_location,
                    )
                location_mapping[location_path] = new_location
    print 'Location mappings:'
    # Every mapped target must exist as a DownloadLocation in the new zone.
    for old_location, new_location in location_mapping.items():
        try:
            DownloadLocation.objects.get(zone='redacted.ch', path=new_location)
        except DownloadLocation.DoesNotExist:
            raise Exception(
                'Please create the {} location in the DB in zone redacted.ch'.format(
                    new_location))
        print old_location, '=', new_location
    # Second pass: run one migration job per exported torrent.
    with open('what_manager2_torrents.jsonl', 'rb') as torrents_input:
        for line in torrents_input:
            data = ujson.loads(line)
            migration_job = TorrentMigrationJob(what, location_mapping, data,
                                                flac_only=options['flac_only'])
            migration_job.process()
def add_bibliotik_torrent(torrent_id, instance=None, location=None, bibliotik_client=None, add_to_client=True): bibliotik_torrent = BibliotikTorrent.get_or_create(bibliotik_client, torrent_id) if not instance: instance = ReplicaSet.get_bibliotik_master().get_preferred_instance() if not location: location = DownloadLocation.get_bibliotik_preferred() with LockModelTables(BibliotikTransTorrent, TransInstance): try: existing_one = BibliotikTransTorrent.objects.get(info_hash=bibliotik_torrent.info_hash) raise TorrentAlreadyAddedException(u'Already added (instance={0}, new_instance={1}, info_hash={2}).'.format( instance, existing_one.instance, bibliotik_torrent.info_hash)) except BibliotikTransTorrent.DoesNotExist: pass download_dir = os.path.join(location.path, unicode(bibliotik_torrent.id)) def create_b_torrent(): new_b_torrent = BibliotikTransTorrent( instance=instance, location=location, bibliotik_torrent=bibliotik_torrent, info_hash=bibliotik_torrent.info_hash, ) new_b_torrent.save() return new_b_torrent if add_to_client: with transaction.atomic(): b_torrent = create_b_torrent() t_torrent = instance.client.add_torrent( base64.b64encode(bibliotik_torrent.torrent_file), download_dir=download_dir, paused=False ) t_torrent = instance.client.get_torrent( t_torrent.id, arguments=BibliotikTransTorrent.sync_t_arguments) if not os.path.exists(download_dir): os.mkdir(download_dir) if not os.stat(download_dir).st_mode & 0777 == 0777: os.chmod(download_dir, 0777) norm_t_torrent(t_torrent) b_torrent.sync_t_torrent(t_torrent) else:
def add_torrent(request, instance, download_location, what_id, add_to_client=True,
                moving=False):
    # Create (or fetch) the WhatTorrent and register a TransTorrent row for
    # it; optionally also hand the torrent to the transmission client.
    w_torrent = WhatTorrent.get_or_create(request, what_id=what_id)
    masters = list(ReplicaSet.get_what_master().transinstance_set.all())
    with LockModelTables(TransTorrent, LogEntry):
        if add_to_client and not moving:
            # Refuse duplicates anywhere in the master replica set.
            try:
                existing_one = TransTorrent.objects.get(
                    instance__in=masters, info_hash=w_torrent.info_hash)
                raise TorrentAlreadyAddedException(
                    u'Already added (instance={0}, new_instance={1}, info_hash={2}).'
                    .format(instance, existing_one.instance, w_torrent.info_hash))
            except TransTorrent.DoesNotExist:
                pass
        # Only wrap the DB work in a transaction when we also touch the client.
        if add_to_client:
            manager = transaction.atomic
        else:
            manager = dummy_context_manager
        with manager():
            # NOTE(review): this `if True:` looks vestigial; kept to preserve
            # the original structure.
            if True:
                m_torrent = TransTorrent(
                    instance=instance,
                    location=download_location,
                    what_torrent=w_torrent,
                    info_hash=w_torrent.info_hash,
                )
                m_torrent.save()
                if add_to_client:
                    download_dir = os.path.join(download_location.path,
                                                unicode(w_torrent.id))
                    t_torrent = instance.client.add_torrent(
                        w_torrent.torrent_file, download_dir=download_dir,
                        paused=False)
                    t_torrent = instance.client.get_torrent(
                        t_torrent.id, arguments=TransTorrent.sync_t_arguments)
                    norm_t_torrent(t_torrent)
                    m_torrent.sync_t_torrent(t_torrent)
        m_torrent.sync_files()
        return m_torrent
def do_pop(request):
    """Pop the front of the download queue and hand it to Transmission.

    Returns a JSON-serializable dict describing the outcome; never raises.
    """
    target_location = DownloadLocation.get_what_preferred()
    # Guard: refuse to add anything when the disk is nearly full.
    if target_location.free_space_percent < MIN_FREE_DISK_SPACE:
        LogEntry.add(request.user, u'error',
                     u'Failed to add torrent. Not enough disk space.')
        return {
            'success': False,
            'error': u'Not enough free space on disk.'
        }
    queue_item = QueueItem.get_front()
    if not queue_item:
        return {
            'success': False,
            'message': 'Queue is empty.'
        }
    target_instance = ReplicaSet.get_what_master().get_preferred_instance()
    # Already present in the client: just drop the queue entry.
    if WhatTorrent.is_downloaded(request, what_id=queue_item.what_id):
        queue_item.delete()
        return {
            'success': True,
            'message': 'Already added.'
        }
    try:
        m_torrent = manage_torrent.add_torrent(
            request, target_instance, target_location, queue_item.what_id)
        m_torrent.what_torrent.added_by = request.user
        m_torrent.what_torrent.tags = 'seed project'
        m_torrent.what_torrent.save()
        queue_item.delete()
        LogEntry.add(request.user, u'action',
                     u'Popped {0} from queue.'.format(m_torrent))
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Tried popping what_id={0} from queue. Error: {1}'.format(
                         queue_item.what_id, unicode(ex)), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    return {
        'success': True
    }
def add_mam_torrent(torrent_id, instance=None, location=None, mam_client=None, add_to_client=True): mam_torrent = MAMTorrent.get_or_create(mam_client, torrent_id) if not instance: instance = ReplicaSet.get_myanonamouse_master().get_preferred_instance() if not location: location = DownloadLocation.get_myanonamouse_preferred() with LockModelTables(MAMTransTorrent): try: MAMTransTorrent.objects.get(info_hash=mam_torrent.info_hash) raise TorrentAlreadyAddedException(u'Already added.') except MAMTransTorrent.DoesNotExist: pass download_dir = os.path.join(location.path, unicode(mam_torrent.id)) def create_b_torrent(): new_b_torrent = MAMTransTorrent( instance=instance, location=location, mam_torrent=mam_torrent, info_hash=mam_torrent.info_hash, ) new_b_torrent.save() return new_b_torrent if add_to_client: with transaction.atomic(): b_torrent = create_b_torrent() t_torrent = instance.client.add_torrent( base64.b64encode(mam_torrent.torrent_file), download_dir=download_dir, paused=False ) t_torrent = instance.client.get_torrent( t_torrent.id, arguments=MAMTransTorrent.sync_t_arguments) if not os.path.exists(download_dir): os.mkdir(download_dir) if not os.stat(download_dir).st_mode & 0777 == 0777: os.chmod(download_dir, 0777) norm_t_torrent(t_torrent) b_torrent.sync_t_torrent(t_torrent) else:
def add_all(request):
    """Report the difference between torrents in Transmission and ids on disk.

    Collects every what_torrent_id known to the master replica set, then
    compares against the integer-named entries in the directory given by
    ``request.GET['path']`` (which must be a registered what.cd download
    location).

    Raises:
        Exception: on a duplicate what_id across instances, or when the
            requested path is not a registered DownloadLocation.
    """
    what_ids = set()
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        for m_torrent in instance.transtorrent_set.all():
            if m_torrent.what_torrent_id in what_ids:
                # FIX: corrected typo in the error message ('Duplcate').
                raise Exception('Duplicate what_id in transmission')
            what_ids.add(m_torrent.what_torrent_id)
    dest_dir = request.GET['path']
    # Only allow paths that are registered download locations for this zone.
    allowed_paths = [p.path for p in
                     DownloadLocation.objects.filter(zone=ReplicaSet.ZONE_WHAT)]
    if dest_dir not in allowed_paths:
        raise Exception('Path not allowed')
    # Directory entries are expected to be integer what ids.
    dest_dir_ids = {int(i) for i in os.listdir(dest_dir)}
    return {
        'torrents in dir': len(dest_dir_ids),
        'torrents in transmissions': len(what_ids),
        'torrents not added': list(dest_dir_ids - what_ids),
        'torrents missing': list(what_ids - dest_dir_ids)
    }
def sync(request):
    """Sync the what.cd profile and the master replica set's instance DBs."""
    started = time.time()
    section_started = time.time()
    try:
        trans_sync.sync_profile(request)
        profile_time = time.time() - section_started
        section_started = time.time()
    except Exception as ex:
        # Profile sync failure is non-fatal; log it and continue to the DB sync.
        profile_time = 0
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing profile: {0}'.format(ex), tb)
    try:
        master = ReplicaSet.get_what_master()
        trans_sync.sync_all_instances_db(request, master)
        master_db_time = time.time() - section_started
        section_started = time.time()
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing master DB: {0}({1})'.format(
                         type(ex).__name__, ex), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    # NOTE: fulltext table sync is currently disabled.
    elapsed = time.time() - started
    LogEntry.add(request.user, u'info',
                 u'Completed what.cd sync in {0:.3f}s. Profile in {1:.3f}s. Master DB in {2:.3f}s.'
                 .format(elapsed, profile_time, master_db_time))
    return {
        'success': True
    }
def get_torrent_groups_have(torrent_group_ids, sync_torrents=False):
    """Return per-torrent-group "have" info for the given group ids."""
    group_ids = list(torrent_group_ids)
    master_instances = ReplicaSet.get_what_master().transinstance_set.all()
    # Index WhatTorrent rows of the requested groups by their id.
    torrents_by_id = {}
    for torrent in WhatTorrent.objects.filter(torrent_group_id__in=group_ids):
        torrents_by_id[torrent.id] = torrent
    matching_trans = TransTorrent.objects.filter(
        what_torrent__in=torrents_by_id,
        instance__in=master_instances).prefetch_related('location')
    # Bucket (WhatTorrent, TransTorrent) pairs by torrent group.
    pairs_by_group = defaultdict(list)
    for trans in matching_trans:
        what = torrents_by_id[trans.what_torrent_id]
        pairs_by_group[what.torrent_group_id].append((what, trans))
    result = {}
    for group_id in group_ids:
        result[group_id] = get_torrent_group_have(
            pairs_by_group[group_id], sync_torrents)
    return result
def sync(request):
    """Sync the bibliotik master replica set's instance DBs."""
    started = time.time()
    try:
        bibliotik_master = ReplicaSet.get_bibliotik_master()
        trans_sync.sync_all_instances_db(bibliotik_master)
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(
            request.user, u'error',
            u'Error syncing bibliotik master DB: {0}({1})'.format(
                type(ex).__name__, ex), tb)
        return {'success': False, 'error': unicode(ex), 'traceback': tb}
    elapsed = time.time() - started
    LogEntry.add(request.user, u'info',
                 u'Completed bibliotik sync in {0:.3f}s.'.format(elapsed))
    return {'success': True}
def refresh_whattorrent(what_client, what_torrent=None):
    """Refresh the cached what.cd metadata of one torrent.

    Args:
        what_client: API client used to fetch the torrent JSON.
        what_torrent: the WhatTorrent to refresh; defaults to the one with the
            oldest ``retrieved`` timestamp (most stale).

    Returns:
        A status dict: 'refreshed' on success, 'missing' when the tracker no
        longer knows the id but we still seed it, 'deleted' when the row was
        removed because neither side has it.
    """
    if what_torrent is None:
        # torrent_file is deferred so the (large) blob is not loaded.
        what_torrent = WhatTorrent.objects.defer('torrent_file').order_by(
            'retrieved')[0]
    try:
        response = what_client.request('torrent', id=what_torrent.id)['response']
    except RequestException as ex:
        # FIX: use isinstance() instead of "type(...) is dict" — idiomatic and
        # also accepts dict subclasses.
        if ex.response and isinstance(ex.response, dict) and ex.response.get(
                'error') == 'bad id parameter':
            try:
                # Tracker dropped the id but one of our instances still seeds
                # it: report missing instead of deleting.
                TransTorrent.objects.get(
                    instance__in=ReplicaSet.get_what_master(
                    ).transinstance_set.all(),
                    what_torrent=what_torrent)
                return {
                    'success': False,
                    'id': what_torrent.id,
                    'status': 'missing',
                }
            except TransTorrent.DoesNotExist:
                what_torrent.delete()
                return {
                    'success': True,
                    'id': what_torrent.id,
                    'status': 'deleted',
                }
        else:
            return {
                'success': False,
                'status': 'unknown request exception',
            }
    old_retrieved = what_torrent.retrieved
    what_torrent.info = json.dumps(response)
    what_torrent.retrieved = timezone.now()
    what_torrent.save()
    return {
        'success': True,
        'id': what_torrent.id,
        'status': 'refreshed',
        'retrieved': unicode(old_retrieved),
    }
def sync(request):
    """Sync the what.cd profile and the master replica set's instance DBs.

    Returns a status dict; profile-sync failure is non-fatal, master-DB
    failure aborts with an error payload.
    """
    start_time = time.time()
    part_start_time = time.time()
    try:
        trans_sync.sync_profile(request)
        profile_time = time.time() - part_start_time
        part_start_time = time.time()
    except Exception as ex:
        # BUG FIX: profile_time was left undefined on this path, so the final
        # LogEntry.add(...).format(...) raised NameError whenever the profile
        # sync failed (the sibling variant of this view sets it to 0).
        profile_time = 0
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing profile: {0}'.format(ex), tb)
    try:
        master = ReplicaSet.get_what_master()
        trans_sync.sync_all_instances_db(request, master)
        master_db_time = time.time() - part_start_time
        part_start_time = time.time()
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing master DB: {0}({1})'.format(type(ex).__name__, ex), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    # NOTE: fulltext table sync is currently disabled.
    time_taken = time.time() - start_time
    LogEntry.add(request.user, u'info',
                 u'Completed what.cd sync in {0:.3f}s. Profile in {1:.3f}s. Master DB in {2:.3f}s.'
                 .format(time_taken, profile_time, master_db_time))
    return {
        'success': True
    }
def refresh_whattorrent(request):
    """Refresh the cached what.cd metadata of one torrent (view variant).

    Refreshes the torrent given by ``request.GET['id']`` when present,
    otherwise the torrent with the oldest ``retrieved`` timestamp.

    Returns:
        A status dict: 'refreshed' on success, 'missing' when the tracker no
        longer knows the id but we still seed it, 'deleted' when the row was
        removed because neither side has it.
    """
    if 'id' in request.GET:
        most_recent = WhatTorrent.objects.get(id=request.GET['id'])
    else:
        # torrent_file is deferred so the (large) blob is not loaded.
        most_recent = WhatTorrent.objects.defer('torrent_file').order_by('retrieved')[0]
    most_recent_id = most_recent.id
    what = get_what_client(request)
    try:
        response = what.request('torrent', id=most_recent.id)['response']
    except RequestException as ex:
        # FIX: use isinstance() instead of "type(...) is dict" — idiomatic and
        # also accepts dict subclasses.
        if ex.response and isinstance(ex.response, dict) and \
                ex.response.get('error') == 'bad id parameter':
            try:
                # Tracker dropped the id but one of our instances still seeds
                # it: report missing instead of deleting.
                TransTorrent.objects.get(
                    instance__in=ReplicaSet.get_what_master().transinstance_set.all(),
                    what_torrent=most_recent)
                return {
                    'success': False,
                    'id': most_recent_id,
                    'status': 'missing',
                }
            except TransTorrent.DoesNotExist:
                most_recent.delete()
                return {
                    'success': True,
                    'id': most_recent_id,
                    'status': 'deleted',
                }
        else:
            return {
                'success': False,
                'status': 'unknown request exception',
            }
    old_retrieved = most_recent.retrieved
    most_recent.info = json.dumps(response)
    most_recent.retrieved = timezone.now()
    most_recent.save()
    return {
        'success': True,
        'id': most_recent_id,
        'status': 'refreshed',
        'retrieved': unicode(old_retrieved),
    }
def add_all(request):
    """Report the difference between torrents in Transmission and ids on disk.

    Collects every what_torrent_id known to the master replica set, then
    compares against the integer-named entries in the directory given by
    ``request.GET['path']`` (which must be a registered what.cd download
    location).

    Raises:
        Exception: on a duplicate what_id across instances, or when the
            requested path is not a registered DownloadLocation.
    """
    what_ids = set()
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        for m_torrent in instance.transtorrent_set.all():
            if m_torrent.what_torrent_id in what_ids:
                # FIX: corrected typo in the error message ('Duplcate').
                raise Exception('Duplicate what_id in transmission')
            what_ids.add(m_torrent.what_torrent_id)
    dest_dir = request.GET['path']
    # Only allow paths that are registered download locations for this zone.
    if dest_dir not in [
        p.path for p in DownloadLocation.objects.filter(zone=ReplicaSet.ZONE_WHAT)
    ]:
        raise Exception('Path not allowed')
    # Directory entries are expected to be integer what ids.
    dest_dir_ids = {int(i) for i in os.listdir(dest_dir)}
    return {
        'torrents in dir': len(dest_dir_ids),
        'torrents in transmissions': len(what_ids),
        'torrents not added': list(dest_dir_ids - what_ids),
        'torrents missing': list(what_ids - dest_dir_ids)
    }
def handle(self, *args, **options): masters = ReplicaSet.get_what_master().transinstance_set.all() what_torrent_ids = WhatTorrent.objects.all().values_list("id", flat=True) start = 0 page_size = 128 while start < len(what_torrent_ids): print "Updating objects {0}-{1}/{2}".format(start, start + page_size, len(what_torrent_ids)) bulk = WhatTorrent.objects.defer("torrent_file").in_bulk(what_torrent_ids[start : start + page_size]) start += page_size trans_torrents = { t.what_torrent_id: t for t in TransTorrent.objects.filter(instance__in=masters, what_torrent__in=bulk.values()) } for what_torrent in bulk.itervalues(): trans_torrent = trans_torrents.get(what_torrent.id) if trans_torrent is not None and trans_torrent.torrent_done == 1: try: WhatFileMetadataCache.get_metadata_batch(what_torrent, trans_torrent, True) except Exception as ex: print "Failed updating torrent {0}: {1}".format(what_torrent.id, ex)
def add_torrent(request, instance, download_location, what_id, add_to_client=True, moving=False):
    """Create a TransTorrent row for ``what_id`` and optionally add it to Transmission.

    Args:
        request: Django request, forwarded to WhatTorrent.get_or_create.
        instance: TransInstance that will seed the torrent.
        download_location: DownloadLocation whose path receives the data.
        what_id: what.cd torrent id.
        add_to_client: when False, only the DB row is created (no client call).
        moving: when True, skip the duplicate check (torrent is being relocated
            between instances, so it legitimately already exists on a master).

    Returns:
        The saved TransTorrent instance.

    Raises:
        TorrentAlreadyAddedException: if the info hash is already present on
            any master instance (unless ``moving``).
    """
    w_torrent = WhatTorrent.get_or_create(request, what_id=what_id)
    masters = list(ReplicaSet.get_what_master().transinstance_set.all())
    with LockModelTables(TransTorrent, LogEntry):
        if add_to_client and not moving:
            # Duplicate guard: look across every master instance, not just the
            # target one, so the same torrent is never seeded twice.
            try:
                existing_one = TransTorrent.objects.get(instance__in=masters,
                                                        info_hash=w_torrent.info_hash)
                raise TorrentAlreadyAddedException(
                    u'Already added (instance={0}, new_instance={1}, info_hash={2}).'.format(
                        instance, existing_one.instance, w_torrent.info_hash))
            except TransTorrent.DoesNotExist:
                pass
        # Only wrap in a DB transaction when we will actually talk to the
        # Transmission client; otherwise use a no-op context manager.
        if add_to_client:
            manager = transaction.atomic
        else:
            manager = dummy_context_manager
        with manager():
            # FIX: removed a vestigial "if True:" block that only added an
            # extra indentation level with no effect.
            m_torrent = TransTorrent(
                instance=instance,
                location=download_location,
                what_torrent=w_torrent,
                info_hash=w_torrent.info_hash,
            )
            m_torrent.save()
            if add_to_client:
                # Each torrent gets its own directory named after its what id.
                download_dir = os.path.join(download_location.path, unicode(w_torrent.id))
                t_torrent = instance.client.add_torrent(
                    w_torrent.torrent_file, download_dir=download_dir, paused=False
                )
                # Re-fetch with the canonical argument set so sync sees
                # consistent fields.
                t_torrent = instance.client.get_torrent(
                    t_torrent.id, arguments=TransTorrent.sync_t_arguments)
                norm_t_torrent(t_torrent)
                m_torrent.sync_t_torrent(t_torrent)
                m_torrent.sync_files()
    return m_torrent
def download_zip(request, what_id):
    """Serve all files of a downloaded torrent as one zip archive."""
    t_torrent = None
    # The torrent may live on any master instance; take the last match found.
    for instance in ReplicaSet.get_what_master().transinstance_set.all():
        try:
            t_torrent = TransTorrent.objects.get(instance=instance, what_torrent_id=what_id)
        except TransTorrent.DoesNotExist:
            pass
    if not t_torrent:
        return HttpResponse('Could not find that torrent.')
    # Exactly one .torrent file must sit in the torrent's directory; its base
    # name becomes the archive title.
    torrent_candidates = [name for name in os.listdir(t_torrent.path) if '.torrent' in name]
    if len(torrent_candidates) != 1:
        return HttpResponse('Not one .torrent in dir: ' + t_torrent.path)
    archive_title = os.path.splitext(torrent_candidates[0])[0]
    target_dir = os.path.join(t_torrent.path, t_torrent.torrent_name).encode('utf-8')
    torrent_files = []
    if not os.path.isdir(target_dir):
        # Single-file torrent: the payload is the target path itself.
        torrent_files.append((t_torrent.torrent_name, target_dir))
    else:
        # Multi-file torrent: collect every file with its path relative to
        # the torrent root as the archive member name.
        for root, _dirs, filenames in os.walk(target_dir):
            for filename in filenames:
                relative = root.replace(target_dir, '')
                if relative.startswith(('/', '\\')):
                    relative = relative[1:]
                member_name = os.path.join(relative.encode('utf-8'), filename)
                torrent_files.append((member_name, os.path.join(root, filename)))
    download_filename = u'[{0}] {1}.zip'.format(what_id, archive_title).encode('utf-8')
    response = download_zip_handler(download_filename, torrent_files)
    LogEntry.add(
        request.user, u'action',
        u'Downloaded {0} - {1}'.format(
            t_torrent, filesizeformat(response['Content-Length'])))
    return response
def sync(request):
    """Sync the bibliotik master replica set's instance DBs."""
    started = time.time()
    try:
        bibliotik_master = ReplicaSet.get_bibliotik_master()
        trans_sync.sync_all_instances_db(bibliotik_master)
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing bibliotik master DB: {0}({1})'
                     .format(type(ex).__name__, ex), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    elapsed = time.time() - started
    LogEntry.add(request.user, u'info',
                 u'Completed bibliotik sync in {0:.3f}s.'
                 .format(elapsed))
    return {
        'success': True
    }
def sync_replicas(request):
    """Sync every non-master replica set's DBs, then run the replica sync."""
    started = time.time()
    section_started = time.time()
    master = ReplicaSet.get_what_master()
    try:
        # Sync the DB of every replica set except the master itself.
        for candidate in ReplicaSet.objects.all():
            if candidate.id != master.id:
                trans_sync.sync_all_instances_db(request, candidate)
        replicas_dbs_time = time.time() - section_started
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error syncing replicas DB: {0}'.format(ex), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    try:
        trans_sync.sync_all_replicas_to_master()
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(request.user, u'error',
                     u'Error running replica sync: {0}'.format(ex), tb)
        return {
            'success': False,
            'error': unicode(ex),
            'traceback': tb
        }
    elapsed = time.time() - started
    LogEntry.add(request.user, u'info',
                 u'Completed replica sync in {0:.3f}s. DB in {1:.3f}s.'.format(
                     elapsed, replicas_dbs_time))
    return {
        'success': True
    }
def refresh_oldest_torrent(request):
    # Refresh the bibliotik metadata of the most stale BibliotikTorrent.
    bibliotik_id = request.GET['bibliotik_id']
    bibliotik_client = BibliotikClient(bibliotik_id)
    # Oldest 'retrieved' first; torrent_file is deferred so the blob is not
    # loaded. (Despite the name, this is the LEAST recently refreshed row.)
    most_recent = BibliotikTorrent.objects.defer('torrent_file').order_by(
        'retrieved')[0]
    most_recent_id = most_recent.id
    try:
        most_recent.import_bibliotik_data(bibliotik_client)
    except Exception:
        # NOTE(review): any failure here — including transient network
        # errors — is treated as "torrent gone from the site". The row is
        # only deleted when no master instance still seeds it; otherwise a
        # 'request error' status is returned. Confirm this best-effort
        # deletion policy is intended before tightening the except clause.
        try:
            BibliotikTransTorrent.objects.get(
                instance__in=ReplicaSet.get_bibliotik_master(
                ).transinstance_set.all(),
                bibliotik_torrent=most_recent)
            return {
                'success': False,
                'id': most_recent_id,
                'status': 'request error',
            }
        except BibliotikTransTorrent.DoesNotExist:
            most_recent.delete()
            return {
                'success': True,
                'id': most_recent_id,
                'status': 'deleted',
            }
    # Success: stamp the refresh time and report the previous timestamp.
    old_retrieved = most_recent.retrieved
    most_recent.retrieved = timezone.now()
    most_recent.save()
    return {
        'success': True,
        'id': most_recent_id,
        'status': 'refreshed',
        'retrieved': unicode(old_retrieved),
    }
def update_freeleech(request):
    # Scan the tracker's freeleech torrent list and add any we don't have yet.
    start_time = time.time()
    added = 0
    total_bytes = 0
    total_torrents = 0
    try:
        master = ReplicaSet.get_what_master()
        what_client = get_what_client(request)
        for what_id, what_group, what_torrent in what_client.get_free_torrent_ids():
            # what_torrent here is the API payload dict (has a "size" key),
            # not a WhatTorrent model instance.
            total_bytes += what_torrent["size"]
            total_torrents += 1
            if not WhatTorrent.is_downloaded(request, what_id=what_id):
                freeleech_add_torrent(request, master, what_id)
                added += 1
        # Log as an 'action' only when something was actually added.
        log_type = u"action" if added > 0 else u"info"
        # Only email from the designated host so multiple deployments don't
        # send duplicate notifications.
        if added >= FREELEECH_EMAIL_THRESHOLD and socket.gethostname() == FREELEECH_HOSTNAME:
            send_freeleech_email(u"Added {0} freeleech torrents".format(added))
        time_taken = time.time() - start_time
        LogEntry.add(
            request.user,
            log_type,
            u"Successfully updated freeleech in {0:.3f}s. "
            u"{1} added. {2} / {3} torrents total.".format(
                time_taken, added, filesizeformat(total_bytes), total_torrents
            ),
        )
    except Exception as ex:
        tb = traceback.format_exc()
        LogEntry.add(
            request.user, u"error",
            u"Error updating freeleech: {0}({1})".format(type(ex).__name__, unicode(ex)), tb
        )
    # NOTE(review): success is reported even when the except branch above
    # fired; 'added' then only counts torrents processed before the failure.
    # Confirm this best-effort contract is intended by callers.
    return {"success": True, "added": added}