def _createType(self, meta_name, root, movie_info, group, file_type, i):
    """Create a single metadata file (nfo/thumbnail/fanart/...) for a release group.

    Resolves the target path via get<Type>Name and the payload via get<Type>,
    then either copies an existing source file or writes the content out.
    """
    # Get file path
    camelcase_method = underscoreToCamel(file_type.capitalize())
    name = getattr(self, 'get' + camelcase_method + 'Name')(meta_name, root, i)

    # A conf value of None means "not configured yet", which counts as enabled
    if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None):

        # Get file content
        content = getattr(self, 'get' + camelcase_method)(movie_info = movie_info, data = group, i = i)
        if content:
            log.debug('Creating %s file: %s', (file_type, name))
            if os.path.isfile(content):
                content = sp(content)
                name = sp(name)

                if not os.path.exists(os.path.dirname(name)):
                    os.makedirs(os.path.dirname(name))

                # Fix: the data was previously copied twice (copy2 followed by
                # copyfile); a single copyfile plus best-effort copystat suffices.
                shutil.copyfile(content, name)

                # Try and copy stats separately, some filesystems refuse it
                try:
                    shutil.copystat(content, name)
                except:
                    pass
            else:
                self.createFile(name, content)
                group['renamed_files'].append(name)

            try:
                os.chmod(sp(name), Env.getPermission('file'))
            except:
                log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc()))
def searchSingle(self, group):
    """Search and download subtitles for every movie file in *group*.

    Returns True when the search ran, False on error, None when disabled.
    Downloaded subtitle paths are appended to the group's file bookkeeping.
    """
    if self.isDisabled(): return

    try:
        # Flatten the per-file language lists into one "already have" list
        available_languages = sum(group['subtitle_language'].values(), [])
        downloaded = []
        files = [toUnicode(x) for x in group['files']['movie']]
        log.debug('Searching for subtitles for: %s', files)

        for lang in self.getLanguages():
            if lang not in available_languages:
                # subliminal returns a mapping of video -> list of subtitles
                download = subliminal.download_subtitles(files, multi = True, force = self.conf('force'), languages = [lang], services = self.services, cache_dir = Env.get('cache_dir'))
                for subtitle in download:
                    downloaded.extend(download[subtitle])

        # NOTE(review): reconstructed from collapsed source — this loop is placed
        # after the language loop, processing all downloads at once; confirm
        # against upstream indentation.
        for d_sub in downloaded:
            log.info('Found subtitle (%s): %s', (d_sub.language.alpha2, files))
            group['files']['subtitle'].append(sp(d_sub.path))
            group['before_rename'].append(sp(d_sub.path))
            group['subtitle_language'][sp(d_sub.path)] = [d_sub.language.alpha2]

        return True

    except:
        log.error('Failed searching for subtitle: %s', (traceback.format_exc()))

    return False
def getAllDownloadStatus(self, ids):
    """Build a ReleaseDownloadList for the rTorrent torrents whose hash is in *ids*."""
    log.debug('Checking rTorrent download status.')

    if not self.connect():
        return []

    try:
        release_downloads = ReleaseDownloadList(self)

        for torrent in self.rt.get_torrents():
            if torrent.info_hash not in ids:
                continue

            torrent_directory = os.path.normpath(torrent.directory)

            # Make every file path absolute inside the torrent directory
            torrent_files = []
            for file in torrent.get_files():
                file_path = file.path
                if not os.path.normpath(file_path).startswith(torrent_directory):
                    file_path = os.path.join(torrent_directory, file_path.lstrip('/'))
                torrent_files.append(sp(file_path))

            if torrent.complete:
                status = 'seeding' if torrent.active else 'completed'
            else:
                status = 'busy'

            if torrent.down_rate > 0:
                timeleft = str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate))
            else:
                timeleft = -1

            release_downloads.append({
                'id': torrent.info_hash,
                'name': torrent.name,
                'status': status,
                'seed_ratio': torrent.ratio,
                'original_status': torrent.state,
                'timeleft': timeleft,
                'folder': sp(torrent.directory),
                'files': '|'.join(torrent_files)
            })

        return release_downloads

    except Exception as err:
        log.error('Failed to get status from rTorrent: %s', err)
        return []
def isSubFolder(sub_folder, base_folder):
    """Return True when *sub_folder* lives inside (or equals) *base_folder*."""
    if not (base_folder and sub_folder):
        return False

    # Trailing separator prevents 'foo' from matching 'foobar'
    base = sp(os.path.realpath(base_folder)) + os.path.sep
    subfolder = sp(os.path.realpath(sub_folder)) + os.path.sep
    return os.path.commonprefix([subfolder, base]) == base
def deleteEmptyFolder(self, folder, show_error = True, only_clean = None):
    """Remove empty directory trees under *folder*, then *folder* itself when empty."""
    folder = sp(folder)

    for item in os.listdir(folder):
        full_folder = sp(os.path.join(folder, item))

        # When only_clean is given, restrict to those directory names
        if only_clean and (item not in only_clean or not os.path.isdir(full_folder)):
            continue

        # Bottom-up walk so children are removed before their parents
        for subfolder, dirs, files in os.walk(full_folder, topdown = False):
            try:
                os.rmdir(subfolder)
            except:
                if show_error:
                    log.info2('Couldn\'t remove directory %s: %s', (subfolder, traceback.format_exc()))

    try:
        os.rmdir(folder)
    except:
        if show_error:
            log.error('Couldn\'t remove empty directory %s: %s', (folder, traceback.format_exc()))
def getAllDownloadStatus(self, ids):
    """Build a ReleaseDownloadList for the qBittorrent torrents whose hash is in *ids*."""
    log.debug('Checking qBittorrent download status.')

    if not self.connect():
        return []

    try:
        release_downloads = ReleaseDownloadList(self)

        for torrent in self.qb.get_torrents():
            if torrent.hash not in ids:
                continue

            torrent.update_general()  # get extra info
            torrent_filelist = torrent.get_files()

            torrent_files = []
            torrent_dir = os.path.join(torrent.save_path, torrent.name)

            if os.path.isdir(torrent_dir):
                torrent.save_path = torrent_dir

            # multi file torrent, path.isdir check makes sure we're not in the root download folder
            if len(torrent_filelist) > 1 and os.path.isdir(torrent_dir):
                for root, _, files in os.walk(torrent.save_path):
                    for f in files:
                        torrent_files.append(sp(os.path.join(root, f)))
            else:
                # multi or single file placed directly in torrent.save_path
                for f in torrent_filelist:
                    file_path = os.path.join(torrent.save_path, f.name)
                    if os.path.isfile(file_path):
                        torrent_files.append(sp(file_path))

            release_downloads.append({
                'id': torrent.hash,
                'name': torrent.name,
                'status': self.getTorrentStatus(torrent),
                'seed_ratio': torrent.ratio,
                'original_status': torrent.state,
                'timeleft': torrent.progress * 100 if torrent.progress else -1,  # percentage
                'folder': sp(torrent.save_path),
                'files': torrent_files
            })

        return release_downloads

    except Exception as e:
        log.error('Failed to get status from qBittorrent: %s', e)
        return []
def compact(self, try_repair = True, **kwargs):
    """Compact the CodernityDB database; on index errors optionally rebuild
    all indexes once and retry (with try_repair = False to avoid recursion).

    :return: dict with a boolean 'success' key
    """
    success = False
    db = self.getDB()

    # Removing left over compact files
    db_path = sp(db.path)
    for f in os.listdir(sp(db.path)):
        for x in ['_compact_buck', '_compact_stor']:
            if f[-len(x):] == x:
                os.unlink(os.path.join(db_path, f))

    try:
        start = time.time()
        size = float(db.get_db_details().get('size', 0))
        log.debug('Compacting database, current size: %sMB', round(size/1048576, 2))

        db.compact()

        new_size = float(db.get_db_details().get('size', 0))
        log.debug('Done compacting database in %ss, new size: %sMB, saved: %sMB', (round(time.time()-start, 2), round(new_size/1048576, 2), round((size-new_size)/1048576, 2)))
        success = True
    except (IndexException, AttributeError):
        if try_repair:
            log.error('Something wrong with indexes, trying repair')

            # Remove all indexes
            old_indexes = self.indexes.keys()
            for index_name in old_indexes:
                try:
                    db.destroy_index(index_name)
                except IndexNotFoundException:
                    pass
                except:
                    log.error('Failed removing old index %s', index_name)

            # Add them again
            for index_name in self.indexes:
                klass = self.indexes[index_name]

                # Category index
                index_instance = klass(db.path, index_name)
                try:
                    db.add_index(index_instance)
                    db.reindex_index(index_name)
                except IndexConflict:
                    pass
                except:
                    # Anything else is fatal for the repair attempt
                    log.error('Failed adding index %s', index_name)
                    raise

            # Retry once, without the repair path
            self.compact(try_repair = False)
        else:
            log.error('Failed compact: %s', traceback.format_exc())
    except:
        log.error('Failed compact: %s', traceback.format_exc())

    return {
        'success': success
    }
def test(self):
    """
    Test and see if the torrent and movie directories is writable

    :return: boolean
    """

    def probe_writable(directory):
        # Drop a probe file into the directory and see if it sticks
        test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))
        self.createFile(test_file, 'This is a test file')
        if os.path.isfile(test_file):
            os.remove(test_file)
            return True
        return False

    writable = False

    movie_directory = self.conf('movie_directory')
    if movie_directory and os.path.isdir(movie_directory):
        writable = probe_writable(movie_directory)

    torrent_directory = self.conf('torrent_directory')
    if writable and torrent_directory and os.path.isdir(torrent_directory):
        writable = probe_writable(torrent_directory)

    return writable
def getUserDir():
    """Return the current user's home directory as a safe path.

    On POSIX this unconditionally overwrites $HOME from the passwd
    database before expanding '~'.
    """
    try:
        import pwd
        home = pwd.getpwuid(os.geteuid()).pw_dir
        os.environ['HOME'] = sp(home)
    except:
        pass

    return sp(os.path.expanduser('~'))
def getAllDownloadStatus(self, ids):
    """Build a ReleaseDownloadList from the uTorrent queue for hashes in *ids*."""
    log.debug('Checking uTorrent download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)

    data = self.utorrent_api.get_status()
    if not data:
        log.error('Error getting data from uTorrent')
        return []

    queue = json.loads(data)
    if queue.get('error'):
        log.error('Error getting data from uTorrent: %s', queue.get('error'))
        return []

    if not queue.get('torrents'):
        log.debug('Nothing in queue')
        return []

    # Get torrents
    for torrent in queue['torrents']:
        if torrent[0] not in ids:
            continue

        # Get files of the torrent
        torrent_files = []
        try:
            raw_files = json.loads(self.utorrent_api.get_files(torrent[0]))
            torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in raw_files['files'][1]]
        except:
            log.debug('Failed getting files from torrent: %s', torrent[2])

        # torrent[1] is a status bitfield; torrent[4] == 1000 appears to mean
        # 100% complete (permille) — confirm against uTorrent webapi docs
        status = 'busy'
        if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
            status = 'seeding'
        elif torrent[1] & self.status_flags['ERROR']:
            status = 'failed'
        elif torrent[4] == 1000:
            status = 'completed'

        if not status == 'busy':
            self.removeReadOnly(torrent_files)

        release_downloads.append({
            'id': torrent[0],
            'name': torrent[2],
            'status': status,
            'seed_ratio': float(torrent[7]) / 1000,
            'original_status': torrent[1],
            'timeleft': str(timedelta(seconds = torrent[10])),
            'folder': sp(torrent[26]),
            'files': '|'.join(torrent_files)
        })

    return release_downloads
def getAllDownloadStatus(self, ids):
    """Return release download info for uTorrent torrents matching *ids* (files as a list)."""
    log.debug('Checking uTorrent download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)

    data = self.utorrent_api.get_status()
    if not data:
        log.error('Error getting data from uTorrent')
        return []

    queue = json.loads(data)
    if queue.get('error'):
        log.error('Error getting data from uTorrent: %s', queue.get('error'))
        return []

    if not queue.get('torrents'):
        log.debug('Nothing in queue')
        return []

    # Get torrents
    for torrent in queue['torrents']:
        if torrent[0] not in ids:
            continue

        # Get files of the torrent
        torrent_files = []
        try:
            file_data = json.loads(self.utorrent_api.get_files(torrent[0]))
            for torrent_file in file_data['files'][1]:
                torrent_files.append(sp(os.path.join(torrent[26], torrent_file[0])))
        except:
            log.debug('Failed getting files from torrent: %s', torrent[2])

        # torrent[1] is a status bitfield; torrent[4] == 1000 appears to mean
        # 100% complete (permille) — confirm against uTorrent webapi docs
        if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
            status = 'seeding'
        elif torrent[1] & self.status_flags['ERROR']:
            status = 'failed'
        elif torrent[4] == 1000:
            status = 'completed'
        else:
            status = 'busy'

        if not status == 'busy':
            self.removeReadOnly(torrent_files)

        release_downloads.append({
            'id': torrent[0],
            'name': torrent[2],
            'status': status,
            'seed_ratio': float(torrent[7]) / 1000,
            'original_status': torrent[1],
            'timeleft': str(timedelta(seconds = torrent[10])),
            'folder': sp(torrent[26]),
            'files': torrent_files
        })

    return release_downloads
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """

    log.debug('Checking Hadouken download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)
    queue = self.hadouken_api.get_by_hash_list(ids)

    if not queue:
        return []

    for torrent in queue:
        if torrent is None:
            continue

        torrent_filelist = self.hadouken_api.get_files_by_hash(torrent['InfoHash'])

        torrent_files = []
        save_path = torrent['SavePath']

        # The 'Path' key for each file_item contains
        # the full path to the single file relative to the
        # torrents save path.

        # For a single file torrent the result would be,
        # - Save path: "C:\Downloads"
        # - file_item['Path'] = "file1.iso"
        # Resulting path: "C:\Downloads\file1.iso"

        # For a multi file torrent the result would be,
        # - Save path: "C:\Downloads"
        # - file_item['Path'] = "dirname/file1.iso"
        # Resulting path: "C:\Downloads\dirname/file1.iso"

        for file_item in torrent_filelist:
            torrent_files.append(sp(os.path.join(save_path, file_item['Path'])))

        release_downloads.append({
            'id': torrent['InfoHash'].upper(),
            'name': torrent['Name'],
            'status': self.get_torrent_status(torrent),
            'seed_ratio': self.get_seed_ratio(torrent),
            'original_status': torrent['State'],
            'timeleft': -1,
            # Fix: was len(torrent_files == 1), which compares a list to an int
            # and calls len() on the resulting bool -> TypeError at runtime.
            'folder': sp(save_path if len(torrent_files) == 1 else os.path.join(save_path, torrent['Name'])),
            'files': torrent_files
        })

    return release_downloads
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """

    log.debug('Checking qBittorrent download status.')

    if not self.connect():
        return []

    try:
        release_downloads = ReleaseDownloadList(self)
        torrents = self.qb.get_torrents()

        for torrent in torrents:
            if torrent.hash not in ids:
                continue

            torrent.update_general()  # get extra info
            torrent_filelist = torrent.get_files()
            torrent_dir = os.path.join(torrent.save_path, torrent.name)

            if os.path.isdir(torrent_dir):
                torrent.save_path = torrent_dir

            torrent_files = []
            # multi file torrent, path.isdir check makes sure we're not in the root download folder
            if len(torrent_filelist) > 1 and os.path.isdir(torrent_dir):
                for root, _, files in os.walk(torrent.save_path):
                    torrent_files.extend(sp(os.path.join(root, f)) for f in files)
            # multi or single file placed directly in torrent.save_path
            else:
                for f in torrent_filelist:
                    file_path = os.path.join(torrent.save_path, f.name)
                    if os.path.isfile(file_path):
                        torrent_files.append(sp(file_path))

            release_downloads.append({
                'id': torrent.hash,
                'name': torrent.name,
                'status': self.getTorrentStatus(torrent),
                'seed_ratio': torrent.ratio,
                'original_status': torrent.state,
                'timeleft': torrent.progress * 100 if torrent.progress else -1,  # percentage
                'folder': sp(torrent.save_path),
                'files': torrent_files
            })

        return release_downloads

    except Exception as e:
        log.error('Failed to get status from qBittorrent: %s', e)
        return []
def getUserDir():
    """Return the current user's home directory as a safe path.

    On POSIX, fill in a missing or empty $HOME from the passwd database
    first so os.path.expanduser can resolve '~'.
    """
    try:
        import pwd
        # Fix: os.environ['HOME'] raised KeyError when HOME was unset, so the
        # fallback never ran; .get covers both the unset and empty cases.
        if not os.environ.get('HOME'):
            os.environ['HOME'] = sp(pwd.getpwuid(os.geteuid()).pw_dir)
    except:
        pass

    return sp(os.path.expanduser('~'))
def getAllDownloadStatus(self, ids):
    """Return a ReleaseDownloadList for the Deluge torrents matching *ids*."""
    log.debug('Checking Deluge download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)

    queue = self.drpc.get_alltorrents(ids)

    if not queue:
        log.debug('Nothing in queue or error')
        return []

    for torrent_id in queue:
        torrent = queue[torrent_id]

        if not 'hash' in torrent:
            # When given a list of ids, deluge will return an empty item for a non-existant torrent.
            continue

        log.debug('name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_on_completed'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))

        # Deluge has no easy way to work out if a torrent is stalled or failing.
        #status = 'failed'
        status = 'busy'
        if torrent['is_seed'] and tryFloat(torrent['ratio']) < tryFloat(torrent['stop_ratio']):
            # We have torrent['seeding_time'] to work out what the seeding time is, but we do not
            # have access to the downloader seed_time, as with deluge we have no way to pass it
            # when the torrent is added. So Deluge will only look at the ratio.
            # See above comment in download().
            status = 'seeding'
        elif torrent['is_seed'] and torrent['is_finished'] and torrent['paused'] and torrent['state'] == 'Paused':
            status = 'completed'

        download_dir = sp(torrent['save_path'])
        if torrent['move_on_completed']:
            download_dir = torrent['move_completed_path']

        torrent_files = []
        for file_item in torrent['files']:
            torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))

        release_downloads.append({
            'id': torrent['hash'],
            'name': torrent['name'],
            'status': status,
            'original_status': torrent['state'],
            'seed_ratio': torrent['ratio'],
            'timeleft': str(timedelta(seconds = torrent['eta'])),
            # Single-file torrents live directly in download_dir; multi-file ones in a named subfolder
            'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
            'files': torrent_files,
        })

    return release_downloads
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """

    log.debug('Checking qBittorrent download status.')

    if not self.connect():
        return []

    try:
        release_downloads = ReleaseDownloadList(self)

        for torrent in self.qb.torrents(status = 'all', label = self.conf('label')):
            if torrent['hash'] not in ids:
                continue

            torrent_filelist = self.qb.get_torrent_files(torrent['hash'])
            torrent_dir = os.path.join(torrent['save_path'], torrent['name'])

            if os.path.isdir(torrent_dir):
                torrent['save_path'] = torrent_dir

            torrent_files = []
            # multi file torrent, path.isdir check makes sure we're not in the root download folder
            if len(torrent_filelist) > 1 and os.path.isdir(torrent_dir):
                for root, _, files in os.walk(torrent['save_path']):
                    for f in files:
                        torrent_files.append(sp(os.path.join(root, f)))
            else:
                # multi or single file placed directly in torrent.save_path
                for f in torrent_filelist:
                    file_path = os.path.join(torrent['save_path'], f['name'])
                    if os.path.isfile(file_path):
                        torrent_files.append(sp(file_path))

            release_downloads.append({
                'id': torrent['hash'],
                'name': torrent['name'],
                'status': self.getTorrentStatus(torrent),
                'seed_ratio': torrent['ratio'],
                'original_status': torrent['state'],
                'timeleft': str(timedelta(seconds = torrent['eta'])),
                'folder': sp(torrent['save_path']),
                'files': torrent_files
            })

        return release_downloads

    except Exception as e:
        log.error('Failed to get status from qBittorrent: %s', e)
        return []
def isSubFolder(sub_folder, base_folder):
    """Return truthy if *sub_folder* is the same as or inside *base_folder*.

    Fix: an entire second implementation (realpath/commonprefix) sat after the
    return statement and was unreachable dead code — removed.
    """
    # Trailing separator prevents 'foo' from matching 'foobar'
    return base_folder and sub_folder and ss(os.path.normpath(base_folder).rstrip(os.path.sep) + os.path.sep) in ss(os.path.normpath(sub_folder).rstrip(os.path.sep) + os.path.sep)
def getAllDownloadStatus(self, ids):
    """Return a ReleaseDownloadList for the Transmission torrents matching *ids*.

    Fix: the final ``return`` had become separated from its value, so the
    function returned None and ``release_downloads`` was a dangling expression.
    """
    log.debug('Checking Transmission download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)

    return_params = {
        'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
    }

    session = self.trpc.get_session()
    queue = self.trpc.get_alltorrents(return_params)

    if not (queue and queue.get('torrents')):
        log.debug('Nothing in queue or error')
        return []

    for torrent in queue['torrents']:
        if torrent['hashString'] in ids:
            log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s', (torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir']))

            status = 'busy'
            if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
                status = 'failed'
            elif torrent['status'] == 0 and torrent['percentDone'] == 1:
                status = 'completed'
            elif torrent['status'] in [5, 6]:
                status = 'seeding'

            # Incomplete downloads may live in the session's incomplete dir
            if session['incomplete-dir-enabled'] and status == 'busy':
                torrent_folder = session['incomplete-dir']
            else:
                torrent_folder = torrent['downloadDir']

            torrent_files = []
            for file_item in torrent['files']:
                torrent_files.append(sp(os.path.join(torrent_folder, file_item['name'])))

            release_downloads.append({
                'id': torrent['hashString'],
                'name': torrent['name'],
                'status': status,
                'original_status': torrent['status'],
                'seed_ratio': torrent['uploadRatio'],
                'timeleft': str(timedelta(seconds = torrent['eta'])),
                'folder': sp(torrent_folder if len(torrent_files) == 1 else os.path.join(torrent_folder, torrent['name'])),
                'files': torrent_files
            })

    return release_downloads
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """

    log.debug('Checking Hadouken download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)
    queue = self.hadouken_api.get_by_hash_list(ids)

    if not queue:
        return []

    for torrent in queue:
        if torrent is None:
            continue

        torrent_filelist = self.hadouken_api.get_files_by_hash(torrent.info_hash)

        torrent_files = []
        for file_item in torrent_filelist:
            torrent_files.append(sp(os.path.join(torrent.save_path, file_item)))

        release_downloads.append({
            'id': torrent.info_hash.upper(),
            'name': torrent.name,
            'status': torrent.get_status(),
            'seed_ratio': torrent.get_seed_ratio(),
            'original_status': torrent.state,
            'timeleft': -1,
            # Fix: was len(torrent_files == 1), which compares a list to an int
            # and calls len() on the resulting bool -> TypeError at runtime.
            'folder': sp(torrent.save_path if len(torrent_files) == 1 else os.path.join(torrent.save_path, torrent.name)),
            'files': torrent_files
        })

    return release_downloads
def getAllDownloadStatus(self, ids):
    """Collect release download info for qBittorrent torrents whose hash is in *ids*."""
    log.debug("Checking qBittorrent download status.")

    if not self.connect():
        return []

    try:
        result = ReleaseDownloadList(self)

        for torrent in self.qb.get_torrents():
            if torrent.hash not in ids:
                continue

            torrent.update_general()  # get extra info
            filelist = torrent.get_files()
            torrent_dir = os.path.join(torrent.save_path, torrent.name)

            if os.path.isdir(torrent_dir):
                torrent.save_path = torrent_dir

            collected = []
            # multi file torrent, path.isdir check makes sure we're not in the root download folder
            if len(filelist) > 1 and os.path.isdir(torrent_dir):
                for root, _, files in os.walk(torrent.save_path):
                    for f in files:
                        collected.append(sp(os.path.join(root, f)))
            else:
                # multi or single file placed directly in torrent.save_path
                for f in filelist:
                    candidate = os.path.join(torrent.save_path, f.name)
                    if os.path.isfile(candidate):
                        collected.append(sp(candidate))

            result.append({
                "id": torrent.hash,
                "name": torrent.name,
                "status": self.getTorrentStatus(torrent),
                "seed_ratio": torrent.ratio,
                "original_status": torrent.state,
                "timeleft": torrent.progress * 100 if torrent.progress else -1,  # percentage
                "folder": sp(torrent.save_path),
                "files": collected,
            })

        return result

    except Exception as e:
        log.error("Failed to get status from qBittorrent: %s", e)
        return []
def getAllDownloadStatus(self):
    """Return a ReleaseDownloadList for every rTorrent torrent, or False on error.

    Fix: ``except Exception, err`` is Python-2-only syntax; ``as err`` works on
    Python 2.6+ and Python 3.
    """
    log.debug('Checking rTorrent download status.')

    if not self.connect():
        return False

    try:
        torrents = self.rt.get_torrents()

        release_downloads = ReleaseDownloadList(self)

        for torrent in torrents:
            torrent_files = []
            for file_item in torrent.get_files():
                torrent_files.append(sp(os.path.join(torrent.directory, file_item.path)))

            status = 'busy'
            if torrent.complete:
                if torrent.active:
                    status = 'seeding'
                else:
                    status = 'completed'

            release_downloads.append({
                'id': torrent.info_hash,
                'name': torrent.name,
                'status': status,
                'seed_ratio': torrent.ratio,
                'original_status': torrent.state,
                'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
                'folder': sp(torrent.directory),
                'files': '|'.join(torrent_files)
            })

        return release_downloads

    except Exception as err:
        log.error('Failed to get status from rTorrent: %s', err)
        return False
def create(self, message = None, group = None):
    """Create nfo/thumbnail/fanart metadata files for a renamed movie group."""
    if self.isDisabled(): return
    if not group: group = {}

    log.info('Creating %s metadata.', self.getName())

    # Update library to get latest info
    try:
        updated_library = fireEvent('library.update.movie', group['library']['identifier'], extended = True, single = True)
        group['library'] = mergeDicts(group['library'], updated_library)
    except:
        log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc())

    root_name = self.getRootName(group)
    meta_name = os.path.basename(root_name)
    root = os.path.dirname(root_name)

    movie_info = group['library'].get('info')

    for file_type in ['nfo', 'thumbnail', 'fanart']:
        try:
            # Get file path
            name = getattr(self, 'get' + file_type.capitalize() + 'Name')(meta_name, root)

            # A conf value of None means "not configured yet", which counts as enabled
            if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None):

                # Get file content
                content = getattr(self, 'get' + file_type.capitalize())(movie_info = movie_info, data = group)
                if content:
                    log.debug('Creating %s file: %s', (file_type, name))
                    if os.path.isfile(content):
                        content = sp(content)
                        name = sp(name)

                        # Fix: the data was previously copied twice (copy2 followed
                        # by copyfile); one copyfile plus best-effort copystat suffices.
                        shutil.copyfile(content, name)

                        # Try and copy stats separately, some filesystems refuse it
                        try:
                            shutil.copystat(content, name)
                        except:
                            pass
                    else:
                        self.createFile(name, content)
                        group['renamed_files'].append(name)

                    try:
                        os.chmod(sp(name), Env.getPermission('file'))
                    except:
                        log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc()))
        except:
            log.error('Unable to create %s file: %s', (file_type, traceback.format_exc()))
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """

    log.debug('Checking rTorrent download status.')

    if not self.connect():
        return []

    try:
        release_downloads = ReleaseDownloadList(self)

        for torrent in self.rt.get_torrents():
            if torrent.info_hash not in ids:
                continue

            torrent_directory = os.path.normpath(torrent.directory)

            # Anchor relative file paths inside the torrent directory
            torrent_files = []
            for file in torrent.get_files():
                file_path = file.path
                if not os.path.normpath(file_path).startswith(torrent_directory):
                    file_path = os.path.join(torrent_directory, file_path.lstrip('/'))
                torrent_files.append(sp(file_path))

            if torrent.down_rate > 0:
                timeleft = str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate))
            else:
                timeleft = -1

            release_downloads.append({
                'id': torrent.info_hash,
                'name': torrent.name,
                'status': self.getTorrentStatus(torrent),
                'seed_ratio': torrent.ratio,
                'original_status': torrent.state,
                'timeleft': timeleft,
                'folder': sp(torrent.directory),
                'files': torrent_files
            })

        return release_downloads

    except Exception as err:
        log.error('Failed to get status from rTorrent: %s', err)
        return []
def createFile(self, path, content, binary = False):
    """Write *content* to *path*; requests Response objects are streamed via a
    temp file and renamed into place. Failed plain writes are cleaned up.

    Fix: the plain-write path leaked the file handle when write() raised
    (f.close() was never reached); the with-block guarantees closure.
    """
    path = sp(path)

    self.makeDir(os.path.dirname(path))

    if os.path.exists(path):
        log.debug('%s already exists, overwriting file with new version', path)

    write_type = 'w+' if not binary else 'w+b'

    # Stream file using response object
    if isinstance(content, requests.models.Response):

        # Write file to temp
        with open('%s.tmp' % path, write_type) as f:
            for chunk in content.iter_content(chunk_size = 1048576):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()

        # Rename to destination
        os.rename('%s.tmp' % path, path)
    else:
        try:
            with open(path, write_type) as f:
                f.write(content)
            os.chmod(path, Env.getPermission('file'))
        except:
            log.error('Unable to write file "%s": %s', (path, traceback.format_exc()))
            # Don't leave a partial file behind
            if os.path.isfile(path):
                os.remove(path)
def getDirectories(self, path = '/', show_hidden = True):
    """List the sub-directories of *path*, sorted; drive letters for a Windows root."""
    # Return driveletters or root if path is empty
    if path == '/' or not path or path == '\\':
        if os.name == 'nt':
            return self.getDriveLetters()
        path = '/'

    path = sp(path)
    dirs = []
    for entry in os.listdir(path):
        full = sp(os.path.join(path, entry))
        # Hidden directories only appear when show_hidden is truthy
        if os.path.isdir(full) and (not self.is_hidden(full) or bool(int(show_hidden))):
            dirs.append(toUnicode('%s%s' % (full, os.path.sep)))

    return sorted(dirs)
def removePyc(folder, only_excess = True, show_logs = True):
    """Delete .pyc files (by default only orphans without a .py) and prune empty dirs."""
    folder = sp(folder)

    for root, dirs, files in os.walk(folder):

        pyc_files = [name for name in files if name.endswith('.pyc')]
        py_files = set(name for name in files if name.endswith('.py'))

        if only_excess:
            # Keep only compiled files whose source is gone
            pyc_files = [name for name in pyc_files if name[:-1] not in py_files]

        for excess_pyc_file in pyc_files:
            full_path = os.path.join(root, excess_pyc_file)
            if show_logs:
                log.debug('Removing old PYC file: %s', full_path)
            try:
                os.remove(full_path)
            except:
                log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))

        for dir_name in dirs:
            full_path = os.path.join(root, dir_name)
            if len(os.listdir(full_path)) == 0:
                try:
                    os.rmdir(full_path)
                except:
                    log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
def deleteEmptyFolder(self, folder, show_error = True, only_clean = None):
    """Remove empty sub-directories of *folder*, then *folder* itself when empty."""
    folder = sp(folder)

    for item in os.listdir(folder):
        full_folder = os.path.join(folder, item)

        # When only_clean is given, restrict to those directory names
        if only_clean and (item not in only_clean or not os.path.isdir(full_folder)):
            continue

        for root, dirs, files in os.walk(full_folder):
            for dir_name in dirs:
                full_path = os.path.join(root, dir_name)
                if len(os.listdir(full_path)) == 0:
                    try:
                        os.rmdir(full_path)
                    except:
                        if show_error:
                            log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))

    try:
        os.rmdir(folder)
    except:
        if show_error:
            log.error('Couldn\'t remove empty directory %s: %s', (folder, traceback.format_exc()))
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """

    raw_statuses = self.call('nzb')

    release_downloads = ReleaseDownloadList(self)
    for nzb in raw_statuses.get('nzbs', []):
        nzb_id = os.path.basename(nzb['nzbFileName'])
        if nzb_id not in ids:
            continue

        # Map the downloader's numeric state onto CouchPotato's status strings
        if nzb['state'] == 20:
            status = 'completed'
        elif nzb['state'] in [21, 22, 24]:
            status = 'failed'
        else:
            status = 'busy'

        release_downloads.append({
            'temp_id': nzb['id'],
            'id': nzb_id,
            'name': nzb['uiTitle'],
            'status': status,
            'original_status': nzb['state'],
            'timeleft': -1,
            'folder': sp(nzb['destinationPath']),
        })

    return release_downloads
def download(self, url = '', dest = None, overwrite = False, urlopen_kwargs = None):
    """Download *url* to *dest* (the cache dir by default); return the path or False."""
    urlopen_kwargs = urlopen_kwargs or {}

    # Return response object to stream download
    urlopen_kwargs['stream'] = True

    if not dest:  # to Cache
        dest = os.path.join(Env.get('cache_dir'), ss('%s.%s' % (md5(url), get_extension(url))))

    dest = sp(dest)

    # Reuse an existing download unless asked to overwrite
    if not overwrite and os.path.isfile(dest):
        return dest

    try:
        filedata = self.urlopen(url, **urlopen_kwargs)
    except:
        log.error('Failed downloading file %s: %s', (url, traceback.format_exc()))
        return False

    self.createFile(dest, filedata, binary = True)
    return dest
def processComplete(self, release_download, delete_files):
    """Remove a finished torrent from rTorrent, optionally deleting its data on disk."""
    log.debug('Requesting rTorrent to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

    if not self.connect():
        return False

    torrent = self.rt.find_torrent(release_download['id'])
    if torrent is None:
        return False

    if delete_files:
        # will only delete files, not dir/sub-dir
        for file_item in torrent.get_files():
            os.unlink(os.path.join(torrent.directory, file_item.path))

        if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
            # Remove empty directories bottom up
            try:
                for path, _, _ in os.walk(sp(torrent.directory), topdown = False):
                    os.rmdir(path)
            except OSError:
                log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

    torrent.erase()  # just removes the torrent, doesn't delete data

    return True
def createFileName(self, data, filedata, movie):
    """Build the download file name, guessing .rar for mislabeled nzb payloads."""
    name = sp(os.path.join(self.createNzbName(data, movie)))

    # An 'nzb' payload without any nzb markup is most likely a rar archive
    is_nzb = data.get('protocol') == 'nzb'
    if is_nzb and 'DOCTYPE nzb' not in filedata and '</nzb>' not in filedata:
        return '%s.%s' % (name, 'rar')

    return '%s.%s' % (name, data.get('protocol'))
def clean(self, release_id):
    """Drop non-existing files from a release record; delete it when it has none."""
    try:
        db = get_db()
        rel = db.get('id', release_id)
        raw_files = rel.get('files')

        if len(raw_files) == 0:
            # Nothing on disk at all -> remove the release entirely
            self.delete(rel['_id'])
        else:
            files = {}
            for file_type in raw_files:
                for release_file in raw_files.get(file_type, []):
                    if os.path.isfile(sp(release_file)):
                        files.setdefault(file_type, []).append(release_file)

            rel['files'] = files
            db.update(rel)

        return True
    except:
        log.error('Failed: %s', traceback.format_exc())

    return False
def getAllDownloadStatus(self, ids):
    """Build release download entries for the queued nzbs matching *ids*."""
    raw_statuses = self.call('nzb')

    release_downloads = ReleaseDownloadList(self)
    for nzb in raw_statuses.get('nzbs', []):
        nzb_id = os.path.basename(nzb['nzbFileName'])
        if nzb_id not in ids:
            continue

        # Translate the numeric downloader state into our status names.
        if nzb['state'] == 20:
            status = 'completed'
        elif nzb['state'] in [21, 22, 24]:
            status = 'failed'
        else:
            status = 'busy'

        release_downloads.append({
            'temp_id': nzb['id'],
            'id': nzb_id,
            'name': nzb['uiTitle'],
            'status': status,
            'original_status': nzb['state'],
            'timeleft': -1,
            'folder': sp(nzb['destinationPath']),
        })

    return release_downloads
def get_all_download_status(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """
    raw_statuses = self.call('nzb')

    # Known terminal states; anything else is still in progress.
    state_names = {20: 'completed', 21: 'failed', 22: 'failed', 24: 'failed'}

    release_downloads = ReleaseDownloadList(self)
    for nzb in raw_statuses.get('nzbs', []):
        nzb_id = os.path.basename(nzb['nzbFileName'])
        if nzb_id in ids:
            release_downloads.append({
                'temp_id': nzb['id'],
                'id': nzb_id,
                'name': nzb['uiTitle'],
                'status': state_names.get(nzb['state'], 'busy'),
                'original_status': nzb['state'],
                'timeleft': -1,
                'folder': sp(nzb['destinationPath']),
            })

    return release_downloads
def removePyc(folder, only_excess = True, show_logs = True):
    """Delete .pyc files below *folder* and prune emptied directories.

    With only_excess=True only orphaned .pyc files (those without a
    matching .py source) are removed; otherwise every .pyc goes.
    """
    folder = sp(folder)

    for root, dirs, files in os.walk(folder):

        source_names = set(f for f in files if f.endswith(".py"))

        for name in files:
            if not name.endswith(".pyc"):
                continue
            # name[:-1] strips the trailing "c", giving the .py counterpart.
            if only_excess and name[:-1] in source_names:
                continue

            full_path = os.path.join(root, name)
            if show_logs:
                log.debug("Removing old PYC file: %s", full_path)
            try:
                os.remove(full_path)
            except:
                log.error("Couldn't remove %s: %s", (full_path, traceback.format_exc()))

        for dir_name in dirs:
            full_path = os.path.join(root, dir_name)
            if len(os.listdir(full_path)) == 0:
                try:
                    os.rmdir(full_path)
                except:
                    log.error("Couldn't remove empty directory %s: %s", (full_path, traceback.format_exc()))
def createFile(self, path, content, binary = False):
    """Write *content* to *path*, creating the parent directory first.

    A requests Response is streamed to a ".tmp" file and renamed into
    place, so a failed download never clobbers an existing good file.
    Plain content is written directly; on failure the partial file is
    removed.

    :param path: destination file path
    :param content: str payload or a requests Response object to stream
    :param binary: open the file in binary mode when True
    """
    path = sp(path)

    self.makeDir(os.path.dirname(path))

    if os.path.exists(path):
        log.debug('%s already exists, overwriting file with new version', path)

    write_type = 'w+' if not binary else 'w+b'

    # Stream file using response object
    if isinstance(content, requests.models.Response):

        # Write file to temp
        with open('%s.tmp' % path, write_type) as f:
            for chunk in content.iter_content(chunk_size = 1048576):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()

        # Rename to destination
        os.rename('%s.tmp' % path, path)
    else:
        try:
            # BUGFIX: use a context manager so the handle is closed even
            # when write() raises; the old code leaked the handle and then
            # tried to os.remove() a still-open file (fails on Windows).
            with open(path, write_type) as f:
                f.write(content)
            os.chmod(path, Env.getPermission('file'))
        except:
            log.error('Unable to write file "%s": %s', (path, traceback.format_exc()))
            # Clean up the partial write so we don't leave a corrupt file.
            if os.path.isfile(path):
                os.remove(path)
def getAllDownloadStatus(self, ids):
    """Collect download entries for the nzbs whose id is in *ids*."""
    raw_statuses = self.call('nzb')

    release_downloads = ReleaseDownloadList(self)
    for nzb in raw_statuses.get('nzbs', []):
        if nzb['id'] not in ids:
            continue

        # Map the numeric downloader state onto our status vocabulary.
        state = nzb['state']
        if state == 20:
            status = 'completed'
        elif state in [21, 22, 24]:
            status = 'failed'
        else:
            status = 'busy'

        release_downloads.append({
            'id': nzb['id'],
            'name': nzb['uiTitle'],
            'status': status,
            'original_status': state,
            'timeleft': -1,
            'folder': sp(nzb['destinationPath']),
        })

    return release_downloads
def processComplete(self, release_download, delete_files):
    """Erase the torrent from rTorrent; optionally unlink its data first."""
    log.debug(
        'Requesting rTorrent to remove the torrent %s%s.',
        (release_download['name'],
         ' and cleanup the downloaded files' if delete_files else ''))

    if not self.connect():
        return False

    torrent = self.rt.find_torrent(release_download['id'])
    if torrent is None:
        return False

    if delete_files:
        # will only delete files, not dir/sub-dir
        for file_item in torrent.get_files():
            os.unlink(os.path.join(torrent.directory, file_item.path))

        if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
            # Remove empty directories bottom up so children go first.
            try:
                for subdir, _, _ in os.walk(sp(torrent.directory), topdown = False):
                    os.rmdir(subdir)
            except OSError:
                log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

    torrent.erase()  # just removes the torrent, doesn't delete data

    return True
def getAllDownloadStatus(self, ids):
    """Combine the SABnzbd queue and history into release download entries."""
    log.debug('Checking SABnzbd download status.')

    # Go through Queue
    try:
        queue = self.call({'mode': 'queue'})
    except:
        log.error('Failed getting queue: %s', traceback.format_exc(1))
        return []

    # Go through history items
    try:
        history = self.call({'mode': 'history', 'limit': 15})
    except:
        log.error('Failed getting history json: %s', traceback.format_exc(1))
        return []

    release_downloads = ReleaseDownloadList(self)

    # Get busy releases
    for nzb in queue.get('slots', []):
        if nzb['nzo_id'] not in ids:
            continue

        # Encrypted downloads can never finish; flag them failed now.
        status = 'failed' if 'ENCRYPTED / ' in nzb['filename'] else 'busy'

        release_downloads.append({
            'id': nzb['nzo_id'],
            'name': nzb['filename'],
            'status': status,
            'original_status': nzb['status'],
            'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
        })

    # Get old releases
    for nzb in history.get('slots', []):
        if nzb['nzo_id'] not in ids:
            continue

        # "Completed" with a fail message still counts as failed.
        status = 'busy'
        if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
            status = 'failed'
        elif nzb['status'] == 'Completed':
            status = 'completed'

        storage = nzb['storage']
        folder = os.path.dirname(storage) if os.path.isfile(storage) else storage

        release_downloads.append({
            'id': nzb['nzo_id'],
            'name': nzb['name'],
            'status': status,
            'original_status': nzb['status'],
            'timeleft': str(timedelta(seconds = 0)),
            'folder': sp(folder),
        })

    return release_downloads
def getDirectories(self, path = '/', show_hidden = True):
    """List the subdirectories of *path*, sorted; drive letters for Windows roots."""
    # Empty or root path: on Windows offer the drive letters instead.
    if not path or path in ('/', '\\'):
        if os.name == 'nt':
            return self.getDriveLetters()
        path = '/'

    path = sp(path)

    found = []
    for entry in os.listdir(path):
        full = sp(os.path.join(path, entry))
        if not os.path.isdir(full):
            continue
        # Skip hidden directories unless the caller asked for them.
        if self.is_hidden(full) and not bool(int(show_hidden)):
            continue
        found.append(toUnicode('%s%s' % (full, os.path.sep)))

    return sorted(found)
def makeDir(self, path):
    """Ensure the directory *path* exists.

    :return: True when the directory exists or was created, False on error.
    """
    target = sp(path)
    try:
        if os.path.isdir(target):
            return True
        os.makedirs(target, Env.getPermission('folder'))
        return True
    except Exception as e:
        log.error('Unable to create folder "%s": %s', (target, e))
        return False
def getAllDownloadStatus(self, ids):
    """ Get status of all active downloads

    :param ids: list of (mixed) downloader ids
        Used to match the releases for this downloader as there could be
        other downloaders active that it should ignore
    :return: list of releases
    """
    log.debug('Checking Hadouken download status.')

    if not self.connect():
        return []

    release_downloads = ReleaseDownloadList(self)
    queue = self.hadouken_api.get_by_hash_list(ids)

    if not queue:
        return []

    for torrent in queue:
        if torrent is None:
            continue

        torrent_filelist = self.hadouken_api.get_files_by_hash(torrent.info_hash)

        torrent_files = []
        for file_item in torrent_filelist:
            torrent_files.append(sp(os.path.join(torrent.save_path, file_item)))

        # A single-file torrent lives directly in save_path; a multi-file
        # torrent gets a sub-directory named after the torrent.
        # BUGFIX: was `len(torrent_files == 1)` which raised
        # TypeError (len() of a bool) on every torrent.
        if len(torrent_files) == 1:
            folder = torrent.save_path
        else:
            folder = os.path.join(torrent.save_path, torrent.name)

        release_downloads.append({
            'id': torrent.info_hash.upper(),
            'name': torrent.name,
            'status': torrent.get_status(),
            'seed_ratio': torrent.get_seed_ratio(),
            'original_status': torrent.state,
            'timeleft': -1,
            'folder': sp(folder),
            'files': torrent_files
        })

    return release_downloads
def getSize(paths):
    """Return the combined size of *paths* in megabytes.

    :param paths: a single path or a list/tuple of paths; directories are
        summed recursively, regular files are added directly, and missing
        paths contribute nothing.
    :return: total size in MB
    """
    single = not isinstance(paths, (tuple, list))
    if single:
        paths = [paths]

    total_size = 0
    for path in paths:
        path = sp(path)

        if os.path.isdir(path):
            # BUGFIX: the accumulator was reset to 0 here, which silently
            # discarded the sizes of all previously processed paths
            # whenever a directory appeared later in the list.
            for dirpath, _, filenames in os.walk(path):
                for f in filenames:
                    total_size += os.path.getsize(sp(os.path.join(dirpath, f)))
        elif os.path.isfile(path):
            total_size += os.path.getsize(path)

    return total_size / 1048576  # MB
def deleteEmptyFolder(self, folder, show_error = True, only_clean = None):
    """Remove empty sub-directories of *folder*, then *folder* itself.

    When *only_clean* is given, only those named sub-directories are swept.
    """
    folder = sp(folder)

    for item in os.listdir(folder):
        full_folder = sp(os.path.join(folder, item))

        # Restrict the sweep to whitelisted sub-directories when requested.
        if only_clean and not (item in only_clean and os.path.isdir(full_folder)):
            continue

        # Bottom-up walk: children must be gone before rmdir can succeed.
        for subfolder, dirs, files in os.walk(full_folder, topdown = False):
            try:
                os.rmdir(subfolder)
            except:
                if show_error:
                    log.info2('Couldn\'t remove directory %s: %s', (subfolder, traceback.format_exc()))

    try:
        os.rmdir(folder)
    except:
        if show_error:
            log.error('Couldn\'t remove empty directory %s: %s', (folder, traceback.format_exc()))
def test(self):
    """Verify the configured directory exists and is writable."""
    directory = self.conf('directory')
    if not directory or not os.path.isdir(directory):
        return False

    probe = sp(os.path.join(directory, 'couchpotato_test.txt'))

    # Check if folder is writable by round-tripping a small file.
    self.createFile(probe, 'This is a test file')
    if not os.path.isfile(probe):
        return False

    os.remove(probe)
    return True
def replaceWith(self, path):
    """Replace the running application files in app_dir with the tree at *path*.

    Moves every file from *path* over the matching file in app_dir,
    then deletes app files that the new tree no longer contains
    (sparing anything under data_dir). Returns True on success,
    False as soon as a single overwrite fails.
    """
    path = sp(path)
    app_dir = Env.get('app_dir')
    data_dir = Env.get('data_dir')

    # Get list of files we want to overwrite
    # (drop stale .pyc files first so they don't linger in the snapshot)
    removePyc(app_dir)
    existing_files = []
    for root, subfiles, filenames in os.walk(app_dir):
        for filename in filenames:
            existing_files.append(os.path.join(root, filename))

    for root, subfiles, filenames in os.walk(path):
        for filename in filenames:
            fromfile = os.path.join(root, filename)
            # Map the source file to its destination by stripping the
            # *path* prefix and re-rooting under app_dir.
            tofile = os.path.join(app_dir, fromfile.replace(path + os.path.sep, ''))

            # In dev mode nothing is overwritten (keeps a checkout intact).
            if not Env.get('dev'):
                try:
                    # Remove the old file first; shutil.move would fail
                    # to overwrite on some platforms otherwise.
                    if os.path.isfile(tofile):
                        os.remove(tofile)

                    dirname = os.path.dirname(tofile)
                    if not os.path.isdir(dirname):
                        self.makeDir(dirname)

                    shutil.move(fromfile, tofile)
                    try:
                        # Still present in the new tree: keep it off the
                        # to-be-deleted list.
                        existing_files.remove(tofile)
                    except ValueError:
                        # New file that wasn't in the old snapshot.
                        pass
                except:
                    # Abort on the first failed overwrite; a partial
                    # update is reported as failure.
                    log.error('Failed overwriting file "%s": %s', (tofile, traceback.format_exc()))
                    return False

    # Whatever remains in the snapshot no longer exists in the new tree.
    for still_exists in existing_files:

        # Never touch user data living under data_dir.
        if data_dir in still_exists:
            continue

        try:
            os.remove(still_exists)
        except:
            log.error('Failed removing non-used file: %s', traceback.format_exc())

    return True