def _move(self, file_path, new_path, associated_files=False):
    """Move file_path (and optionally its associated files) into new_path.

    file_path: full path of the file to move
    new_path: destination directory
    associated_files: when True, also move the files associated with file_path
    Raises IOError/OSError when any individual move fails.
    """
    if associated_files:
        file_list = self._list_associated_files(file_path)
    else:
        file_list = [file_path]

    if not file_list:
        self._log(
            u"There were no files associated with " + file_path + ", not moving anything",
            logger.DEBUG)
        return

    for cur_file_path in file_list:
        cur_file_name = ek.ek(os.path.basename, cur_file_path)
        new_file_path = ek.ek(os.path.join, new_path, cur_file_name)
        self._log(
            u"Moving file from " + cur_file_path + " to " + new_file_path,
            logger.DEBUG)
        try:
            helpers.moveFile(cur_file_path, new_file_path)
        except (IOError, OSError) as e:
            self._log(
                "Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e).decode('utf-8'),
                logger.ERROR)
            # BUGFIX: bare "raise" re-raises with the original traceback;
            # "raise e" (as before) resets the traceback in Python 2
            raise
def _move(self, file_path, new_path, associated_files=False):
    """Move a file, and optionally its associated files, to a new directory.

    file_path: full path of the primary file
    new_path: destination directory
    associated_files: also move associated files when True
    Raises IOError/OSError when a move fails.
    """
    if associated_files:
        file_list = self._list_associated_files(file_path)
    else:
        file_list = [file_path]

    if not file_list:
        self._log(u"There were no files associated with " + file_path + ", not moving anything", logger.DEBUG)
        return

    for cur_file_path in file_list:
        cur_file_name = ek.ek(os.path.basename, cur_file_path)
        new_file_path = ek.ek(os.path.join, new_path, cur_file_name)
        self._log(u"Moving file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
        try:
            helpers.moveFile(cur_file_path, new_file_path)
        except (IOError, OSError) as e:
            self._log(
                "Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e).decode("utf-8"),
                logger.ERROR,
            )
            # BUGFIX: use bare "raise" so the original traceback is preserved
            # ("raise e" discards it in Python 2)
            raise
def _int_move(cur_file_path, new_file_path):
    """Move a single file and align its permissions with the parent directory.

    Raises IOError/OSError when the move fails (logged before re-raising).
    """
    self._log(u"Moving file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
    try:
        helpers.moveFile(cur_file_path, new_file_path)
        helpers.chmodAsParent(new_file_path)
    except (IOError, OSError) as e:
        self._log(u"Unable to move file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
        # BUGFIX: bare "raise" keeps the original traceback ("raise e" loses it)
        raise
def _int_move(cur_file_path, new_file_path, success_tmpl=u' %s to %s'):
    """Move a single file, fix its permissions, and log with success_tmpl.

    success_tmpl: template interpolated with (source, destination) for both
    the success and failure log messages (leading space is intentional).
    Raises IOError/OSError when the move fails.
    """
    try:
        helpers.moveFile(cur_file_path, new_file_path)
        helpers.chmodAsParent(new_file_path)
        self._log(u'Moved file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
    except (IOError, OSError) as e:
        self._log(u'Unable to move file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
        # BUGFIX: bare "raise" preserves the traceback; "raise e" reset it
        raise
def _int_move(cur_file_path, new_file_path):
    """Move a single file, fix permissions, and optionally bump the parent
    directory's timestamp (controlled by sickbeard.UPDATE_DIRECTORY_TIMESTAMP).

    Raises IOError/OSError when the move fails.
    """
    self._log(u"Moving file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
    try:
        helpers.moveFile(cur_file_path, new_file_path)
        helpers.chmodAsParent(new_file_path)
        if sickbeard.UPDATE_DIRECTORY_TIMESTAMP:
            # touch the destination folder so media managers notice the change
            helpers.touchPath(helpers.getParentDirectory(new_file_path))
    except (IOError, OSError) as e:
        self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
        # BUGFIX: bare "raise" keeps the original traceback intact
        raise
def _int_link(cur_file_path, new_file_path):
    """Move a file to its destination, then link it back to the source
    location, fixing permissions on the destination.

    NOTE(review): the exact link direction depends on helpers.linkFile's
    argument order — confirm against its definition. eec timing is only
    recorded on failure here; presumably the caller clocks success.
    Raises IOError/OSError when the move/link fails.
    """
    self._log(u"Linking file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
    est = eec.set(self._link, cur_file_path)
    try:
        helpers.moveFile(cur_file_path, new_file_path)
        helpers.linkFile(cur_file_path, new_file_path)
        helpers.chmodAsParent(new_file_path)
    except (IOError, OSError) as e:
        logger.log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
        logger.log(str(e), logger.ERROR)
        eec.clock(est, False)
        # BUGFIX: bare "raise" preserves the traceback; "raise e" reset it
        raise
def _cache_image_from_file(self, image_path, img_type, indexer_id, move_file=False):
    """
    Takes the image provided and copies or moves it to the cache folder

    returns: full path to cached file, None if it did not land on disk,
             or False for an invalid img_type

    image_path: path to the image to cache
    img_type: BANNER, POSTER, or FANART
    indexer_id: id of the show this image belongs to
    move_file: True if action is to move the file else file should be copied
    """
    # generate the path based on the type & indexer_id
    fanart_subdir = []
    if img_type == self.POSTER:
        dest_path = self.poster_path(indexer_id)
    elif img_type == self.BANNER:
        dest_path = self.banner_path(indexer_id)
    elif img_type == self.FANART:
        # fanart filenames embed a CRC of the content, letting several
        # distinct fanart images coexist for the same show
        with open(image_path, mode='rb') as resource:
            crc = '%05X' % (zlib.crc32(resource.read()) & 0xFFFFFFFF)
        fanart_subdir = [self._fanart_dir(indexer_id)]
        dest_path = self.fanart_path(indexer_id).replace('.fanart.jpg', '.%s.fanart.jpg' % crc)
    else:
        logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR)
        return False

    # ensure all cache directories exist before writing
    for cache_dir in [self._cache_dir(), self._thumbnails_dir(), self._fanart_dir()] + fanart_subdir:
        helpers.make_dirs(cache_dir)

    logger.log(u'%sing from %s to %s' % (('Copy', 'Mov')[move_file], image_path, dest_path))
    if move_file:
        helpers.moveFile(image_path, dest_path)
    else:
        helpers.copyFile(image_path, dest_path)

    # IDIOM FIX: explicit conditional instead of the fragile "and/or" trick
    # (equivalent here because dest_path is a non-empty string)
    return dest_path if ek.ek(os.path.isfile, dest_path) else None
def _cache_image_from_file(self, image_path, img_type, indexer_id, move_file=False):
    """Copy (or move) a show image into the image cache.

    image_path -- path of the source image on disk
    img_type -- one of self.POSTER, self.BANNER, self.FANART
    indexer_id -- id of the show the image belongs to
    move_file -- move instead of copy when True
    Returns the cached file's full path, None when the file did not land
    on disk, or False for an unknown img_type.
    """
    extra_dirs = []
    # choose the destination path for this image type
    if img_type == self.POSTER:
        dest_path = self.poster_path(indexer_id)
    elif img_type == self.BANNER:
        dest_path = self.banner_path(indexer_id)
    elif img_type == self.FANART:
        # fanart names carry a CRC of the content so several fanarts coexist
        with open(image_path, mode='rb') as img_handle:
            crc = '%05X' % (zlib.crc32(img_handle.read()) & 0xFFFFFFFF)
        extra_dirs = [self._fanart_dir(indexer_id)]
        dest_path = self.fanart_path(indexer_id).replace('.fanart.jpg', '.%s.fanart.jpg' % crc)
    else:
        logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR)
        return False

    # guarantee every cache directory exists before writing anything
    for needed_dir in [self._cache_dir(), self._thumbnails_dir(), self._fanart_dir()] + extra_dirs:
        helpers.make_dirs(needed_dir)

    logger.log(u'%sing from %s to %s' % ('Mov' if move_file else 'Copy', image_path, dest_path))
    if move_file:
        helpers.moveFile(image_path, dest_path)
    else:
        helpers.copyFile(image_path, dest_path)

    if ek.ek(os.path.isfile, dest_path):
        return dest_path
    return None
def _move(self, file_path, new_path, new_base_name, associated_files=False):
    """Move file_path (and optionally associated files) to new_path,
    renaming each file to new_base_name + original extension when given.

    new_base_name: new stem for the files, or a false value to keep names
    associated_files: also move associated files when True
    Raises IOError/OSError when a move fails.
    """
    if associated_files:
        file_list = self._list_associated_files(file_path)
    else:
        file_list = [file_path]

    if not file_list:
        self._log(u"There were no files associated with " + file_path + ", not moving anything", logger.DEBUG)
        return

    for cur_file_path in file_list:
        cur_file_name = ek.ek(os.path.basename, cur_file_path)

        # AW: If new base name then convert name
        if new_base_name:
            # get the extension
            cur_extension = cur_file_path.rpartition('.')[-1]

            # replace .nfo with .nfo-orig to avoid conflicts
            if cur_extension == 'nfo':
                cur_extension = 'nfo-orig'

            new_file_name = new_base_name + '.' + cur_extension
        else:
            new_file_name = cur_file_name

        new_file_path = ek.ek(os.path.join, new_path, new_file_name)
        self._log(u"Moving file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
        try:
            helpers.moveFile(cur_file_path, new_file_path)
        except (IOError, OSError) as e:
            self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e).decode('utf-8'), logger.ERROR)
            # BUGFIX: bare "raise" keeps the original traceback ("raise e" reset it)
            raise
def run(self):
    """Search for and download missing subtitles of wanted episodes.

    Selects candidate episodes from the DB using search-count/airdate
    back-off rules, downloads subtitles per episode, and moves them into
    SUBTITLES_DIR when configured.
    """
    # TODO: Put that in the __init__ before starting the thread?
    if not sickbeard.USE_SUBTITLES:
        logger.log(u'Subtitles support disabled', logger.DEBUG)
        return
    if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
        logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)
        return

    logger.log(u'Checking for subtitles', logger.MESSAGE)

    # get episodes on which we want subtitles
    # criteria is:
    #  - show subtitles = 1
    #  - episode subtitles != config wanted languages or SINGLE (depends on config multi)
    #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
    #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
    myDB = db.DBConnection()
    today = datetime.date.today().toordinal()
    # you have 5 minutes to understand that one. Good luck
    sqlResults = myDB.select(
        'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, '
        'e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, '
        'e.location, (? - e.airdate) AS airdate_daydiff '
        'FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.tvdb_id) '
        'WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) '
        'AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) '
        'OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) '
        'AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ') '
        'OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ') AND e.location != ""))',
        [today, wantedLanguages(True), today, today])
    if len(sqlResults) == 0:
        logger.log('No subtitles to download', logger.MESSAGE)
        return

    rules = self._getRules()
    now = datetime.datetime.now()
    for epToSub in sqlResults:
        if not ek.ek(os.path.isfile, epToSub['location']):
            logger.log('Episode file does not exist, cannot download subtitles for episode %dx%d of show %s' % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG)
            continue

        # Old shows rule
        if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2
             and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['old'][epToSub['searchcount']]))
                # Recent shows rule
                or (epToSub['airdate_daydiff'] <= 7 and epToSub['searchcount'] < 7
                    and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['new'][epToSub['searchcount']]))):
            logger.log('Downloading subtitles for episode %dx%d of show %s' % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG)

            showObj = helpers.findCertainShow(sickbeard.showList, int(epToSub['showid']))
            if not showObj:
                # BUGFIX: was "return", which aborted the whole subtitle run
                # on one missing show; skip just this episode instead
                logger.log(u'Show not found', logger.DEBUG)
                continue

            epObj = showObj.getEpisode(int(epToSub["season"]), int(epToSub["episode"]))
            if isinstance(epObj, str):
                # BUGFIX: was "return"; skip just this episode instead
                logger.log(u'Episode not found', logger.DEBUG)
                continue

            # NOTE(review): unused locally; kept for parity with other variants
            previous_subtitles = epObj.subtitles

            try:
                subtitles = epObj.downloadSubtitles()

                if sickbeard.SUBTITLES_DIR:
                    for video in subtitles:
                        subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
                        dir_exists = helpers.makeDir(subs_new_path)
                        if not dir_exists:
                            logger.log(u"Unable to create subtitles folder " + subs_new_path, logger.ERROR)
                        else:
                            helpers.chmodAsParent(subs_new_path)

                        for subtitle in subtitles.get(video):
                            new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
                            helpers.moveFile(subtitle.path, new_file_path)
                            helpers.chmodAsParent(new_file_path)
                else:
                    for video in subtitles:
                        for subtitle in subtitles.get(video):
                            helpers.chmodAsParent(subtitle.path)
            except Exception:
                # BUGFIX: was a bare "except: ... return" that swallowed every
                # error type and aborted the whole run on the first failing
                # episode; log and move on to the next episode instead
                logger.log(u'Unable to find subtitles', logger.DEBUG)
                continue
def run(self):
    """Search for and download missing subtitles of wanted episodes.

    Variant that also considers ARCHIVED episodes. Selects candidates from
    the DB using search-count/airdate back-off rules and downloads per
    episode, moving results into SUBTITLES_DIR when configured.
    """
    # TODO: Put that in the __init__ before starting the thread?
    if not sickbeard.USE_SUBTITLES:
        logger.log(u'Subtitles support disabled', logger.DEBUG)
        return
    if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
        logger.log(
            u'Not enough services selected. At least 1 service is required to search subtitles in the background',
            logger.ERROR)
        return

    logger.log(u'Checking for subtitles', logger.MESSAGE)

    # get episodes on which we want subtitles
    # criteria is:
    #  - show subtitles = 1
    #  - episode subtitles != config wanted languages or SINGLE (depends on config multi)
    #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
    #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
    myDB = db.DBConnection()
    today = datetime.date.today().toordinal()
    # you have 5 minutes to understand that one. Good luck
    sqlResults = myDB.select(
        'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, '
        'e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, '
        'e.location, (? - e.airdate) AS airdate_daydiff '
        'FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.tvdb_id) '
        'WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) '
        'AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) '
        'OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) '
        'AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED + [ARCHIVED]]) + ') '
        'OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ') AND e.location != ""))',
        [today, wantedLanguages(True), today, today])
    if len(sqlResults) == 0:
        logger.log('No subtitles to download', logger.MESSAGE)
        return

    rules = self._getRules()
    now = datetime.datetime.now()
    for epToSub in sqlResults:
        if not ek.ek(os.path.isfile, epToSub['location']):
            logger.log(
                'Episode file does not exist, cannot download subtitles for episode %dx%d of show %s'
                % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG)
            continue

        # Old shows rule
        if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2
             and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['old'][epToSub['searchcount']]))
                # Recent shows rule
                or (epToSub['airdate_daydiff'] <= 7 and epToSub['searchcount'] < 7
                    and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['new'][epToSub['searchcount']]))):
            logger.log(
                'Downloading subtitles for episode %dx%d of show %s'
                % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG)

            showObj = helpers.findCertainShow(sickbeard.showList, int(epToSub['showid']))
            if not showObj:
                # BUGFIX: was "return", which aborted the whole subtitle run
                # on one missing show; skip just this episode instead
                logger.log(u'Show not found', logger.DEBUG)
                continue

            epObj = showObj.getEpisode(int(epToSub["season"]), int(epToSub["episode"]))
            if isinstance(epObj, str):
                # BUGFIX: was "return"; skip just this episode instead
                logger.log(u'Episode not found', logger.DEBUG)
                continue

            # NOTE(review): unused locally; kept for parity with other variants
            previous_subtitles = epObj.subtitles

            try:
                subtitles = epObj.downloadSubtitles()

                if sickbeard.SUBTITLES_DIR:
                    for video in subtitles:
                        subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
                        dir_exists = helpers.makeDir(subs_new_path)
                        if not dir_exists:
                            logger.log(u"Unable to create subtitles folder " + subs_new_path, logger.ERROR)
                        else:
                            helpers.chmodAsParent(subs_new_path)

                        for subtitle in subtitles.get(video):
                            new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
                            helpers.moveFile(subtitle.path, new_file_path)
                            helpers.chmodAsParent(new_file_path)
                else:
                    for video in subtitles:
                        for subtitle in subtitles.get(video):
                            helpers.chmodAsParent(subtitle.path)
            except Exception:
                # BUGFIX: was a bare "except: ... return" that swallowed every
                # error type and aborted the whole run on the first failing
                # episode; log and continue with the next episode instead
                logger.log(u'Unable to find subtitles', logger.DEBUG)
                continue
def download_result(self, result):
    """Save a search result (torrent or nzb) to the blackhole directory.

    For torrent providers, builds a list of public torrent-cache mirror
    URLs from the magnet's info-hash; for NZB providers, fetches result.url
    directly. Returns True when a file was saved, False on auth/hash
    failure (and None for an unknown provider type, as before).
    """
    # check for auth
    if not self._authorised():
        return False

    if GenericProvider.TORRENT == self.providerType:
        final_dir = sickbeard.TORRENT_DIR
        link_type = 'magnet'
        try:
            torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()
            if 32 == len(torrent_hash):
                # base32-encoded info-hash -> canonical hex form
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()
            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False
            urls = ['http%s://%s/%s.torrent' % (u + (torrent_hash,))
                    for u in (('s', 'torcache.net/torrent'),
                              ('', 'thetorrent.org/torrent'),
                              ('s', 'itorrents.org/torrent'))]
        except Exception:
            # not a magnet link: fall back to fetching the url as a .torrent
            # (narrowed from a bare "except:" that also caught SystemExit)
            link_type = 'torrent'
            urls = [result.url]
    elif GenericProvider.NZB == self.providerType:
        final_dir = sickbeard.NZB_DIR
        link_type = 'nzb'
        urls = [result.url]
    else:
        return

    ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
    saved = False
    for url in urls:
        cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
        base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
        cache_file = ek.ek(os.path.join, cache_dir, base_name)

        self.session.headers['Referer'] = url
        if helpers.download_file(url, cache_file, session=self.session):
            if self._verify_download(cache_file):
                logger.log(u'Downloaded %s result from %s' % (self.name, url))
                final_file = ek.ek(os.path.join, final_dir, base_name)
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except Exception:
                    msg = 'copied cached file'
                logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                saved = True
                break

        remove_file_failed(cache_file)

    # restore the Referer header we clobbered above
    if 'Referer' in self.session.headers:
        if ref_state:
            self.session.headers['Referer'] = ref_state
        else:
            del self.session.headers['Referer']

    if not saved:
        # BUGFIX: the torrent-cache message only makes sense when we actually
        # iterated the cache mirrors (magnet); nzb/direct failures got a
        # misleading message before
        if 'magnet' == link_type:
            logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)
        else:
            logger.log(u'Server failed to return a downloadable result', logger.ERROR)

    return saved
def run(self):
    """Collect episodes missing subtitles, batch-download them via
    subliminal, relocate the files when SUBTITLES_DIR is set, notify,
    and refresh the affected shows.
    """
    # TODO: Put that in the __init__ before starting the thread?
    if not sickbeard.USE_SUBTITLES:
        logger.log(u"Subtitles support disabled", logger.DEBUG)
        return
    if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
        logger.log(
            u"Not enough services selected. At least 1 service is required to search subtitles in the background",
            logger.ERROR,
        )
        return
    logger.log(u"Checking for subtitles", logger.MESSAGE)

    # get episodes on which we want subtitles
    # criteria is:
    #  - show subtitles = 1
    #  - episode subtitles != config wanted languages or SINGLE (depends on config multi)
    #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
    #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
    myDB = db.DBConnection()
    today = datetime.date.today().toordinal()
    # you have 5 minutes to understand that one. Good luck
    sqlResults = myDB.select(
        "SELECT s.show_name, e.showid, e.season, e.episode, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.tvdb_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN (" + ",".join([str(x) for x in Quality.DOWNLOADED + [ARCHIVED]]) + ") OR (e.status IN (" + ",".join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ') AND e.location != ""))',
        [today, wantedLanguages(True), today, today],
    )
    # locations feeds the batch download; toRefresh collects
    # (showid, season, episode) tuples for the final refresh pass
    locations = []
    toRefresh = []
    rules = self._getRules()
    now = datetime.datetime.now()
    for epToSub in sqlResults:
        if not ek.ek(os.path.isfile, epToSub["location"]):
            logger.log(
                "Episode file does not exist, cannot download subtitles for episode %dx%d of show %s"
                % (epToSub["season"], epToSub["episode"], epToSub["show_name"]),
                logger.DEBUG,
            )
            continue
        # Old shows rule
        if (
            epToSub["airdate_daydiff"] > 7
            and epToSub["searchcount"] < 2
            and now - datetime.datetime.strptime(epToSub["lastsearch"], "%Y-%m-%d %H:%M:%S")
            > datetime.timedelta(hours=rules["old"][epToSub["searchcount"]])
        ):
            logger.log(
                "Downloading subtitles for episode %dx%d of show %s"
                % (epToSub["season"], epToSub["episode"], epToSub["show_name"]),
                logger.DEBUG,
            )
            locations.append(epToSub["location"])
            toRefresh.append((epToSub["showid"], epToSub["season"], epToSub["episode"]))
            continue
        # Recent shows rule
        if (
            epToSub["airdate_daydiff"] <= 7
            and epToSub["searchcount"] < 7
            and now - datetime.datetime.strptime(epToSub["lastsearch"], "%Y-%m-%d %H:%M:%S")
            > datetime.timedelta(hours=rules["new"][epToSub["searchcount"]])
        ):
            logger.log(
                "Downloading subtitles for episode %dx%d of show %s"
                % (epToSub["season"], epToSub["episode"], epToSub["show_name"]),
                logger.DEBUG,
            )
            locations.append(epToSub["location"])
            toRefresh.append((epToSub["showid"], epToSub["season"], epToSub["episode"]))
            continue
        # Not matching my rules
        # logger.log('Do not match criteria to get downloaded: %s - %dx%d' % (epToSub['showid'], epToSub['season'], epToSub['episode']), logger.DEBUG)

    # stop here if we don't have subtitles to download
    if not locations:
        logger.log("No subtitles to download", logger.MESSAGE)
        return

    # download subtitles
    subtitles = subliminal.download_subtitles(
        locations,
        cache_dir=sickbeard.CACHE_DIR,
        multi=True,
        languages=sickbeard.SUBTITLES_LANGUAGES,
        services=sickbeard.subtitles.getEnabledServiceList(),
    )
    if sickbeard.SUBTITLES_DIR:
        # move each downloaded subtitle into the configured subfolder
        # NOTE(review): unlike other variants, no chmodAsParent here and
        # os.mkdir has no error handling — confirm whether intentional
        for video in subtitles:
            subsDir = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
            if not ek.ek(os.path.isdir, subsDir):
                ek.ek(os.mkdir, subsDir)
            for subtitle in subtitles.get(video):
                new_file_path = ek.ek(os.path.join, subsDir, os.path.basename(subtitle.path))
                helpers.moveFile(subtitle.path, new_file_path)
    if subtitles:
        logger.log("Downloaded %d subtitles" % len(subtitles), logger.MESSAGE)
        for video in subtitles:
            notifiers.notify_subtitle_download(
                os.path.basename(video.path).rpartition(".")[0],
                ",".join([subtitle.language.name for subtitle in subtitles.get(video)]),
            )
    else:
        logger.log("No subtitles found", logger.MESSAGE)

    # refresh each show
    self._refreshShows(toRefresh, now)
def download_result(self, result):
    """
    Persist a provider search result (torrent/nzb) to the blackhole dir.
    Returns True when a file was saved, False on auth/hash failure.
    """
    # bail out early when the provider rejects our credentials
    if not self._authorised():
        return False

    if self.providerType == GenericProvider.TORRENT:
        final_dir = sickbeard.TORRENT_DIR
        link_type = 'magnet'
        try:
            found = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)
            torrent_hash = found[0].upper()
            if len(torrent_hash) == 32:
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()
            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False
            # build one candidate url per torrent-cache mirror
            mirrors = (('s', 'torcache.net/torrent'),
                       ('', 'thetorrent.org/torrent'),
                       ('s', 'itorrents.org/torrent'))
            urls = []
            for scheme_suffix, host in mirrors:
                urls.append('http%s://%s/%s.torrent' % (scheme_suffix, host, torrent_hash))
        except:
            link_type = 'torrent'
            urls = [result.url]
    elif self.providerType == GenericProvider.NZB:
        final_dir = sickbeard.NZB_DIR
        link_type = 'nzb'
        urls = [result.url]
    else:
        return

    # remember any Referer header so it can be put back afterwards
    ref_state = self.session.headers['Referer'] if 'Referer' in self.session.headers else False
    saved = False
    for cur_url in urls:
        cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
        base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
        cache_file = ek.ek(os.path.join, cache_dir, base_name)
        self.session.headers['Referer'] = cur_url
        if helpers.download_file(cur_url, cache_file, session=self.session):
            if self._verify_download(cache_file):
                logger.log(u'Downloaded %s result from %s' % (self.name, cur_url))
                final_file = ek.ek(os.path.join, final_dir, base_name)
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except:
                    msg = 'copied cached file'
                logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                saved = True
                break
        remove_file_failed(cache_file)

    if 'Referer' in self.session.headers:
        if ref_state:
            self.session.headers['Referer'] = ref_state
        else:
            del self.session.headers['Referer']

    if not saved:
        logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)

    return saved
def download_result(self, result):
    """
    Save the result to disk.

    For torrent providers, candidate .torrent urls are built from the
    magnet's info-hash across several public cache mirrors; when none
    responds, the raw magnet link itself is written to a file as a last
    resort. Returns True when something was saved, False otherwise.
    """
    # check for auth
    if not self._authorised():
        return False

    if GenericProvider.TORRENT == self.providerType:
        final_dir = sickbeard.TORRENT_DIR
        link_type = 'magnet'
        try:
            torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()
            if 32 == len(torrent_hash):
                # base32-encoded info-hash -> canonical hex form
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()
            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False
            urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
                    for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
                              ('s', 'torrage.info'), ('', 'reflektor.karmorra.info'),
                              ('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
        except (StandardError, Exception):
            # deliberately broad: anything non-magnet falls back to fetching
            # the url as a .torrent (StandardError is Python 2-only)
            link_type = 'torrent'
            urls = [result.url]
    elif GenericProvider.NZB == self.providerType:
        final_dir = sickbeard.NZB_DIR
        link_type = 'nzb'
        urls = [result.url]
    else:
        return

    # remember current Referer header (False when absent) to restore later
    ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
    saved = False
    for url in urls:
        cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
        base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
        cache_file = ek.ek(os.path.join, cache_dir, base_name)

        self.session.headers['Referer'] = url
        if helpers.download_file(url, cache_file, session=self.session):
            if self._verify_download(cache_file):
                logger.log(u'Downloaded %s result from %s' % (self.name, url))
                final_file = ek.ek(os.path.join, final_dir, base_name)
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except (OSError, Exception):
                    msg = 'copied cached file'
                logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                saved = True
                break

        # failed download: clean up the partial cache file before next mirror
        remove_file_failed(cache_file)

    # restore the Referer header we clobbered above
    if 'Referer' in self.session.headers:
        if ref_state:
            self.session.headers['Referer'] = ref_state
        else:
            del(self.session.headers['Referer'])

    if not saved and 'magnet' == link_type:
        logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)
        logger.log(u'Advice: in search settings, change from method blackhole to direct torrent client connect', logger.ERROR)
        # last resort: save the magnet link itself; some clients/plugins
        # can pick up .magnet files from the blackhole dir
        final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitizeFileName(result.name), link_type))
        try:
            with open(final_file, 'wb') as fp:
                fp.write(result.url)
                fp.flush()
                # fsync to make sure the link survives an immediate crash
                os.fsync(fp.fileno())
            logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file)
        except (StandardError, Exception):
            # best-effort only; failure to write the magnet file is not fatal
            pass
    elif not saved:
        logger.log(u'Server failed to return anything useful', logger.ERROR)

    return saved