Example No. 1
    def downloadResult(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._doLogin():
            return False

        urls, filename = self._makeURL(result)

        for url in urls:
            # Search results don't return torrent files directly; they return show
            # sheets, so parse the show sheet to find the torrent link.
            data = self.getURL(url)
            match = data and re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL)
            if not match:
                continue
            url_torrent = match.group()

            if url_torrent.startswith('http'):
                self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})

            logger.log(u"Downloading a result from " + self.name + " at " + url)

            if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
                if self._verify_download(filename):
                    logger.log(u"Saved result to " + filename, logger.INFO)
                    return True
                else:
                    logger.log(u"Could not download %s" % url, logger.WARNING)
                    helpers.remove_file_failed(filename)

        if len(urls):
            logger.log(u"Failed to download any results", logger.WARNING)

        return False
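
The Referer header above is derived with '/'.join(url.split('/')[:3]) + '/', which trims a URL down to its scheme and host. A minimal standalone illustration (the sample URL is just for demonstration):

    # Reduce a full URL to "scheme://host/" for use as a Referer header.
    url = 'http://tumejorserie.com/descargar/12345.torrent'
    referer = '/'.join(url.split('/')[:3]) + '/'
    print(referer)  # -> http://tumejorserie.com/
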
Example No. 2
    def download_result(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            # Search results don't return torrent files directly; they return show
            # sheets, so parse the show sheet to find the torrent link.
            data = self.get_url(url, returns='text')
            match = data and re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL)
            if not match:
                continue
            url_torrent = match.group()

            if url_torrent.startswith('http'):
                self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})

            logger.log('Downloading a result from {}'.format(url))

            if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
                if self._verify_download(filename):
                    logger.log('Saved result to {}'.format(filename), logger.INFO)
                    return True
                else:
                    logger.log('Could not download {}'.format(url), logger.WARNING)
                    helpers.remove_file_failed(filename)

        if len(urls):
            logger.log('Failed to download any results', logger.WARNING)

        return False
Example No. 3
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log(u'Downloading a result from {0} at {1}'.format(self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(url, filename, session=self.session, headers=self.headers, hooks={'response': self.get_url_hook}):
                if self._verify_download(filename):
                    logger.log(u'Saved result to {0}'.format(filename), logger.INFO)
                    return True

                logger.log(u'Could not download {0}'.format(url), logger.WARNING)
                remove_file_failed(filename)

        if urls:
            logger.log(u'Failed to download any results', logger.WARNING)

        return False
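
replace_extension() above renames a result fetched through Jackett/Torznab, which can hand back a .torrent file under a filename that was built with an .nzb extension. The project's actual helper isn't shown here; a hypothetical stand-in with the assumed behaviour would be:

    def replace_extension(filename, new_ext):
        # Swap the final extension; names with no extension are returned unchanged.
        name, sep, _old_ext = filename.rpartition('.')
        return '{0}.{1}'.format(name, new_ext) if sep else filename

    print(replace_extension('Show.S01E01.nzb', 'torrent'))  # -> Show.S01E01.torrent
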
Example No. 4
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log(u'Downloading a result from %s at %s' % (self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(url, filename, session=self.session, headers=self.headers):
                if self._verify_download(filename):
                    logger.log(u'Saved result to %s' % filename, logger.INFO)
                    return True

                logger.log(u'Could not download %s' % url, logger.WARNING)
                remove_file_failed(filename)

        if len(urls):
            logger.log(u'Failed to download any results', logger.WARNING)

        return False
Example No. 5
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if "NO_DOWNLOAD_NAME" in url:
                continue

            if url.startswith("http"):
                self.headers.update({"Referer": "/".join(url.split("/")[:3]) + "/"})

            logger.log(u"Downloading a result from {0} at {1}".format(self.name, url))

            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = replace_extension(filename, GenericProvider.TORRENT)

            if download_file(
                url, filename, session=self.session, headers=self.headers, hooks={"response": self.get_url_hook}
            ):
                if self._verify_download(filename):
                    logger.log(u"Saved result to {0}".format(filename), logger.INFO)
                    return True

                logger.log(u"Could not download {0}".format(url), logger.WARNING)
                remove_file_failed(filename)

        if urls:
            logger.log(u"Failed to download any results", logger.WARNING)

        return False
Example No. 6
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log('Downloading a result from {0} at {1}'.format(self.name, url))

            downloaded_filename = download_file(url, filename, session=self.session, headers=self.headers,
                                                hooks={'response': self.get_url_hook}, return_filename=True)
            if downloaded_filename:
                if self._verify_download(downloaded_filename):
                    logger.log('Saved result to {0}'.format(downloaded_filename), logger.INFO)
                    return True

                logger.log('Could not download {0}'.format(url), logger.WARNING)
                remove_file_failed(downloaded_filename)

        if urls:
            logger.log('Failed to download any results', logger.WARNING)

        return False
Example No. 7
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        if urls:
            if result.url.startswith('magnet'):
                # opening in browser
                try:
                    import webbrowser
                    logger.log(u'Opening magnet link in browser: {0}'.format(result.url), logger.DEBUG)
                    try:
                        return webbrowser.open(result.url, 2, 1)
                    except Exception:
                        try:
                            return webbrowser.open(result.url, 1, 1)
                        except Exception:
                            logger.log(u"Unable to launch a browser", logger.ERROR)
                except ImportError:
                    logger.log(u"Unable to load the webbrowser module, cannot launch the browser.", logger.WARNING)
            
            logger.log(u'Failed to download any results', logger.WARNING)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({
                    'Referer': '/'.join(url.split('/')[:3]) + '/'
                })

            logger.log('Downloading a result from {0} at {1}'.format(self.name, url))

            downloaded_filename = download_file(url, filename, session=self.session, headers=self.headers,
                                                hooks={'response': self.get_url_hook}, return_filename=True)
            if downloaded_filename:
                if self._verify_download(downloaded_filename):
                    logger.log('Saved result to {0}'.format(downloaded_filename), logger.INFO)
                    return True

                logger.log('Could not download {0}'.format(url), logger.WARNING)
                remove_file_failed(downloaded_filename)

        if urls:
            logger.log('Failed to download any results', logger.WARNING)

        return False
Example No. 8
    def downloadResult(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._doLogin():
            return False

        urls, filename = self._makeURL(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update(
                    {'Referer': '/'.join(url.split('/')[:3]) + '/'})

            logging.info("Downloading a result from " + self.name + " at " +
                         url)

            # Support for Jackett/TorzNab
            if url.endswith(GenericProvider.TORRENT) and filename.endswith(
                    GenericProvider.NZB):
                filename = filename.rsplit(
                    '.', 1)[0] + '.' + GenericProvider.TORRENT

            if helpers.download_file(url,
                                     filename,
                                     session=self.session,
                                     headers=self.headers):
                if self._verify_download(filename):
                    logging.info("Saved result to " + filename)
                    return True
                else:
                    logging.warning("Could not download %s" % url)
                    helpers.remove_file_failed(filename)

        if len(urls):
            logging.warning("Failed to download any results")

        return False
Example No. 9
    def downloadResult(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._doLogin():
            return False

        urls, filename = self._makeURL(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if url.startswith('http'):
                self.headers.update({'Referer': '/'.join(url.split('/')[:3]) + '/'})

            logger.log(u"Downloading a result from " + self.name + " at " + url)

            # Support for Jackett/TorzNab
            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = filename.rsplit('.', 1)[0] + '.' + GenericProvider.TORRENT

            if helpers.download_file(url, filename, session=self.session, headers=self.headers):
                if self._verify_download(filename):
                    logger.log(u"Saved result to " + filename, logger.INFO)
                    return True
                else:
                    logger.log(u"Could not download %s" % url, logger.WARNING)
                    helpers.remove_file_failed(filename)

        if len(urls):
            logger.log(u"Failed to download any results", logger.WARNING)

        return False
Example No. 10
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    """

    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(
                    days=7):
                result.priority = 1
    if 0 < result.properlevel:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            dl_result = nzbget.send_nzb(result)
        else:
            logger.log(
                u'Unknown NZB action specified in config: %s' %
                sickbeard.NZB_METHOD, logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        if not result.url.startswith(
                'magnet') and None is not result.get_data_func:
            result.url = result.get_data_func(result.url)
            result.get_data_func = None  # consume only once
            if not result.url:
                return False
        # torrents are saved to disk in blackhole mode
        if 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if result.provider.should_skip() or not result.content:
                    logger.log(
                        u'Torrent content failed to download from %s' %
                        result.url, logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.get_client_instance(sickbeard.TORRENT_METHOD)()
            dl_result = client.send_torrent(result)

            if getattr(result, 'cache_file', None):
                helpers.remove_file_failed(result.cache_file)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.add_snatched(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(cur_ep_obj.status, result):
                cur_ep_obj.status = Quality.compositeStatus(
                    SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(
                    end_status, result.quality)

            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(
                cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

            update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
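
The high-priority rule near the top of snatch_episode() is a plain date comparison: the result is bumped to priority 1 if any of its episodes aired within the last seven days. A minimal sketch using hypothetical stand-in data instead of sickbeard's episode objects:

    import datetime

    def is_high_priority(airdates, window_days=7):
        # True if any airdate falls within the last `window_days` days.
        today = datetime.date.today()
        return any(today - airdate <= datetime.timedelta(days=window_days) for airdate in airdates)

    print(is_high_priority([datetime.date.today() - datetime.timedelta(days=3)]))  # -> True
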
Example No. 11
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    """

    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    if 0 < result.properlevel:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            dl_result = nzbget.send_nzb(result)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        if not result.url.startswith('magnet') and None is not result.get_data_func:
            result.url = result.get_data_func(result.url)
            result.get_data_func = None  # consume only once
            if not result.url:
                return False
        if not result.content and result.url.startswith('magnet-'):
            if sickbeard.TORRENT_DIR:
                filepath = ek.ek(os.path.join, sickbeard.TORRENT_DIR, 'files.txt')
                try:
                    with open(filepath, 'a') as fh:
                        result.url = result.url[7:]
                        fh.write('"%s"\t"%s"\n' % (result.url, sickbeard.TV_DOWNLOAD_DIR))
                    dl_result = True
                except IOError:
                    logger.log(u'Failed to write to %s' % filepath, logger.ERROR)
                    return False
            else:
                logger.log(u'Need to set a torrent blackhole folder', logger.ERROR)
                return False
        # torrents are saved to disk in blackhole mode
        elif 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if result.provider.should_skip() or not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.get_client_instance(sickbeard.TORRENT_METHOD)()
            dl_result = client.send_torrent(result)

            if getattr(result, 'cache_file', None):
                helpers.remove_file_failed(result.cache_file)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.add_snatched(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(cur_ep_obj.status, result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)

            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

            update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
Example No. 12
    def download_result(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._authorised():
            return False

        if GenericProvider.TORRENT == self.providerType:
            final_dir = sickbeard.TORRENT_DIR
            link_type = 'magnet'
            try:
                torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})',
                                          result.url)[0].upper()

                if 32 == len(torrent_hash):
                    torrent_hash = b16encode(b32decode(torrent_hash)).lower()

                if not torrent_hash:
                    logger.log(
                        'Unable to extract torrent hash from link: ' +
                        ex(result.url), logger.ERROR)
                    return False

                urls = [
                    'http%s://%s/%s.torrent' % (u + (torrent_hash, ))
                    for u in (('s', 'torcache.net/torrent'),
                              ('', 'thetorrent.org/torrent'),
                              ('s', 'itorrents.org/torrent'))
                ]
            except Exception:
                link_type = 'torrent'
                urls = [result.url]

        elif GenericProvider.NZB == self.providerType:
            final_dir = sickbeard.NZB_DIR
            link_type = 'nzb'
            urls = [result.url]

        else:
            return

        ref_state = 'Referer' in self.session.headers and self.session.headers[
            'Referer']
        saved = False
        for url in urls:
            cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
            base_name = '%s.%s' % (helpers.sanitizeFileName(
                result.name), self.providerType)
            cache_file = ek.ek(os.path.join, cache_dir, base_name)

            self.session.headers['Referer'] = url
            if helpers.download_file(url, cache_file, session=self.session):

                if self._verify_download(cache_file):
                    logger.log(u'Downloaded %s result from %s' %
                               (self.name, url))
                    final_file = ek.ek(os.path.join, final_dir, base_name)
                    try:
                        helpers.moveFile(cache_file, final_file)
                        msg = 'moved'
                    except Exception:
                        msg = 'copied cached file'
                    logger.log(u'Saved %s link and %s to %s' %
                               (link_type, msg, final_file))
                    saved = True
                    break

                remove_file_failed(cache_file)

        if 'Referer' in self.session.headers:
            if ref_state:
                self.session.headers['Referer'] = ref_state
            else:
                del (self.session.headers['Referer'])

        if not saved:
            logger.log(
                u'All torrent cache servers failed to return a downloadable result',
                logger.ERROR)

        return saved
Example No. 13
    def download_result(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._authorised():
            return False

        if GenericProvider.TORRENT == self.providerType:
            final_dir = sickbeard.TORRENT_DIR
            link_type = 'magnet'
            try:
                torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()

                if 32 == len(torrent_hash):
                    torrent_hash = b16encode(b32decode(torrent_hash)).lower()

                if not torrent_hash:
                    logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                    return False

                urls = ['http%s://%s/%s.torrent' % (u + (torrent_hash,))
                        for u in (('s', 'torcache.net/torrent'), ('', 'thetorrent.org/torrent'),
                                  ('s', 'itorrents.org/torrent'))]
            except Exception:
                link_type = 'torrent'
                urls = [result.url]

        elif GenericProvider.NZB == self.providerType:
            final_dir = sickbeard.NZB_DIR
            link_type = 'nzb'
            urls = [result.url]

        else:
            return

        ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
        saved = False
        for url in urls:
            cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
            base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
            cache_file = ek.ek(os.path.join, cache_dir, base_name)

            self.session.headers['Referer'] = url
            if helpers.download_file(url, cache_file, session=self.session):

                if self._verify_download(cache_file):
                    logger.log(u'Downloaded %s result from %s' % (self.name, url))
                    final_file = ek.ek(os.path.join, final_dir, base_name)
                    try:
                        helpers.moveFile(cache_file, final_file)
                        msg = 'moved'
                    except Exception:
                        msg = 'copied cached file'
                    logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                    saved = True
                    break

                remove_file_failed(cache_file)

        if 'Referer' in self.session.headers:
            if ref_state:
                self.session.headers['Referer'] = ref_state
            else:
                del(self.session.headers['Referer'])

        if not saved:
            logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)

        return saved
Example No. 14
    def download_result(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._authorised():
            return False

        if GenericProvider.TORRENT == self.providerType:
            final_dir = sickbeard.TORRENT_DIR
            link_type = 'magnet'
            try:
                torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()

                if 32 == len(torrent_hash):
                    torrent_hash = b16encode(b32decode(torrent_hash)).lower()

                if not torrent_hash:
                    logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                    return False

                urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
                        for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
                                  ('s', 'torrage.info'), ('', 'reflektor.karmorra.info'),
                                  ('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
            except (StandardError, Exception):
                link_type = 'torrent'
                urls = [result.url]

        elif GenericProvider.NZB == self.providerType:
            final_dir = sickbeard.NZB_DIR
            link_type = 'nzb'
            urls = [result.url]

        else:
            return

        ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
        saved = False
        for url in urls:
            cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
            base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
            cache_file = ek.ek(os.path.join, cache_dir, base_name)

            self.session.headers['Referer'] = url
            if helpers.download_file(url, cache_file, session=self.session):

                if self._verify_download(cache_file):
                    logger.log(u'Downloaded %s result from %s' % (self.name, url))
                    final_file = ek.ek(os.path.join, final_dir, base_name)
                    try:
                        helpers.moveFile(cache_file, final_file)
                        msg = 'moved'
                    except (OSError, Exception):
                        msg = 'copied cached file'
                    logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                    saved = True
                    break

                remove_file_failed(cache_file)

        if 'Referer' in self.session.headers:
            if ref_state:
                self.session.headers['Referer'] = ref_state
            else:
                del(self.session.headers['Referer'])

        if not saved and 'magnet' == link_type:
            logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)
            logger.log(u'Advice: in search settings, change from method blackhole to direct torrent client connect',
                       logger.ERROR)
            final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitizeFileName(result.name), link_type))
            try:
                with open(final_file, 'wb') as fp:
                    fp.write(result.url)
                    fp.flush()
                    os.fsync(fp.fileno())
                logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file)

            except (StandardError, Exception):
                pass
        elif not saved:
            logger.log(u'Server failed to return anything useful', logger.ERROR)

        return saved
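
The torrent-cache URLs above are built from the BitTorrent info-hash embedded in a magnet link, with 32-character base32 hashes converted to 40-character hex. A standalone sketch of that extraction (the function name and sample magnet are illustrative only):

    import re
    from base64 import b16encode, b32decode

    def extract_btih(magnet_uri):
        # Return the info-hash from a magnet URI as lowercase hex, or None if absent.
        match = re.search(r'(?i)urn:btih:([0-9a-z]{32,40})', magnet_uri)
        if not match:
            return None
        torrent_hash = match.group(1).upper()
        if len(torrent_hash) == 32:  # base32-encoded hash -> convert to hex
            torrent_hash = b16encode(b32decode(torrent_hash)).decode('ascii')
        return torrent_hash.lower()

    print(extract_btih('magnet:?xt=urn:btih:' + 'A' * 32))  # -> forty hex zeros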