Example #1
0
    def cache_image(self, image_url):
        """
        Store a local copy of the image in the cache dir.

        :param image_url: Source URL of the image to cache
        """
        path = os.path.abspath(
            os.path.join(settings.CACHE_DIR, 'images', 'imdb_popular'))

        # exist_ok avoids the race between a separate exists() check
        # and makedirs() (another thread/process creating the dir in between)
        os.makedirs(path, exist_ok=True)

        full_path = os.path.join(path, os.path.basename(image_url))

        # only download when there is no cached copy yet
        if not os.path.isfile(full_path):
            helpers.download_file(image_url, full_path, session=self.session)
Example #2
0
    def cache_image(self, indexerid):
        """
        Store a local copy of the show's poster image in the cache dir.

        :param indexerid: Source indexer id used to look up the poster URL
        """
        path = os.path.abspath(
            os.path.join(settings.CACHE_DIR, "images", "favorites"))

        # exist_ok avoids the race between a separate exists() check
        # and makedirs() (another thread/process creating the dir in between)
        os.makedirs(path, exist_ok=True)

        full_path = os.path.join(path, str(indexerid))

        # only download when there is no cached copy yet
        if not os.path.isfile(full_path):
            helpers.download_file(
                sickchill.indexer.series_poster_url_by_id(indexerid),
                full_path,
                session=self.session)
Example #3
0
    def cache_image(self, image_url):
        """
        Store a local copy of the image in the cache dir.

        Also records the web-facing path of the cached image in
        ``self.image_src``. Does nothing when no cache subfolder is set.

        :param image_url: Source URL
        """
        if not self.cache_subfolder:
            return

        # posixpath keeps forward slashes since this path is served over HTTP
        self.image_src = posixpath.join("images", self.cache_subfolder,
                                        os.path.basename(image_url))

        path = os.path.abspath(
            os.path.join(settings.CACHE_DIR, "images", self.cache_subfolder))

        # exist_ok avoids the race between a separate exists() check
        # and makedirs() (another thread/process creating the dir in between)
        os.makedirs(path, exist_ok=True)

        full_path = os.path.join(path, os.path.basename(image_url))

        # only download when there is no cached copy yet
        if not os.path.isfile(full_path):
            helpers.download_file(image_url, full_path, session=self.session)
Example #4
0
    def download_result(self, result):
        """Download *result* to disk; return True once a saved copy verifies."""
        # Authenticate first — nothing to do if login fails.
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for candidate in urls:
            if 'NO_DOWNLOAD_NAME' in candidate:
                continue

            # A tuple carries an explicit (url, referer) pair; otherwise the
            # referer is derived from the scheme+host portion of the url.
            if isinstance(candidate, tuple):
                referer = candidate[1]
                candidate = candidate[0]
            else:
                referer = '/'.join(candidate.split('/')[:3]) + '/'

            if candidate.startswith('http'):
                self.headers.update({'Referer': referer})

            logger.info('Downloading a result from {0} at {1}'.format(
                self.name, candidate))

            saved_name = download_file(
                candidate,
                filename,
                session=self.session,
                headers=self.headers,
                hooks={'response': self.get_url_hook},
                return_filename=True)

            if not saved_name:
                continue

            if self._verify_download(saved_name):
                logger.info('Saved result to {0}'.format(saved_name))
                return True

            # Downloaded file failed verification — discard and try next url.
            logger.warning('Could not download {0}'.format(candidate))
            remove_file_failed(saved_name)

        if urls:
            logger.warning('Failed to download any results')

        return False
Example #5
0
    def download_result(self, result):
        """Save *result* to disk, returning True on a verified download."""
        if not self.login():
            # cannot authenticate -> cannot download anything
            return False

        urls, filename = self._make_url(result)

        for link in urls:
            if "NO_DOWNLOAD_NAME" in link:
                continue

            if isinstance(link, tuple):
                # provider supplied an explicit (url, referer) pair
                referer = link[1]
                link = link[0]
            else:
                # fall back to scheme+host of the url as the referer
                referer = "/".join(link.split("/")[:3]) + "/"

            if link.startswith("http"):
                self.headers.update({"Referer": referer})

            logger.info("Downloading a result from {0} at {1}".format(
                self.name, link))

            result_file = download_file(
                link,
                filename,
                session=self.session,
                headers=self.headers,
                hooks={"response": self.get_url_hook},
                return_filename=True)

            if not result_file:
                continue

            if self._verify_download(result_file):
                logger.info("Saved result to {0}".format(result_file))
                return True

            # verification failed — remove the bad file and keep trying
            logger.warning("Could not download {0}".format(link))
            remove_file_failed(result_file)

        if urls:
            logger.warning("Failed to download any results")

        return False
Example #6
0
    def download_result(self, result):
        """
        Save the result to disk.

        Search results point to show sheets rather than torrent files, so
        each sheet is fetched and parsed for the actual .torrent link first.

        :param result: Search result to download
        :return: True once a verified download is saved, False otherwise
        """

        # check for auth
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            # Search results don't return torrent files directly, it returns show sheets so we must parse showSheet to access torrent.
            data = self.get_url(url, returns="text")
            if not data:
                # fetch failed or empty page — try the next url
                continue

            match = re.search(
                r"http://tumejorserie.com/descargar/.+\.torrent", data,
                re.DOTALL)
            if not match:
                # No torrent link on this sheet; previously .group() on a
                # failed search raised AttributeError and aborted the loop.
                logger.warning("Could not find a torrent link at {0}".format(url))
                continue

            url_torrent = match.group()

            if url_torrent.startswith("http"):
                self.headers.update(
                    {"Referer": "/".join(url_torrent.split("/")[:3]) + "/"})

            logger.info("Downloading a result from {0}".format(url))

            if helpers.download_file(url_torrent,
                                     filename,
                                     session=self.session,
                                     headers=self.headers):
                if self._verify_download(filename):
                    logger.info("Saved result to {0}".format(filename))
                    return True
                else:
                    logger.warning("Could not download {0}".format(url))
                    helpers.remove_file_failed(filename)

        if urls:
            logger.warning("Failed to download any results")

        return False
Example #7
0
    def update(self):
        """
        Downloads the latest source tarball from github and installs it over the existing version.

        :return: True on success, False on any failure (failures are logged).
        """

        tar_download_url = f"https://github.com/{settings.GIT_ORG}/{settings.GIT_REPO}/tarball/{self.branch}"

        try:
            # prepare the update dir
            sc_update_dir = Path(settings.DATA_DIR) / "sc-update"

            if sc_update_dir.is_dir():
                logger.info(
                    f"Clearing out update folder {sc_update_dir} before extracting"
                )
                shutil.rmtree(sc_update_dir)

            logger.info(
                f"Creating update folder {sc_update_dir} before extracting")
            sc_update_dir.mkdir()

            # retrieve file
            logger.info(f"Downloading update from {tar_download_url}")
            tar_download_path = (sc_update_dir / "sc-update.tar").resolve()

            helpers.download_file(str(tar_download_url),
                                  str(tar_download_path),
                                  session=self.session)

            if not tar_download_path.is_file():
                logger.warning(
                    f"Unable to retrieve new version from {tar_download_url}, can't update"
                )
                return False

            if not tarfile.is_tarfile(tar_download_path):
                logger.exception(
                    f"Retrieved version from {tar_download_url} is corrupt, can't update"
                )
                return False

            # extract to sc-update dir; the with-block closes the archive
            # even if extraction raises
            logger.info(f"Extracting file {tar_download_path}")
            with tarfile.open(tar_download_path) as tar:
                tar.extractall(sc_update_dir)

            # delete the downloaded tarball
            logger.info(f"Deleting file {tar_download_path}")
            tar_download_path.unlink()

            # find update dir name (the tarball contains a single top dir)
            update_dir_contents = [
                x for x in sc_update_dir.iterdir() if x.is_dir()
            ]

            if len(update_dir_contents) != 1:
                logger.exception(
                    f"Invalid update data, update failed: {str(update_dir_contents)}"
                )
                return False

            # iterdir() already yields full paths — re-joining onto
            # sc_update_dir would double the prefix whenever DATA_DIR is a
            # relative path (it only worked before because absolute right
            # operands make `/` discard the left side).
            content_dir = update_dir_contents[0]
            logger.info(
                f"Moving files from {content_dir} to {os.path.dirname(settings.PROG_DIR)}"
            )

            # walk temp folder and move files to main folder
            for dirname, stderr_, filenames in os.walk(content_dir):
                # make dirname relative to content_dir
                dirname = dirname[len(str(content_dir)) + 1:]
                for curfile in filenames:
                    old_path = content_dir / dirname / curfile
                    new_path = os.path.join(os.path.dirname(settings.PROG_DIR),
                                            dirname, curfile)

                    # overwrite existing files; os.renames also creates
                    # missing target dirs and prunes emptied source dirs
                    if os.path.isfile(new_path):
                        os.remove(new_path)
                    os.renames(old_path, new_path)

            settings.CUR_COMMIT_HASH = self._newest_commit_hash
            settings.CUR_COMMIT_BRANCH = self.branch

        except Exception as error:
            logger.exception(f"Error while trying to update: {error}")
            logger.debug(f"Traceback: {traceback.format_exc()}")
            return False

        # Notify update successful
        notifiers.notify_git_update(settings.CUR_COMMIT_HASH or "")
        return True
Example #8
0
    def update(self):
        """
        Downloads the latest source tarball from github and installs it over the existing version.
        """

        tar_download_url = 'https://github.com/{}/{}/tarball/{}'.format(
            settings.GIT_ORG, settings.GIT_REPO, self.branch)

        try:
            # prepare the update dir
            sr_update_dir = os.path.join(settings.DATA_DIR, 'sr-update')

            if os.path.isdir(sr_update_dir):
                logger.info(
                    "Clearing out update folder {} before extracting".format(
                        sr_update_dir))
                shutil.rmtree(sr_update_dir)

            logger.info(
                "Creating update folder {} before extracting".format(
                    sr_update_dir))
            os.makedirs(sr_update_dir)

            # retrieve file
            logger.info(
                "Downloading update from {url}".format(url=tar_download_url))
            tar_download_path = os.path.join(sr_update_dir, 'sr-update.tar')
            helpers.download_file(tar_download_url,
                                  tar_download_path,
                                  session=self.session)

            if not os.path.isfile(tar_download_path):
                logger.warning(
                    "Unable to retrieve new version from {}, can't update".format(
                        tar_download_url))
                return False

            if not tarfile.is_tarfile(tar_download_path):
                logger.exception(
                    "Retrieved version from {} is corrupt, can't update".format(
                        tar_download_url))
                return False

            # extract to sr-update dir
            logger.info("Extracting file {}".format(tar_download_path))
            with tarfile.open(tar_download_path) as archive:
                archive.extractall(sr_update_dir)

            # delete .tar.gz
            logger.info("Deleting file {}".format(tar_download_path))
            os.remove(tar_download_path)

            # find update dir name (tarball should contain one top dir)
            extracted_dirs = [
                entry for entry in os.listdir(sr_update_dir)
                if os.path.isdir(os.path.join(sr_update_dir, entry))
            ]

            if len(extracted_dirs) != 1:
                logger.exception(
                    "Invalid update data, update failed: {}".format(
                        str(extracted_dirs)))
                return False

            # walk temp folder and move files to main folder
            content_dir = os.path.join(sr_update_dir, extracted_dirs[0])
            install_root = os.path.dirname(settings.PROG_DIR)
            logger.info("Moving files from {} to {}".format(
                content_dir, install_root))
            for walk_dir, stderr_, walk_files in os.walk(content_dir):
                # rebase walk_dir so paths are relative to content_dir
                rel_dir = walk_dir[len(content_dir) + 1:]
                for entry in walk_files:
                    src_path = os.path.join(content_dir, rel_dir, entry)
                    dst_path = os.path.join(install_root, rel_dir, entry)

                    if os.path.isfile(dst_path):
                        os.remove(dst_path)
                    os.renames(src_path, dst_path)

            settings.CUR_COMMIT_HASH = self._newest_commit_hash
            settings.CUR_COMMIT_BRANCH = self.branch

        except Exception as error:
            logger.exception("Error while trying to update: {}".format(error))
            logger.debug("Traceback: {}".format(traceback.format_exc()))
            return False

        self._clean_libs()

        # Notify update successful
        notifiers.notify_git_update(settings.CUR_COMMIT_HASH or "")
        return True