def cache_image(self, image_url, default=None):
    """Store cache of image in cache dir.

    :param image_url: Source URL
    :param default: default folder
    """
    # A default folder short-circuits caching entirely: just point at
    # the bundled image and stop.
    if default:
        self.image_src = posixpath.join('images', default)
        return

    # Without a cache subfolder there is nowhere to store the image.
    if not self.cache_subfolder:
        return

    cache_dir = os.path.abspath(
        os.path.join(app.CACHE_DIR, 'images', self.cache_subfolder))
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    basename = os.path.basename(image_url)
    full_path = os.path.join(cache_dir, basename)

    # The web-facing source path always uses forward slashes,
    # hence posixpath instead of os.path.
    self.image_src = posixpath.join(
        'cache', 'images', self.cache_subfolder, basename)

    # Only hit the network when the cached copy is missing.
    if not os.path.isfile(full_path):
        helpers.download_file(image_url, full_path, session=self.session)
def download_result(self, result):
    """Download result from provider."""
    if not self.login():
        return False

    candidate_urls, filename = self._make_url(result)

    for candidate in candidate_urls:
        # Placeholder URLs carry no usable download name; skip them.
        if 'NO_DOWNLOAD_NAME' in candidate:
            continue

        if candidate.startswith('http'):
            referer = '/'.join(candidate.split('/')[:3]) + '/'
            self.headers.update({'Referer': referer})

        log.info('Downloading {result} from {provider} at {url}',
                 {'result': result.name, 'provider': self.name,
                  'url': candidate})

        # Public trackers get certificate verification disabled.
        verify = False if self.public else None

        downloaded = download_file(candidate, filename,
                                   session=self.session,
                                   headers=self.headers,
                                   verify=verify)
        if downloaded and self._verify_download(filename):
            log.info('Saved {result} to {location}',
                     {'result': result.name, 'location': filename})
            return True

    log.warning('Failed to download any results for {result}',
                {'result': result.name})
    return False
def download_result(self, result):
    """
    Download result from provider.

    This is used when a blackhole is used for sending the nzb file to the nzb client.
    For now the url and the post data is stored as one string in the db, using a pipe
    (|) to separate them.

    :param result: A SearchResult object.
    :return: The result of the nzb download (True/False).
    """
    if not self.login():
        return False

    result_name = sanitize_filename(result.name)
    filename = join(self._get_storage_dir(),
                    result_name + '.' + self.provider_type)

    if result.url.startswith('http'):
        self.session.headers.update(
            {'Referer': '/'.join(result.url.split('/')[:3]) + '/'})

    log.info('Downloading {result} from {provider} at {url}', {
        'result': result.name,
        'provider': self.name,
        'url': result.url
    })

    # Public trackers get certificate verification disabled.
    verify = False if self.public else None

    # The stored URL is '<download url>|<form field>'; the POST body
    # switches that field 'on' and requests the nzb action.
    url, data = result.url.split('|')
    data = {
        data.split('=')[1]: 'on',
        'action': 'nzb',
    }

    if download_file(url, filename, method='POST', data=data,
                     session=self.session, headers=self.headers,
                     verify=verify):
        if self._verify_download(filename):
            log.info('Saved {result} to {location}', {
                'result': result.name,
                'location': filename
            })
            return True

    # Consistency fix: the sibling download_result implementations log a
    # warning when nothing could be downloaded; this one silently
    # returned False before.
    log.warning('Failed to download any results for {result}',
                {'result': result.name})
    return False
def download_result(self, result):
    """Download result from provider."""
    if not self.login():
        return False

    # Magnet links are handed off immediately; nothing to download.
    if result.url.startswith('magnet'):
        return self._save_magnet(result)

    urls, filename = self._make_url(result)

    for url in urls:
        # Placeholder URLs carry no usable download name; skip them.
        if 'NO_DOWNLOAD_NAME' in url:
            continue

        if url.startswith('http'):
            self.headers.update(
                {'Referer': '/'.join(url.split('/')[:3]) + '/'})

        log.info('Downloading {result} from {provider} at {url}', {
            'result': result.name,
            'provider': self.name,
            'url': url
        })

        # Public trackers get certificate verification disabled.
        verify = False if self.public else None

        # BUG FIX: the template previously read '(unknown).{provider_type}'
        # so the filename keyword was never interpolated and every result
        # was saved literally as '(unknown).<type>'.
        filename_ext = '{filename}.{provider_type}'.format(
            filename=filename,
            provider_type=result.provider.provider_type)

        if download_file(url, filename_ext, session=self.session,
                         headers=self.headers, verify=verify):
            if self._verify_download(filename_ext):
                log.info('Saved {result} to {location}', {
                    'result': result.name,
                    'location': filename_ext
                })
                return True

    # NOTE(review): this branch looks unreachable — magnet URLs already
    # returned via _save_magnet at the top ('magnet:' startswith 'magnet').
    # Kept as-is for parity; confirm intent before removing.
    if result.url.startswith('magnet:') and app.SAVE_MAGNET_FILE:
        if self.create_magnet(filename, result):
            return True

    log.warning('Failed to download any results for {result}',
                {'result': result.name})
    return False
def cache_image(self, image_url, default=None):
    """Store cache of image in cache dir.

    :param image_url: Source URL
    :param default: default folder
    """
    if default:
        # A default folder means no caching: point at the bundled image.
        self.image_src = posixpath.join('images', default)
    elif self.cache_subfolder:
        subfolder = self.cache_subfolder
        target_dir = os.path.abspath(
            os.path.join(app.CACHE_DIR, 'images', subfolder))
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)

        name = os.path.basename(image_url)
        destination = os.path.join(target_dir, name)

        # Web-facing path always uses forward slashes (posixpath).
        self.image_src = posixpath.join('cache', 'images', subfolder, name)

        # Fetch only when we do not already hold a cached copy.
        if not os.path.isfile(destination):
            helpers.download_file(image_url, destination,
                                  session=self.session)
def download_result(self, result):
    """Save the result to disk."""
    # check for auth
    if not self.login():
        return False

    urls, filename = self._make_url(result)

    for url in urls:
        # Search results don't return torrent files directly,
        # it returns show sheets so we must parse showSheet to access torrent.
        response = self.get_url(url, returns='response')

        # BUG FIX: get_url can return None and the page may not contain a
        # torrent link; both cases previously raised AttributeError on
        # .group(). Skip to the next candidate URL instead of crashing.
        if not response or not response.text:
            log.warning('Could not download {0}', url)
            continue

        match = re.search(
            r'http://tumejorserie.com/descargar/.+\.torrent',
            response.text, re.DOTALL)
        if not match:
            log.warning('Could not download {0}', url)
            continue

        url_torrent = match.group()

        if url_torrent.startswith('http'):
            self.headers.update(
                {'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})

        log.info('Downloading a result from {0}', url)

        if helpers.download_file(url_torrent, filename,
                                 session=self.session,
                                 headers=self.headers):
            if self._verify_download(filename):
                log.info('Saved result to {0}', filename)
                return True
        else:
            log.warning('Could not download {0}', url)
            helpers.remove_file_failed(filename)

    if urls:
        log.warning('Failed to download any results')

    return False
def update(self):
    """Download the latest source tarball from github and installs it over the existing version."""
    tar_download_url = ('http://github.com/' + self.github_org + '/' +
                        self.github_repo + '/tarball/' + self.branch)

    try:
        # prepare the update dir
        app_update_dir = os.path.join(app.PROG_DIR, u'medusa-update')

        if os.path.isdir(app_update_dir):
            log.info(u'Clearing out update folder {0!r} before extracting',
                     app_update_dir)
            shutil.rmtree(app_update_dir)

        # BUG FIX: this message previously duplicated the 'Clearing out'
        # line above; it runs right before makedirs, so it should
        # announce creation of the folder.
        log.info(u'Creating update folder {0!r} before extracting',
                 app_update_dir)
        os.makedirs(app_update_dir)

        # retrieve file
        log.info(u'Downloading update from {0!r}', tar_download_url)
        tar_download_path = os.path.join(app_update_dir,
                                         u'medusa-update.tar')
        helpers.download_file(tar_download_url, tar_download_path,
                              session=self.session)

        if not os.path.isfile(tar_download_path):
            log.warning(u"Unable to retrieve new version from {0!r}, "
                        u"can't update", tar_download_url)
            return False

        if not tarfile.is_tarfile(tar_download_path):
            log.warning(u"Retrieved version from {0!r} is corrupt, "
                        u"can't update", tar_download_url)
            return False

        # extract to medusa-update dir
        log.info(u'Extracting file {0}', tar_download_path)
        # NOTE(review): extractall on a downloaded archive is vulnerable to
        # path traversal via '..' member names; consider validating members
        # (or a tarfile extraction filter) before extracting.
        # The context manager also fixes a handle leak when extractall
        # raised before tar.close() was reached.
        with tarfile.open(tar_download_path) as tar:
            tar.extractall(app_update_dir)

        # delete .tar.gz
        log.info(u'Deleting file {0}', tar_download_path)
        os.remove(tar_download_path)

        # find update dir name
        update_dir_contents = [
            x for x in os.listdir(app_update_dir)
            if os.path.isdir(os.path.join(app_update_dir, x))
        ]

        if len(update_dir_contents) != 1:
            log.warning(u'Invalid update data, update failed: {0}',
                        update_dir_contents)
            return False

        content_dir = os.path.join(app_update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        log.info(u'Moving files from {0} to {1}', content_dir, app.PROG_DIR)
        for dirname, _, filenames in os.walk(content_dir):  # @UnusedVariable
            dirname = dirname[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, dirname, curfile)
                new_path = os.path.join(app.PROG_DIR, dirname, curfile)

                # Avoid DLL access problem on WIN32/64
                # These files needing to be updated manually
                # or find a way to kill the access from memory
                extension = os.path.splitext(curfile)[1]
                if extension == '.dll':
                    try:
                        log.debug(u'Special handling for {0}', curfile)
                        os.chmod(new_path, stat.S_IWRITE)
                        os.remove(new_path)
                        os.renames(old_path, new_path)
                    except Exception as e:
                        log.debug(u'Unable to update {0}: {1!r}',
                                  new_path, e)
                        # Trash the updated file without moving in new path
                        os.remove(old_path)
                    continue

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)

        app.CUR_COMMIT_HASH = self._newest_commit_hash
        app.CUR_COMMIT_BRANCH = self.branch
    except Exception as e:
        log.exception(u'Error while trying to update: {0}', e)
        return False

    # Notify update successful
    try:
        notifiers.notify_git_update(app.CUR_COMMIT_HASH or '')
    except Exception:
        log.debug(u'Unable to send update notification. '
                  u'Continuing the update process')
    return True
def update(self):
    """Download the latest source tarball from github and installs it over the existing version."""
    tar_download_url = ('http://github.com/' + self.github_org + '/' +
                        self.github_repo + '/tarball/' + self.branch)

    try:
        # prepare the update dir
        app_update_dir = os.path.join(app.PROG_DIR, u'medusa-update')

        if os.path.isdir(app_update_dir):
            log.info(u'Clearing out update folder {0!r} before extracting',
                     app_update_dir)
            shutil.rmtree(app_update_dir)

        # BUG FIX: this message previously duplicated the 'Clearing out'
        # line above; it runs right before makedirs, so it should
        # announce creation of the folder.
        log.info(u'Creating update folder {0!r} before extracting',
                 app_update_dir)
        os.makedirs(app_update_dir)

        # retrieve file
        log.info(u'Downloading update from {0!r}', tar_download_url)
        tar_download_path = os.path.join(app_update_dir,
                                         u'medusa-update.tar')
        helpers.download_file(tar_download_url, tar_download_path,
                              session=self.session)

        if not os.path.isfile(tar_download_path):
            log.warning(u"Unable to retrieve new version from {0!r}, "
                        u"can't update", tar_download_url)
            return False

        if not tarfile.is_tarfile(tar_download_path):
            log.warning(u"Retrieved version from {0!r} is corrupt, "
                        u"can't update", tar_download_url)
            return False

        # extract to medusa-update dir
        log.info(u'Extracting file {0}', tar_download_path)
        # NOTE(review): extractall on a downloaded archive is vulnerable to
        # path traversal via '..' member names; consider validating members
        # (or a tarfile extraction filter) before extracting.
        # The context manager also fixes a handle leak when extractall
        # raised before tar.close() was reached.
        with tarfile.open(tar_download_path) as tar:
            tar.extractall(app_update_dir)

        # delete .tar.gz
        log.info(u'Deleting file {0}', tar_download_path)
        os.remove(tar_download_path)

        # find update dir name
        update_dir_contents = [
            x for x in os.listdir(app_update_dir)
            if os.path.isdir(os.path.join(app_update_dir, x))
        ]

        if len(update_dir_contents) != 1:
            log.warning(u'Invalid update data, update failed: {0}',
                        update_dir_contents)
            return False

        content_dir = os.path.join(app_update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        log.info(u'Moving files from {0} to {1}', content_dir, app.PROG_DIR)
        for dirname, _, filenames in os.walk(content_dir):  # @UnusedVariable
            dirname = dirname[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, dirname, curfile)
                new_path = os.path.join(app.PROG_DIR, dirname, curfile)

                # Avoid DLL access problem on WIN32/64
                # These files needing to be updated manually
                # or find a way to kill the access from memory
                extension = os.path.splitext(curfile)[1]
                if extension == '.dll':
                    try:
                        log.debug(u'Special handling for {0}', curfile)
                        os.chmod(new_path, stat.S_IWRITE)
                        os.remove(new_path)
                        os.renames(old_path, new_path)
                    except Exception as e:
                        log.debug(u'Unable to update {0}: {1!r}',
                                  new_path, e)
                        # Trash the updated file without moving in new path
                        os.remove(old_path)
                    continue

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)

        app.CUR_COMMIT_HASH = self._newest_commit_hash
        app.CUR_COMMIT_BRANCH = self.branch
    except Exception as e:
        log.exception(u'Error while trying to update: {0}', e)
        return False

    # Notify update successful
    try:
        notifiers.notify_git_update(app.CUR_COMMIT_HASH or '')
    except Exception:
        log.debug(u'Unable to send update notification. '
                  u'Continuing the update process')
    return True