def run(self):
    """Portable-build entry point: force data/config under the portable dir,
    bootstrap options and settings, then hand off to runCouchPotato.
    Always closes the desktop frame afterwards."""

    # Get options via arg
    from couchpotato.runner import getOptions

    portable_path = os.path.join(base_path, '../..')
    #args = ['--quiet','--data_dir=' + portable_path + '/CouchPotatoData','--config_file=' + portable_path + '/CouchPotatoData/settings.conf']
    args = ['--debug','--data_dir=' + portable_path + '/CouchPotatoData','--config_file=' + portable_path + '/CouchPotatoData/settings.conf']
    self.options = getOptions(portable_path, args)

    # Load settings
    settings = Env.get('settings')
    settings.setFile(self.options.config_file)

    # Create data dir if needed
    self.data_dir = os.path.expanduser(Env.setting('data_dir'))
    if self.data_dir == '':
        from couchpotato.core.helpers.variable import getDataDir
        # NOTE(review): dead assignment -- immediately overwritten by the
        # portable path on the next line; leftover from the non-portable build
        self.data_dir = getDataDir()
        self.data_dir = portable_path + '/CouchPotatoData'

    if not os.path.isdir(self.data_dir):
        os.makedirs(self.data_dir)

    # Create logging dir
    self.log_dir = os.path.join(self.data_dir, 'logs');
    if not os.path.isdir(self.log_dir):
        os.mkdir(self.log_dir)

    try:
        from couchpotato.runner import runCouchPotato
        runCouchPotato(self.options, base_path, args,
            data_dir = self.data_dir, log_dir = self.log_dir,
            Env = Env, desktop = self._desktop)
    except:
        # Intentionally swallowed: the desktop frame is closed below either way
        pass

    self._desktop.frame.Close()
def doUpdate(self):
    """Download the configured branch tarball from GitHub, extract it and
    replace the current install with its contents.

    Returns True on success. On any failure the traceback is logged,
    self.update_failed is set and False is returned.
    """
    try:
        url = 'https://github.com/%s/%s/tarball/%s' % (self.repo_user, self.repo_name, self.branch)
        destination = os.path.join(Env.get('cache_dir'), self.update_version.get('hash') + '.tar.gz')
        extracted_path = os.path.join(Env.get('cache_dir'), 'temp_updater')

        destination = fireEvent('file.download', url = url, dest = destination, single = True)

        # Cleanup leftover from last time
        if os.path.isdir(extracted_path):
            self.removeDir(extracted_path)
        self.makeDir(extracted_path)

        # Extract
        tar = tarfile.open(destination)
        tar.extractall(path = extracted_path)
        tar.close()
        os.remove(destination)

        # GitHub tarballs contain a single top-level folder; install its contents
        if self.replaceWith(os.path.join(extracted_path, os.listdir(extracted_path)[0])):
            self.removeDir(extracted_path)

            # Write update version to file
            self.createFile(self.version_file, json.dumps(self.update_version))

            return True
    except:
        log.error('Failed updating: %s', traceback.format_exc())

    self.update_failed = True

    return False
def __init__(self):
    """Pick the updater backend for this install (desktop build, git
    checkout, or plain source) and wire up scheduler, events and API views."""
    if Env.get('desktop'):
        self.updater = DesktopUpdater()
    elif os.path.isdir(os.path.join(Env.get('app_dir'), '.git')):
        # Running from a git checkout: update via the git command
        self.updater = GitUpdater(self.conf('git_command', default = 'git'))
    else:
        self.updater = SourceUpdater()

    # Check for updates every 6 hours and once right after startup
    fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
    addEvent('app.load', self.autoUpdate)
    addEvent('updater.info', self.info)

    addApiView('updater.info', self.getInfo, docs = {
        'desc': 'Get updater information',
        'return': {
            'type': 'object',
            'example': """{
    'last_check': "last checked for update",
    'update_version': "available update version or empty",
    'version': current_cp_version
}"""}
    })
    addApiView('updater.update', self.doUpdateView)
    addApiView('updater.check', self.checkView, docs = {
        'desc': 'Check for available update',
        'return': {'type': 'see updater.info'}
    })
def autoUpdate(self):
    """Rate-limited automatic update: at most once per 12 hours, and only
    when enabled, an update is available, 'automatic' is on and the last
    update attempt did not fail. Notifies and restarts on success."""
    should_check = True
    try:
        previous = tryInt(Env.prop(self.last_check, default = 0))
        current = tryInt(time.time())
        should_check = previous < current - 43200  # 12 hours
        if should_check:
            Env.prop(self.last_check, value = current)
    except:
        log.error('Failed checking last time to update: %s', traceback.format_exc())

    # Order matters: self.check() contacts the update source, so keep it
    # behind the cheap gates
    wants_update = (should_check and self.isEnabled() and self.check()
                    and self.conf('automatic') and not self.updater.update_failed)
    if not wants_update:
        return False

    if not self.updater.doUpdate():
        return False

    # Notify before restarting
    try:
        if self.conf('notification'):
            info = self.updater.info()
            version_date = datetime.fromtimestamp(info['update_version']['date'])
            fireEvent('updater.updated', 'CouchPotato: Updated to a new version with hash "%s", this version is from %s' % (info['update_version']['hash'], version_date), data = info)
    except:
        log.error('Failed notifying for update: %s', traceback.format_exc())

    fireEventAsync('app.restart')
    return True
def decorated(*args, **kwargs):
    """Basic-auth gate: when a username/password pair is configured, reject
    the request with an auth challenge unless valid credentials are sent."""
    auth = getattr(request, 'authorization')
    username = Env.setting('username')
    password = Env.setting('password')

    if username and password:
        # check_auth is only reached when credentials were actually sent
        credentials_ok = bool(auth) and check_auth(
            auth.username.decode('latin1'),
            md5(auth.password.decode('latin1').encode(Env.get('encoding'))))
        if not credentials_ok:
            return authenticate()

    return f(*args, **kwargs)
def createBaseUrl(self):
    """Build 'host:port' + web base path; a wildcard or empty bind host is
    rewritten to localhost so the URL is usable in a browser."""
    configured_host = Env.setting('host')
    host = 'localhost' if configured_host in ('0.0.0.0', '') else configured_host
    port = int(Env.setting('port'))
    return '%s:%d%s' % (cleanHost(host).rstrip('/'), port, Env.get('web_base'))
def suggestView(self, limit = 6, **kwargs):
    """API view: return up to *limit* movie suggestions.

    Optional kwargs (comma-separated strings): movies, ignored, seen.
    Suggestions are cached; on a cache miss the active/done library is
    used as the seed list.
    """
    movies = splitString(kwargs.get('movies', ''))
    ignored = splitString(kwargs.get('ignored', ''))
    seen = splitString(kwargs.get('seen', ''))

    cached_suggestion = self.getCache('suggestion_cached')
    if cached_suggestion:
        suggestions = cached_suggestion
    else:
        if not movies or len(movies) == 0:
            # Seed with identifiers of all active/done movies in the library
            db = get_session()
            active_movies = db.query(Movie) \
                .options(joinedload_all('library')) \
                .filter(or_(*[Movie.status.has(identifier = s) for s in ['active', 'done']])).all()
            movies = [x.library.identifier for x in active_movies]

        if not ignored or len(ignored) == 0:
            ignored = splitString(Env.prop('suggest_ignore', default = ''))
        if not seen or len(seen) == 0:
            # Seen movies are treated as part of the seed list
            movies.extend(splitString(Env.prop('suggest_seen', default = '')))

        suggestions = fireEvent('movie.suggest', movies = movies, ignore = ignored, single = True)
        self.setCache('suggestion_cached', suggestions, timeout = 6048000)  # Cache for 10 weeks

    return {
        'success': True,
        'count': len(suggestions),
        'suggestions': suggestions[:int(limit)]
    }
def getCache(self, cache_key, url = None, **kwargs):
    """Return cached data for *cache_key*; on a miss optionally fetch *url*,
    cache the response and return it.

    Control kwargs (always removed before the request):
      cache_timeout -- cache lifetime in seconds (default 300)
      opener        -- a pre-built opener used instead of self.urlopen
    Remaining kwargs are passed through to self.urlopen.

    Fix: the previous `if kwargs.get(...)` / `del` pattern only removed
    these keys when their values were truthy, so falsy values leaked into
    the urlopen call; pop() removes them unconditionally. The silent
    `except: pass` now logs a debug traceback.
    """
    cache_key = simplifyString(cache_key)
    cache = Env.get('cache').get(cache_key)
    if cache:
        if not Env.get('dev'):
            log.debug('Getting cache %s', cache_key)
        return cache

    if url:
        try:
            cache_timeout = kwargs.pop('cache_timeout', 300)
            opener = kwargs.pop('opener', None)

            if opener:
                log.info('Opening url: %s', url)
                f = opener.open(url)
                data = f.read()
                f.close()
            else:
                data = self.urlopen(url, **kwargs)

            if data:
                self.setCache(cache_key, data, timeout = cache_timeout)
            return data
        except:
            # Best effort: a failed fetch simply yields no cached data
            log.debug('Failed getting cache %s: %s', (cache_key, traceback.format_exc(0)))
def checkDataDir(self):
    """Warn loudly when the data dir lives inside the application dir, since
    updates would then overwrite or delete user data. Always returns True."""
    app_dir = Env.get("app_dir")
    data_dir = Env.get("data_dir")
    if app_dir in data_dir:
        log.error(
            "You should NOT use your CouchPotato directory to save your settings in. Files will get overwritten or be deleted."
        )
    return True
def getDomain(self, url = ''):
    """Return a working TPB proxy base joined with *url*.

    Probes each configured proxy until one serves the Pirate Bay search
    page; proxies that failed within the last two weeks are skipped.
    Returns None when no proxy works.
    """
    if not self.domain:
        for proxy in self.proxy_list:

            prop_name = 'tpb_proxy.%s' % proxy
            last_check = float(Env.prop(prop_name, default = 0))
            # Skip proxies that failed within the last two weeks (1209600 s)
            if last_check > time.time() - 1209600:
                continue

            data = ''
            try:
                data = self.urlopen(proxy, timeout = 3, show_error = False)
            except:
                log.debug('Failed tpb proxy %s', proxy)

            # A real TPB mirror serves this marker on its front page
            if 'title="Pirate Search"' in data:
                log.debug('Using proxy: %s', proxy)
                self.domain = proxy
                break

            # Remember the failed attempt so this proxy is skipped for a while
            Env.prop(prop_name, time.time())

    if not self.domain:
        log.error('No TPB proxies left, please add one in settings, or let us know which one to add on the forum.')
        return None

    return cleanHost(self.domain).rstrip('/') + url
def getDomain(self, url = ''):
    """Return a working proxy base for this provider joined with *url*.

    A forced domain from settings wins. Otherwise each configured proxy is
    probed (skipping ones that failed within the last two weeks) until
    correctProxy() accepts the response. Returns None when none work.
    """
    forced_domain = self.conf('domain')
    if forced_domain:
        return cleanHost(forced_domain).rstrip('/') + url

    if not self.proxy_domain:
        for proxy in self.proxy_list:

            prop_name = 'proxy.%s' % proxy
            last_check = float(Env.prop(prop_name, default = 0))
            # Skip proxies that failed within the last two weeks (1209600 s)
            if last_check > time.time() - 1209600:
                continue

            data = ''
            try:
                data = self.urlopen(proxy, timeout = 3, show_error = False)
            except:
                log.debug('Failed %s proxy %s', (self.getName(), proxy))

            if self.correctProxy(data):
                log.debug('Using proxy for %s: %s', (self.getName(), proxy))
                self.proxy_domain = proxy
                break

            # Remember the failed attempt so this proxy is skipped for a while
            Env.prop(prop_name, time.time())

    if not self.proxy_domain:
        log.error('No %s proxies left, please add one in settings, or let us know which one to add on the forum.', self.getName())
        return None

    return cleanHost(self.proxy_domain).rstrip('/') + url
def updateLibrary(self, full = True):
    """Scan all configured manage directories for movie folders and, when
    the cleanup option is on and *full* is True, delete done movies whose
    files are no longer on disk.

    Fix: log calls now use lazy %-style logger arguments instead of eager
    string formatting, matching the rest of the codebase.
    """
    last_update = float(Env.prop('manage.last_update', default = 0))

    # Rate-limit: skip when disabled or scanned less than 20 seconds ago
    if self.isDisabled() or (last_update > time.time() - 20):
        return

    directories = self.directories()
    added_identifiers = []

    for directory in directories:
        if not os.path.isdir(directory):
            if len(directory) > 0:
                log.error('Directory doesn\'t exist: %s', directory)
            continue

        log.info('Updating manage library: %s', directory)
        identifiers = fireEvent('scanner.folder', folder = directory, newer_than = last_update, single = True)
        added_identifiers.extend(identifiers)

        # Break if CP wants to shut down
        if self.shuttingDown():
            break

    # If cleanup option is enabled, remove offline files from database
    if self.conf('cleanup') and full and not self.shuttingDown():

        # Get movies with done status
        done_movies = fireEvent('movie.list', status = 'done', single = True)

        for done_movie in done_movies:
            if done_movie['library']['identifier'] not in added_identifiers:
                fireEvent('movie.delete', movie_id = done_movie['id'])

    Env.prop('manage.last_update', time.time())
def suggestView(self, limit = 6, **kwargs):
    """API view: return up to *limit* movie suggestions, cached for ten weeks.

    Optional kwargs (comma-separated strings): movies, ignored, seen.
    On a cache miss the active/done media library seeds the request.
    """
    movies = splitString(kwargs.get('movies', ''))
    ignored = splitString(kwargs.get('ignored', ''))
    seen = splitString(kwargs.get('seen', ''))

    suggestions = self.getCache('suggestion_cached')
    if not suggestions:
        if not movies:
            active_movies = fireEvent('media.with_status', ['active', 'done'], single = True)
            movies = [getIdentifier(m) for m in active_movies]

        if not ignored:
            ignored = splitString(Env.prop('suggest_ignore', default = ''))
        if not seen:
            # Previously-seen movies count as part of the seed list
            movies.extend(splitString(Env.prop('suggest_seen', default = '')))

        suggestions = fireEvent('movie.suggest', movies = movies, ignore = ignored, single = True)
        self.setCache('suggestion_cached', suggestions, timeout = 6048000)  # Cache for 10 weeks

    return {
        'success': True,
        'count': len(suggestions),
        'suggestions': suggestions[:int(limit)]
    }
def __init__(self):
    """Desktop bootstrap: parse CLI options, load the settings file, make
    sure the data/log dirs exist and attach a rotating error-log handler."""

    # Get options via arg
    from couchpotato.runner import getOptions
    self.options = getOptions(base_path, sys.argv[1:])

    # Load settings
    settings = Env.get('settings')
    settings.setFile(self.options.config_file)

    # Create data dir if needed
    self.data_dir = os.path.expanduser(Env.setting('data_dir'))
    if self.data_dir == '':
        self.data_dir = getDataDir()

    if not os.path.isdir(self.data_dir):
        os.makedirs(self.data_dir)

    # Create logging dir
    self.log_dir = os.path.join(self.data_dir, 'logs');
    if not os.path.isdir(self.log_dir):
        os.mkdir(self.log_dir)

    # Logging
    from couchpotato.core.logger import CPLog
    self.log = CPLog(__name__)

    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%H:%M:%S')
    # Keep up to 10 rotated error logs of ~500 KB each; CRITICAL-only
    hdlr = handlers.RotatingFileHandler(os.path.join(self.log_dir, 'error.log'), 'a', 500000, 10)
    hdlr.setLevel(logging.CRITICAL)
    hdlr.setFormatter(formatter)
    self.log.logger.addHandler(hdlr)
def getCache(self, cache_key, url=None, **kwargs):
    """Return cached data for *cache_key*; on a miss optionally fetch *url*,
    cache it (unless the request posts data/files) and return it.

    Returns '' on a failed fetch, unless show_error=False was passed, in
    which case the exception propagates.
    """
    # Requests with a body or attached files are never served from cache
    use_cache = len(kwargs.get("data", {})) == 0 and not kwargs.get("files")

    if use_cache:
        hashed_key = md5(cache_key)
        cached = Env.get("cache").get(hashed_key)
        if cached:
            if not Env.get("dev"):
                log.debug("Getting cache %s", cache_key)
            return cached

    if not url:
        return

    try:
        # pop() removes the control kwarg before it reaches urlopen
        cache_timeout = kwargs.pop("cache_timeout", 300)

        data = self.urlopen(url, **kwargs)
        if data and cache_timeout > 0 and use_cache:
            self.setCache(cache_key, data, timeout=cache_timeout)
        return data
    except:
        if not kwargs.get("show_error", True):
            raise
        log.debug("Failed getting cache: %s", (traceback.format_exc(0)))
        return ""
def registerStatic(self, plugin_file, add_to_head=True):
    """Expose this plugin's static/ folder through Tornado and optionally
    register its js/css assets for inclusion in the HTML head."""
    # Register plugin path
    self.plugin_path = os.path.dirname(plugin_file)
    static_folder = toUnicode(os.path.join(self.plugin_path, "static"))

    if not os.path.isdir(static_folder):
        return

    # Derive snake_case plugin name from the CamelCase class name
    underscored = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", self.__class__.__name__)
    class_name = re.sub("([a-z0-9])([A-Z])", r"\1_\2", underscored).lower()

    # View path
    path = "static/plugin/%s/" % class_name

    # Add handler to Tornado
    handler = (Env.get("web_base") + path + "(.*)", StaticFileHandler, {"path": static_folder})
    Env.get("app").add_handlers(".*$", [handler])

    # Register for HTML <HEAD>
    if not add_to_head:
        return

    for asset in glob.glob(os.path.join(self.plugin_path, "static", "*")):
        ext = getExt(asset)
        if ext not in ["js", "css"]:
            continue
        event_name = "register_%s" % ("script" if ext in "js" else "style")
        fireEvent(event_name, path + os.path.basename(asset), asset)
def getCache(self, cache_key, url = None, **kwargs):
    """Return cached data for *cache_key*; on a miss optionally fetch *url*,
    cache the response and return it.

    Caching is skipped entirely for requests carrying data/files (POST-like).
    Returns '' on a failed fetch unless show_error=False was passed, in
    which case the exception propagates.
    """
    use_cache = not len(kwargs.get('data', {})) > 0 and not kwargs.get('files')

    if use_cache:
        cache_key_md5 = md5(cache_key)
        cache = Env.get('cache').get(cache_key_md5)
        if cache:
            if not Env.get('dev'):
                log.debug('Getting cache %s', cache_key)
            return cache

    if url:
        try:
            # Strip the control kwarg before passing the rest to urlopen
            cache_timeout = 300
            if 'cache_timeout' in kwargs:
                cache_timeout = kwargs.get('cache_timeout')
                del kwargs['cache_timeout']

            data = self.urlopen(url, **kwargs)
            if data and cache_timeout > 0 and use_cache:
                self.setCache(cache_key, data, timeout = cache_timeout)
            return data
        except:
            if not kwargs.get('show_error', True):
                raise
            log.debug('Failed getting cache: %s', (traceback.format_exc(0)))
            return ''
def registerStatic(self, plugin_file, add_to_head = True):
    """Serve this plugin's static/ folder through Tornado and optionally
    register its js/css files for inclusion in the HTML head."""

    # Register plugin path
    self.plugin_path = os.path.dirname(plugin_file)
    static_folder = toUnicode(os.path.join(self.plugin_path, 'static'))

    if not os.path.isdir(static_folder):
        return

    # Get plugin_name from PluginName (CamelCase -> snake_case)
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', self.__class__.__name__)
    class_name = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

    # View path
    path = 'static/plugin/%s/' % class_name

    # Add handler to Tornado
    Env.get('app').add_handlers(".*$", [(Env.get('web_base') + path + '(.*)', StaticFileHandler, {'path': static_folder})])

    # Register for HTML <HEAD>
    if add_to_head:
        for f in glob.glob(os.path.join(self.plugin_path, 'static', '*')):
            ext = getExt(f)
            if ext in ['js', 'css']:
                fireEvent('register_%s' % ('script' if ext in 'js' else 'style'), path + os.path.basename(f), f)
def getCache(self, cache_key, url = None, **kwargs):
    """Return cached data for the md5 of *cache_key*; on a miss optionally
    fetch *url*, cache the response and return it.

    Returns '' on a failed fetch unless show_error=False was passed, in
    which case the exception propagates.

    Fix: 'cache_timeout' was only removed from kwargs when its value was
    truthy, so a falsy value leaked into the urlopen call; pop() removes
    it unconditionally.
    """
    cache_key = md5(ss(cache_key))
    cache = Env.get('cache').get(cache_key)
    if cache:
        if not Env.get('dev'):
            log.debug('Getting cache %s', cache_key)
        return cache

    if url:
        try:
            cache_timeout = kwargs.pop('cache_timeout', 300)

            data = self.urlopen(url, **kwargs)
            if data:
                self.setCache(cache_key, data, timeout = cache_timeout)
            return data
        except:
            if not kwargs.get('show_error', True):
                raise
            return ''
def getUserScript(self, script_route, **kwargs):
    """Register a Tornado handler at *script_route* that renders the
    userscript template, writes couchpotato.user.js to the cache dir and
    redirects the request to the file.cache endpoint that serves it."""
    klass = self

    class UserscriptHandler(RequestHandler):

        def get(self, random, route):
            # Prefer an explicitly configured bookmarklet host; otherwise
            # derive it from the request (honoring X-Forwarded-Host)
            bookmarklet_host = Env.setting('bookmarklet_host')
            loc = bookmarklet_host if bookmarklet_host else "{0}://{1}".format(self.request.protocol, self.request.headers.get('X-Forwarded-Host') or self.request.headers.get('host'))

            params = {
                'includes': fireEvent('userscript.get_includes', merge = True),
                'excludes': fireEvent('userscript.get_excludes', merge = True),
                'version': klass.getVersion(),
                'api': '%suserscript/' % Env.get('api_base'),
                'host': loc,
            }

            script = klass.renderTemplate(__file__, 'template.js_tmpl', **params)
            klass.createFile(os.path.join(Env.get('cache_dir'), 'couchpotato.user.js'), script)

            self.redirect(Env.get('api_base') + 'file.cache/couchpotato.user.js')

    Env.get('app').add_handlers(".*$", [('%s%s' % (Env.get('api_base'), script_route), UserscriptHandler)])
def __init__(self):
    """Pick the updater backend for this install (desktop build, git
    checkout, or plain source) and wire up events, crons and API views."""
    if Env.get('desktop'):
        self.updater = DesktopUpdater()
    elif os.path.isdir(os.path.join(Env.get('app_dir'), '.git')):
        # Running from a git checkout: update via the git command
        self.updater = GitUpdater(self.conf('git_command', default = 'git'))
    else:
        self.updater = SourceUpdater()

    addEvent('app.load', self.logVersion, priority = 10000)
    addEvent('app.load', self.setCrons)
    addEvent('updater.info', self.info)

    addApiView('updater.info', self.info, docs = {
        'desc': 'Get updater information',
        'return': {
            'type': 'object',
            'example': """{
    'last_check': "last checked for update",
    'update_version': "available update version or empty",
    'version': current_cp_version
}"""}
    })
    addApiView('updater.update', self.doUpdateView)
    addApiView('updater.check', self.checkView, docs = {
        'desc': 'Check for available update',
        'return': {'type': 'see updater.info'}
    })

    # Re-evaluate the cron schedule when the enabled setting changes
    addEvent('setting.save.updater.enabled.after', self.setCrons)
def getCredentials(self, key):
    """Exchange the OAuth verifier *key* for a Twitter access token and
    persist it in the twitter settings section.

    Returns True on success, False when Twitter rejects the request.
    """
    # The request token/secret were stashed in the username/password slots
    request_token = {
        'oauth_token': self.conf('username'),
        'oauth_token_secret': self.conf('password'),
        'oauth_callback_confirmed': True
    }

    token = oauth2.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
    token.set_verifier(key)

    log.info('Generating and signing request for an access token using key: %s' % key)

    oauth_consumer = oauth2.Consumer(key = self.consumer_key, secret = self.consumer_secret)
    oauth_client = oauth2.Client(oauth_consumer, token)
    resp, content = oauth_client.request(self.url['access'], method = 'POST', body = 'oauth_verifier=%s' % key)
    access_token = dict(parse_qsl(content))

    if resp['status'] != '200':
        log.error('The request for an access token did not succeed: ' + str(resp['status']))
        return False
    else:
        log.info('Your Twitter access token is %s' % access_token['oauth_token'])
        log.info('Access token secret is %s' % access_token['oauth_token_secret'])

        # Store the access token pair in the same username/password slots
        Env.setting('username', section = 'twitter', value = access_token['oauth_token'])
        Env.setting('password', section = 'twitter', value = access_token['oauth_token_secret'])

        return True
def createBaseUrl(self):
    """Build 'host:port'; a wildcard bind host is rewritten to localhost so
    the URL is usable in a browser."""
    configured_host = Env.setting('host')
    host = 'localhost' if configured_host == '0.0.0.0' else configured_host
    return '%s:%d' % (cleanHost(host).rstrip('/'), int(Env.setting('port')))
def __init__(self):
    """Pick the updater backend for this install (desktop build, git
    checkout, or plain source) and wire up events, crons and API views."""
    if Env.get("desktop"):
        self.updater = DesktopUpdater()
    elif os.path.isdir(os.path.join(Env.get("app_dir"), ".git")):
        # Running from a git checkout: update via the git command
        self.updater = GitUpdater(self.conf("git_command", default="git"))
    else:
        self.updater = SourceUpdater()

    addEvent("app.load", self.setCrons)
    addEvent("updater.info", self.info)

    addApiView(
        "updater.info",
        self.getInfo,
        docs={
            "desc": "Get updater information",
            "return": {
                "type": "object",
                "example": """{
    'last_check': "last checked for update",
    'update_version': "available update version or empty",
    'version': current_cp_version
}""",
            },
        },
    )
    addApiView("updater.update", self.doUpdateView)
    addApiView(
        "updater.check",
        self.checkView,
        docs={"desc": "Check for available update", "return": {"type": "see updater.info"}},
    )

    # Re-evaluate the cron schedule when the enabled setting changes
    addEvent("setting.save.updater.enabled.after", self.setCrons)
def addMovies(self):
    """Add movies collected by the automation providers to the wanted list
    (skipping ones already handled before) and kick off a search for each
    newly added one. Always returns True."""
    imdb_ids = fireEvent('automation.get_movies', merge = True)
    new_movie_ids = []

    for imdb_id in imdb_ids:
        if self.shuttingDown():
            break

        prop_name = 'automation.added.%s' % imdb_id
        already_added = Env.prop(prop_name, default = False)
        if already_added:
            continue

        added_movie = fireEvent('movie.add', params = {'identifier': imdb_id}, force_readd = False, search_after = False, update_library = True, single = True)
        if added_movie:
            new_movie_ids.append(added_movie['id'])
        # Mark as handled so the same imdb id is not re-added next run
        Env.prop(prop_name, True)

    for movie_id in new_movie_ids:
        if self.shuttingDown():
            break

        movie_dict = fireEvent('media.get', movie_id, single = True)
        fireEvent('movie.searcher.single', movie_dict)

    return True
def safeMessage(self, msg, replace_tuple = ()):
    """Interpolate *replace_tuple* into *msg* (falling back to byte-safe
    re-encoding on failure) and scrub private data (query params, api key)
    outside dev mode.

    Fix: the regex pattern strings are now raw strings, avoiding invalid
    escape-sequence warnings; the compiled patterns are unchanged.
    """
    from couchpotato.environment import Env
    from couchpotato.core.helpers.encoding import ss

    msg = ss(msg)

    try:
        msg = msg % replace_tuple
    except:
        try:
            if isinstance(replace_tuple, tuple):
                msg = msg % tuple([ss(x) for x in list(replace_tuple)])
            else:
                msg = msg % ss(replace_tuple)
        except:
            self.logger.error(u'Failed encoding stuff to log: %s' % traceback.format_exc())

    if not Env.get('dev'):

        # Mask private query-string parameters (?param=... and &param=...)
        for replace in self.replace_private:
            msg = re.sub(r'(\?%s=)[^\&]+' % replace, '?%s=xxx' % replace, msg)
            msg = re.sub(r'(&%s=)[^\&]+' % replace, '&%s=xxx' % replace, msg)

        # Replace api key
        try:
            api_key = Env.setting('api_key')
            if api_key:
                msg = msg.replace(api_key, 'API_KEY')
        except:
            pass

    return msg
def replaceWith(self, path): app_dir = Env.get('app_dir') # Get list of files we want to overwrite self.deletePyc(only_excess = False) existing_files = [] for root, subfiles, filenames in os.walk(app_dir): for filename in filenames: existing_files.append(os.path.join(root, filename)) for root, subfiles, filenames in os.walk(path): for filename in filenames: fromfile = os.path.join(root, filename) tofile = os.path.join(app_dir, fromfile.replace(path + os.path.sep, '')) if not Env.get('dev'): try: os.remove(tofile) except: pass try: os.renames(fromfile, tofile) try: existing_files.remove(tofile) except ValueError: pass except Exception, e: log.error('Failed overwriting file: %s' % e)
def createBaseUrl(self):
    """Build 'host:port' plus the optional url_base suffix; a wildcard bind
    host is rewritten to localhost so the URL is usable in a browser."""
    configured_host = Env.setting('host')
    host = 'localhost' if configured_host == '0.0.0.0' else configured_host

    url_base = Env.setting('url_base')
    suffix = '/' + url_base.lstrip('/') if url_base else ''

    return '%s:%d%s' % (cleanHost(host).rstrip('/'), int(Env.setting('port')), suffix)
def createBaseUrl(self):
    """Build 'host:port' + web base path; a wildcard or empty bind host is
    rewritten to localhost so the URL is usable in a browser."""
    configured = Env.setting("host")
    host = "localhost" if configured in ("0.0.0.0", "") else configured
    return "%s:%d%s" % (cleanHost(host).rstrip("/"), int(Env.setting("port")), Env.get("web_base"))
def _minify(self, file_type, files, position, out):
    """Minify the given js/css *files* into one combined file in the cache
    dir and remember its serving URL under minified[file_type][position].

    Fix: source files are read via `with` so the handles are closed instead
    of leaked (`open(...).read()`).
    """
    cache = Env.get('cache_dir')
    out_name = 'minified_' + out
    out = os.path.join(cache, out_name)

    raw = []
    for file_path in files:
        with open(file_path, 'r') as source:
            content = source.read()
        if file_type == 'script':
            data = jsmin(content)
        else:
            data = cssmin(content)
            # CSS is served from a different depth than the sources
            data = data.replace('../images/', '../static/images/')
        raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})

    # Combine all files together with some comments
    data = ''
    for r in raw:
        data += self.comment.get(file_type) % (r.get('file'), r.get('date'))
        data += r.get('data') + '\n\n'
    self.createFile(out, data.strip())

    if not self.minified.get(file_type):
        self.minified[file_type] = {}
    if not self.minified[file_type].get(position):
        self.minified[file_type][position] = []

    # mtime acts as a cache-buster in the URL
    minified_url = 'api/%s/file.cache/%s?%s' % (Env.setting('api_key'), out_name, tryInt(os.path.getmtime(out)))
    self.minified[file_type][position].append(minified_url)
def moveFile(self, old, dest):
    """Move *old* to *dest* and apply the configured file permission.

    An EPERM from moving onto a permission-less filesystem (e.g. NTFS) is
    tolerated: the destination is kept and the source unlinked manually.
    """
    dest = ss(dest)
    try:
        shutil.move(old, dest)

        try:
            os.chmod(dest, Env.getPermission('file'))
        except:
            log.error('Failed setting permissions for file: %s, %s', (dest, traceback.format_exc(1)))

    except OSError, err:
        # Copying from a filesystem with octal permission to an NTFS file system causes a permission error. In this case ignore it.
        if not hasattr(os, 'chmod') or err.errno != errno.EPERM:
            raise
        else:
            if os.path.exists(dest):
                os.unlink(old)
def cleanup(self):
    """Delete files from the cache dir that have no matching record in the
    File table anymore."""

    # Wait a bit after starting before cleanup
    time.sleep(3)
    log.debug('Cleaning up unused files')

    try:
        db = get_session()
        for root, dirs, walk_files in os.walk(Env.get('cache_dir')):
            for filename in walk_files:
                file_path = os.path.join(root, filename)
                # Anything on disk without a DB record is considered orphaned
                f = db.query(File).filter(File.path == toUnicode(file_path)).first()
                if not f:
                    os.remove(file_path)
    except:
        log.error('Failed removing unused file: %s', traceback.format_exc())
def isAvailable(self, test_url):
    """Return whether *test_url*'s host currently responds, re-probing each
    host at most every 15 minutes; always True in dev mode."""
    if Env.get('dev'):
        return True

    host = urlparse(test_url).hostname
    now = time.time()

    # Only re-probe when the last check for this host is older than 15 min
    if self.last_available_check.get(host) < now - 900:
        self.last_available_check[host] = now

        try:
            self.urlopen(test_url, 30)
            self.is_available[host] = True
        except:
            log.error('"%s" unavailable, trying again in an 15 minutes.', host)
            self.is_available[host] = False

    return self.is_available.get(host, False)
def download(self, url='', dest=None, overwrite=False, urlopen_kwargs=None):
    """Download *url* to *dest* (defaults to a cache-dir file keyed by the
    url's md5) and return the destination path, or False on failure.

    An existing destination is returned as-is unless *overwrite* is set.

    Fix: the mutable default argument (urlopen_kwargs={}) is replaced with
    a None sentinel so calls cannot share/mutate a single dict.
    """
    urlopen_kwargs = urlopen_kwargs or {}

    if not dest:  # to Cache
        dest = os.path.join(Env.get('cache_dir'), '%s.%s' % (md5(url), getExt(url)))

    if not overwrite and os.path.isfile(dest):
        return dest

    try:
        filedata = self.urlopen(url, **urlopen_kwargs)
    except:
        log.error('Failed downloading file %s: %s', (url, traceback.format_exc()))
        return False

    self.createFile(dest, filedata, binary=True)
    return dest
def __init__(self):
    """Core plugin setup: register app lifecycle API views and events,
    install the shutdown signal handler and relax network defaults."""
    addApiView('app.shutdown', self.shutdown, docs = {
        'desc': 'Shutdown the app.',
        'return': {'type': 'string: shutdown'}
    })
    addApiView('app.restart', self.restart, docs = {
        'desc': 'Restart the app.',
        'return': {'type': 'string: restart'}
    })
    addApiView('app.available', self.available, docs = {
        'desc': 'Check if app available.'
    })
    addApiView('app.version', self.versionView, docs = {
        'desc': 'Get version.'
    })

    addEvent('app.shutdown', self.shutdown)
    addEvent('app.restart', self.restart)
    addEvent('app.load', self.launchBrowser, priority = 1)
    addEvent('app.base_url', self.createBaseUrl)
    addEvent('app.api_url', self.createApiUrl)
    addEvent('app.version', self.version)
    addEvent('app.load', self.checkDataDir)
    addEvent('app.load', self.cleanUpFolders)
    addEvent('app.load.after', self.dependencies)

    addEvent('setting.save.core.password', self.md5Password)
    addEvent('setting.save.core.api_key', self.checkApikey)

    # Make sure we can close-down with ctrl+c properly
    if not Env.get('desktop'):
        self.signalHandler()

    # Set default urlopen timeout
    import socket
    socket.setdefaulttimeout(30)

    # Don't check ssl by default (2.7.9+ verifies certificates by default,
    # which breaks requests to hosts with self-signed/invalid certs)
    try:
        if sys.version_info >= (2, 7, 9):
            import ssl
            ssl._create_default_https_context = ssl._create_unverified_context
    except:
        log.debug('Failed setting default ssl context: %s', traceback.format_exc())
def get(self):
    """API view: return the selected log file (param 'nr', 0 = current)
    with its lines in reverse order (newest first).

    Fixes: the file handle is now closed via `with`, and the O(n^2)
    insert(0)/string-concat reversal is replaced with reversed() + join.
    """
    nr = int(getParam('nr', 0))
    # Rotated logs live next to the current one with a numeric suffix
    path = '%s%s' % (Env.get('log_path'), '.%s' % nr if nr > 0 else '')

    # Reverse
    with open(path, 'r') as f:
        log = ''.join(reversed(f.readlines()))

    return jsonified({
        'success': True,
        'log': log,
    })
def _minify(self, file_type, files, position, out):
    """Minify the given js/css *files* into one combined file under
    cache/minified/ and remember its serving URL under
    minified[file_type][position].

    Fix: source files are read via `with` so the handles are closed instead
    of leaked (`open(...).read()`).
    """
    cache = Env.get('cache_dir')
    out_name = out
    out = os.path.join(cache, 'minified', out_name)

    raw = []
    for file_path in files:
        with open(file_path, 'r') as source:
            content = source.read()
        if file_type == 'script':
            data = jsmin(content)
        else:
            data = self.prefix(content)
            data = cssmin(data)
            # Minified CSS is served from a different depth than the sources
            data = data.replace('../images/', '../static/images/')
            data = data.replace('../fonts/', '../static/fonts/')
            data = data.replace('../../static/', '../static/')  # Replace inside plugins

        raw.append({
            'file': file_path,
            'date': int(os.path.getmtime(file_path)),
            'data': data
        })

    # Combine all files together with some comments
    data = ''
    for r in raw:
        data += self.comment.get(file_type) % (ss(r.get('file')), r.get('date'))
        data += r.get('data') + '\n\n'
    self.createFile(out, data.strip())

    if not self.minified.get(file_type):
        self.minified[file_type] = {}
    if not self.minified[file_type].get(position):
        self.minified[file_type][position] = []

    # mtime acts as a cache-buster in the URL
    minified_url = 'minified/%s?%s' % (out_name, tryInt(os.path.getmtime(out)))
    self.minified[file_type][position].append(minified_url)
def replaceWith(self, path):
    """Replace the plugins folder contents with the tree at *path*.

    Takes a snapshot of existing files first; anything not part of the new
    tree is deleted afterwards. In dev mode nothing is overwritten.
    Returns True on success, False when any file failed to be installed.
    """
    path = sp(path)
    plugins_folder = os.path.dirname(os.path.abspath(__file__))

    # Get list of files we want to overwrite
    removePyc(plugins_folder)
    existing_files = []
    for root, subfiles, filenames in os.walk(plugins_folder):
        for filename in filenames:
            existing_files.append(os.path.join(root, filename))

    for root, subfiles, filenames in os.walk(path):
        for filename in filenames:
            fromfile = os.path.join(root, filename)
            # Map the source path onto the matching location in the plugins folder
            tofile = os.path.join(plugins_folder, fromfile.replace(path + os.path.sep, ''))

            if not Env.get('dev'):
                try:
                    if os.path.isfile(tofile):
                        os.remove(tofile)

                    dirname = os.path.dirname(tofile)
                    if not os.path.isdir(dirname):
                        self.makeDir(dirname)

                    shutil.move(fromfile, tofile)
                    try:
                        existing_files.remove(tofile)
                    except ValueError:
                        pass
                except:
                    log.error('Failed overwriting file "%s": %s', (tofile, traceback.format_exc()))
                    return False

    # Whatever remains was not part of the update: remove it
    for still_exists in existing_files:
        try:
            os.remove(still_exists)
        except:
            log.error('Failed removing non-used file: %s', traceback.format_exc())

    return True
def __init__(self):
    """Core plugin setup: register app lifecycle API views and events,
    install the shutdown signal handler and set the socket timeout."""
    addApiView('app.shutdown', self.shutdown, docs={
        'desc': 'Shutdown the app.',
        'return': {
            'type': 'string: shutdown'
        }
    })
    addApiView('app.restart', self.restart, docs={
        'desc': 'Restart the app.',
        'return': {
            'type': 'string: restart'
        }
    })
    addApiView('app.available', self.available, docs={'desc': 'Check if app available.'})
    addApiView('app.version', self.versionView, docs={'desc': 'Get version.'})

    addEvent('app.shutdown', self.shutdown)
    addEvent('app.restart', self.restart)
    addEvent('app.load', self.launchBrowser, priority=1)
    addEvent('app.base_url', self.createBaseUrl)
    addEvent('app.api_url', self.createApiUrl)
    addEvent('app.version', self.version)
    addEvent('app.load', self.checkDataDir)
    addEvent('app.load', self.cleanUpFolders)

    addEvent('setting.save.core.password', self.md5Password)
    addEvent('setting.save.core.api_key', self.checkApikey)

    # Make sure we can close-down with ctrl+c properly
    if not Env.get('desktop'):
        self.signalHandler()

    # Set default urlopen timeout
    import socket
    socket.setdefaulttimeout(30)
def download(self, data=None, media=None, filedata=None):
    """Send a release URL to put.io as a transfer; when the 'download'
    option is set, register a callback URL so put.io pings us when done.
    Returns the downloadReturnId for the created transfer.

    Fix: the original called Env.get('api_base'.strip('/')) -- stripping
    the literal key (a no-op) instead of the returned api base path, which
    produced a malformed callback URL.
    """
    if not media:
        media = {}
    if not data:
        data = {}

    log.info('Sending "%s" to put.io', data.get('name'))
    url = data.get('url')
    client = pio.Client(self.conf('oauth_token'))

    # It might be possible to call getFromPutio from the renamer if we can then we don't need to do this.
    # Note callback_host is NOT our address, it's the internet host that putio can call too
    callbackurl = None
    if self.conf('download'):
        # Strip slashes off the api base *value* and re-add single separators
        api_base = Env.get('api_base').strip('/')
        callbackurl = 'http://' + self.conf('callback_host') + '/' + '%s/downloader.putio.getfrom/' % api_base

    resp = client.Transfer.add_url(url, callback_url=callbackurl)
    log.debug('resp is %s', resp.id)
    return self.downloadReturnId(resp.id)
def isAvailable(self, test_url):
    """Return whether *test_url*'s host currently responds, re-probing each
    host at most every 15 minutes; always True in debug mode.

    Fixes: the final lookup uses .get(host, False) so an unseen host can
    never raise KeyError (consistent with the newer implementation), and
    the log call uses lazy logger arguments.
    """
    if Env.get('debug'):
        return True

    now = time.time()
    host = urlparse(test_url).hostname

    # Only re-probe when the last check for this host is older than 15 min
    if self.last_available_check.get(host) < now - 900:
        self.last_available_check[host] = now
        data = self.urlopen(test_url, 30)
        if not data:
            log.error('%s unavailable, trying again in an 15 minutes.', self.name)
            self.is_available[host] = False
        else:
            self.is_available[host] = True

    return self.is_available.get(host, False)
def createFile(self, path, content, binary=False):
    """Write *content* to *path* (text or binary), creating parent dirs and
    applying the configured file permission. On failure the error is logged
    and any partially written file is removed.

    Fix: the file handle is managed with `with`, so it is closed even when
    the write raises (previously it leaked on failure).
    """
    path = ss(path)

    self.makeDir(os.path.dirname(path))

    if os.path.exists(path):
        log.debug('%s already exists, overwriting file with new version', path)

    try:
        with open(path, 'w+' if not binary else 'w+b') as f:
            f.write(content)
        os.chmod(path, Env.getPermission('file'))
    except:
        log.error('Unable writing to file "%s": %s', (path, traceback.format_exc()))
        # Don't leave a partial file behind
        if os.path.isfile(path):
            os.remove(path)
def registerStatic(self, plugin_file, add_to_head=True):
    """Expose this plugin's static/ folder through an API view and
    optionally register its js/css assets for inclusion in the HTML head."""
    # Register plugin path
    self.plugin_path = os.path.dirname(plugin_file)

    # Derive snake_case plugin name from the CamelCase class name
    underscored = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', self.__class__.__name__)
    class_name = re.sub('([a-z0-9])([A-Z])', r'\1_\2', underscored).lower()

    path = 'api/%s/static/%s/' % (Env.setting('api_key'), class_name)
    addView(path + '<path:filename>', self.showStatic, static=True)

    if not add_to_head:
        return

    for asset in glob.glob(os.path.join(self.plugin_path, 'static', '*')):
        ext = getExt(asset)
        if ext not in ['js', 'css']:
            continue
        event_name = 'register_%s' % ('script' if ext in 'js' else 'style')
        fireEvent(event_name, path + os.path.basename(asset))
def download(self, url='', dest=None, overwrite=False):
    """Download *url* to *dest* (defaults to a cache-dir file keyed by the
    url's md5) and return the destination path, or False for an empty
    response.

    Fix: when the destination already exists and *overwrite* is False, the
    URL is no longer downloaded at all (previously it was fetched and then
    thrown away).
    """
    if not dest:  # to Cache
        dest = os.path.join(Env.get('cache_dir'), '%s.%s' % (md5(url), getExt(url)))

    if not overwrite and os.path.exists(dest):
        log.debug('File already exists: %s' % dest)
        return dest

    filedata = self.urlopen(url)
    if not filedata:
        log.error('File is empty, don\'t download')
        return False

    log.debug('Writing file to: %s' % dest)
    with open(dest, 'wb') as output:
        output.write(filedata)

    return dest
def save_view(self, **kwargs):
    """API view: persist a single settings option.

    Expects kwargs: section, name, value. Directory-typed values are mapped
    through the soft-chroot; 'directories' arrives as a JSON list and is
    stored delimiter-joined. Fires the per-option save events afterwards.
    Returns {'success': bool}.
    """
    section = kwargs.get('section')
    option = kwargs.get('name')
    value = kwargs.get('value')

    if not self.is_option_writable(section, option):
        self.log.warning('Option "%s.%s" isn\'t writable', (section, option))
        return {
            'success': False,
        }

    from couchpotato.environment import Env
    soft_chroot = Env.get('softchroot')

    if self.get_type(section, option) == 'directory':
        value = soft_chroot.chroot2abs(value)

    if self.get_type(section, option) == 'directories':
        import json
        value = json.loads(value)
        if not (value and isinstance(value, list)):
            value = []
        value = list(map(soft_chroot.chroot2abs, value))
        value = self.directories_delimiter.join(value)

    # See if a value handler is attached, use that as value
    new_value = fire_event('setting.save.%s.%s' % (section, option), value, single=True)

    self.set(section, option, (new_value if new_value else value).encode('unicode_escape'))
    self.save()

    # After save (for re-interval etc)
    fire_event('setting.save.%s.%s.after' % (section, option), single=True)
    fire_event('setting.save.%s.*.after' % section, single=True)

    return {'success': True}
def createFile(self, path, content, binary = False):
    """Write `content` to `path`, creating parent dirs as needed.

    `content` may be a requests Response (streamed to a .tmp file then
    renamed into place) or raw data. On a failed plain write, any
    partially written file is removed.
    """
    path = sp(path)

    self.makeDir(os.path.dirname(path))

    if os.path.exists(path):
        log.debug('%s already exists, overwriting file with new version', path)

    write_type = 'w+' if not binary else 'w+b'

    # Stream file using response object
    if isinstance(content, requests.models.Response):

        # Write file to temp; `with` guarantees the handle closes
        # (the original leaked it on an exception mid-stream)
        with open('%s.tmp' % path, write_type) as f:
            for chunk in content.iter_content(chunk_size = 1048576):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()

        # Rename to destination
        os.rename('%s.tmp' % path, path)
    else:
        try:
            # Context manager instead of manual open/close
            with open(path, write_type) as f:
                f.write(content)

            try:
                os.chmod(path, Env.getPermission('file'))
            except:
                log.error('Failed writing permission to file "%s": %s', (path, traceback.format_exc()))
        except:
            log.error('Unable to write file "%s": %s', (path, traceback.format_exc()))

            # Remove the partially written file
            if os.path.isfile(path):
                os.remove(path)
def setUp(self, conf = '/test.cfg'):
    """Build a YGG provider wired to a throw-away test environment."""
    settings = Settings()
    settings.setFile(base_path + conf)

    # To not regenerate an Travis encrypted token at every ygg hostname change
    if not settings.get('url', 'ygg'):
        settings.set('ygg', 'url', 'https://ww3.yggtorrent.gg')

    env_values = (
        ('settings', settings),
        ('http_opener', requests.Session()),
        ('cache', NoCache()),
    )
    for key, val in env_values:
        Env.set(key, val)

    return YGG()
def download(self, url = '', dest = None, overwrite = False):
    """Download `url` via urllib2 into `dest` (cache dir by default).

    Returns the destination path, or False when the download failed.
    """
    try:
        # Renamed from `file`: don't shadow the builtin
        response = urllib2.urlopen(url)

        if not dest:  # to Cache
            dest = os.path.join(Env.get('cache_dir'), '%s.%s' % (md5(url), getExt(url)))

        if overwrite or not os.path.exists(dest):
            log.debug('Writing file to: %s' % dest)
            output = open(dest, 'wb')
            output.write(response.read())
            output.close()
        else:
            log.debug('File already exists: %s' % dest)

        return dest

    # BUG FIX: `except Exception, e` is Python 2-only syntax; `as`
    # works on 2.6+ and 3. Also return False explicitly instead of
    # falling off the end, matching the sibling download().
    except Exception as e:
        log.error('Unable to download file "%s": %s' % (url, e))
        return False
def clear(self, **kwargs):
    """Remove all rotated log files and truncate the active log."""
    for x in range(0, 50):
        path = '%s%s' % (Env.get('log_path'), '.%s' % x if x > 0 else '')

        if not os.path.isfile(path):
            continue

        try:
            # Create empty file for current logging
            # BUG FIX: was `x is 0` -- an identity comparison that only
            # works because of CPython's small-int caching; use equality.
            if x == 0:
                self.createFile(path, '')
            else:
                os.remove(path)
        except:
            log.error('Couldn\'t delete file "%s": %s', (path, traceback.format_exc()))

    return {'success': True}
def register(self):
    """Register this application with the Growl daemon, once."""
    if self.registered:
        return

    try:
        host = self.conf('hostname')
        pwd = self.conf('password')
        port = self.conf('port')

        # Fall back to local defaults when the options are empty
        self.growl = notifier.GrowlNotifier(
            applicationName = Env.get('appname'),
            notifications = ['Updates'],
            defaultNotifications = ['Updates'],
            applicationIcon = '%s/static/images/couch.png' % fireEvent('app.api_url', single = True),
            hostname = host or 'localhost',
            password = pwd or None,
            port = port or 23053,
        )
        self.growl.register()
        self.registered = True
    except:
        log.error('Failed register of growl: %s', traceback.format_exc())
def searchLibrary(self):
    """Search subtitles for every 'done' release of movies in the library."""

    # Get all active and online movies
    db = get_session()
    library = db.query(Library).all()
    done_status = fireEvent('status.get', 'done', single = True)

    # NOTE(review): `library` is the list returned by .all(); accessing
    # `.movies` on it looks wrong -- confirm against the Library model
    # whether this should iterate `library` itself.
    for movie in library.movies:
        for release in movie.releases:

            # get releases and their movie files
            # BUG FIX: was `is`, an identity test on an int id; use equality.
            if release.status_id == done_status.get('id'):

                files = []
                for file in release.files.filter(FileType.status.has(identifier = 'movie')).all():
                    files.append(file.path)

                # get subtitles for those files
                subliminal.list_subtitles(files, cache_dir = Env.get('cache_dir'), multi = True, languages = self.getLanguages(), services = self.services)
def partial(self, type = 'all', lines = 30, offset = 0, **kwargs):
    """Return the most recent `lines` log entries of `type`, skipping `offset`.

    Walks the current log plus up to 49 rotated logs, newest entries first.
    """
    total_lines = try_int(lines)
    offset = try_int(offset)

    log_lines = []

    for x in range(0, 50):
        path = '%s%s' % (Env.get('log_path'), '.%s' % x if x > 0 else '')

        # Check see if the log exists
        if not os.path.isfile(path):
            break

        # BUG FIX: the file handle was never closed; use a context manager
        with open(path, 'r') as f:
            log_content = to_unicode(f.read())
        raw_lines = self.toList(log_content)
        raw_lines.reverse()

        brk = False
        for line in raw_lines:

            if type == 'all' or line.get('type') == type.upper():
                log_lines.append(line)

                # Stop once enough entries were collected for page + offset
                if len(log_lines) >= (total_lines + offset):
                    brk = True
                    break

        if brk:
            break

    log_lines = log_lines[offset:]
    log_lines.reverse()

    return {
        'success': True,
        'log': log_lines,
    }
def _createType(self, meta_name, root, movie_info, group, file_type, i):
    """Create one metadata file (nfo / thumbnail / fanart / ...) for a group.

    Resolves the getter methods for name and content from `file_type`,
    then either copies an existing file or writes generated content.
    """

    # Get file path
    camelcase_method = underscoreToCamel(file_type.capitalize())
    name = getattr(self, 'get' + camelcase_method + 'Name')(meta_name, root, i)

    # `is None` means the option is absent -> treat as enabled
    if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None):

        # Get file content
        content = getattr(self, 'get' + camelcase_method)(movie_info = movie_info, data = group, i = i)
        if content:
            log.debug('Creating %s file: %s', (file_type, name))
            if os.path.isfile(content):
                content = sp(content)
                name = sp(name)

                if not os.path.exists(os.path.dirname(name)):
                    os.makedirs(os.path.dirname(name))

                # BUG FIX: the file was copied twice (copy2 followed by
                # copyfile); one copyfile suffices, stats are copied below.
                shutil.copyfile(content, name)

                # Try and copy stats seperately
                try:
                    shutil.copystat(content, name)
                except:
                    pass
            else:
                self.createFile(name, content)
                group['renamed_files'].append(name)

            try:
                os.chmod(sp(name), Env.getPermission('file'))
            except:
                log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc()))
def call(self, params, use_json = True):
    """Call the SABnzbd API with `params` and return the result.

    With `use_json`, decodes the response and returns the sub-object
    keyed by `params['mode']` (empty dict on an API error); otherwise
    returns the raw response body.
    """
    request_url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    }))

    data = self.urlopen(request_url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()})

    # Raw mode: hand back the body untouched
    if not use_json:
        return data

    d = json.loads(data)
    if d.get('error'):
        log.error('Error getting data from SABNZBd: %s', d.get('error'))
        return {}

    return d[params['mode']]
def __init__(self):
    # Wires up the core notification system: event handlers, API views,
    # scheduled message checks and in-memory listener state.
    super(CoreNotifier, self).__init__()

    # Internal notification events
    addEvent('notify', self.notify)
    addEvent('notify.frontend', self.frontend)

    addApiView('notification.markread', self.markAsRead, docs = {
        'desc': 'Mark notifications as read',
        'params': {
            'ids': {'desc': 'Notification id you want to mark as read. All if ids is empty.', 'type': 'int (comma separated)'},
        },
    })

    addApiView('notification.list', self.listView, docs = {
        'desc': 'Get list of notifications',
        'params': {
            'limit_offset': {'desc': 'Limit and offset the notification list. Examples: "50" or "50,30"'},
        },
        'return': {'type': 'object', 'example': """{
    'success': True,
    'empty': bool, any notification returned or not,
    'notifications': array, notifications found,
}"""}
    })

    # Long-poll listener endpoint (non-blocking) plus its plain API view
    addNonBlockApiView('notification.listener', (self.addListener, self.removeListener))
    addApiView('notification.listener', self.listener)

    # Periodic jobs: remote message check and local message cleanup
    fireEvent('schedule.interval', 'core.check_messages', self.checkMessages, hours = 12, single = True)
    fireEvent('schedule.interval', 'core.clean_messages', self.cleanMessages, seconds = 15, single = True)

    addEvent('app.load', self.clean)

    # Skip the startup message check when running in dev mode
    if not Env.get('dev'):
        addEvent('app.load', self.checkMessages)

    self.messages = []
    self.listeners = []
    self.m_lock = threading.Lock()
def _searchOnHost(self, host, movie, quality, results):
    """Query one newznab host by imdb id and append parsed hits to `results`."""

    query = tryUrlencode({
        'imdbid': movie['library']['identifier'].replace('tt', ''),
        'apikey': host['api_key'],
        'extended': 1
    })
    search_url = '%s&%s' % (self.getUrl(host['host'], self.urls['search']), query)
    nzbs = self.getRSSData(search_url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})

    for nzb in nzbs:

        # Prefer the 'usenetdate' attribute, fall back to pubDate
        date = next((child.attrib.get('value') for child in nzb if child.attrib.get('name') == 'usenetdate'), None)
        if not date:
            date = self.getTextElement(nzb, 'pubDate')

        nzb_id = self.getTextElement(nzb, 'guid').split('/')[-1:].pop()
        name = self.getTextElement(nzb, 'title')

        # Skip entries without a title
        if not name:
            continue

        results.append({
            'id': nzb_id,
            'provider_extra': urlparse(host['host']).hostname or host['host'],
            'name': self.getTextElement(nzb, 'title'),
            'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
            'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,
            'url': (self.getUrl(host['host'], self.urls['download']) % tryUrlencode(nzb_id)) + self.getApiExt(host),
            'detail_url': '%sdetails/%s' % (cleanHost(host['host']), tryUrlencode(nzb_id)),
            'content': self.getTextElement(nzb, 'description'),
            'score': host['extra_score'],
        })
def searchSingle(self, group):
    """Download missing subtitles for one release group.

    Returns True on success, False on error, None when disabled.
    """
    if self.isDisabled():
        return

    try:
        available_languages = sum(group['subtitle_language'].itervalues(), [])
        files = [toUnicode(x) for x in group['files']['movie']]
        downloaded = []

        log.debug('Searching for subtitles for: %s', files)

        # Only fetch languages we don't already have
        missing = [lang for lang in self.getLanguages() if lang not in available_languages]
        for lang in missing:
            download = subliminal.download_subtitles(files, multi = True, force = False, languages = [lang], services = self.services, cache_dir = Env.get('cache_dir'))
            for subtitle in download:
                downloaded.extend(download[subtitle])

        # Record every fetched subtitle on the group
        for d_sub in downloaded:
            log.info('Found subtitle (%s): %s', (d_sub.language.alpha2, files))
            group['files']['subtitle'].append(d_sub.path)
            group['subtitle_language'][d_sub.path] = [d_sub.language.alpha2]

        return True

    except:
        log.error('Failed searching for subtitle: %s', (traceback.format_exc()))

    return False
def _search(self, movie, quality, results):
    """Search this provider by imdb id and category, appending hits to `results`."""

    cat_id_string = 'cat=%s' % ','.join(['%s' % x for x in self.getCatId(quality.get('identifier'))])
    query = tryUrlencode({
        'imdbid': movie['library']['identifier'].replace('tt', ''),
        'apikey': self.conf('api_key'),
        'extended': 1
    })
    search_url = '%s&%s&%s' % ((self.urls['search']), query, cat_id_string)
    nzbs = self.getRSSData(search_url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})

    for nzb in nzbs:

        # Prefer the 'usenetdate' attribute, fall back to pubDate
        date = next((child.attrib.get('value') for child in nzb if child.attrib.get('name') == 'usenetdate'), None)
        if not date:
            date = self.getTextElement(nzb, 'pubDate')

        nzb_id = self.getTextElement(nzb, 'guid').split('/')[-1:].pop()
        name = self.getTextElement(nzb, 'title')

        # Skip entries without a title
        if not name:
            continue

        results.append({
            'id': nzb_id,
            'name': self.getTextElement(nzb, 'title'),
            'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
            'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,
            'url': self.urls['download'] % tryUrlencode(nzb_id) + self.getApiExt(),
            'detail_url': self.urls['detail'] % tryUrlencode(nzb_id),
            'content': self.getTextElement(nzb, 'description'),
        })
def partial(self):
    """Return the last `lines` log lines matching `type` as one joined string."""
    log_type = getParam('type', 'all')
    total_lines = tryInt(getParam('lines', 30))

    log_lines = []

    for x in range(0, 50):
        path = '%s%s' % (Env.get('log_path'), '.%s' % x if x > 0 else '')

        # Check see if the log exists
        if not os.path.isfile(path):
            break

        # BUG FIX: the handle was never closed and `reversed_lines` was
        # pointlessly initialised twice; use a context manager.
        # NOTE(review): '[0m' looks like the tail of an ANSI reset
        # sequence used as a line separator -- confirm the literal.
        with open(path, 'r') as f:
            reversed_lines = toUnicode(f.read()).split('[0m\n')
        reversed_lines.reverse()

        brk = False
        for line in reversed_lines:

            if log_type == 'all' or '%s ' % log_type.upper() in line:
                log_lines.append(line)

                if len(log_lines) >= total_lines:
                    brk = True
                    break

        if brk:
            break

    log_lines.reverse()
    return jsonified({
        'success': True,
        'log': '[0m\n'.join(log_lines),
    })
def deletePyc(self, only_excess = True):
    """Delete compiled .pyc files under the app dir, then prune emptied dirs.

    With `only_excess` (default), only removes .pyc files whose matching
    .py source no longer exists; otherwise removes all .pyc files.
    """
    for root, dirs, files in os.walk(ss(Env.get('app_dir'))):

        compiled = [name for name in files if name.endswith('.pyc')]
        sources = set(name for name in files if name.endswith('.py'))

        # 'foo.pyc'[:-1] == 'foo.py' -- stale when the source is gone
        if only_excess:
            stale = [name for name in compiled if name[:-1] not in sources]
        else:
            stale = compiled

        for name in stale:
            full_path = os.path.join(root, name)
            log.debug('Removing old PYC file: %s', full_path)
            try:
                os.remove(full_path)
            except:
                log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))

        # Drop directories that are now empty
        for dir_name in dirs:
            full_path = os.path.join(root, dir_name)
            if len(os.listdir(full_path)) == 0:
                try:
                    os.rmdir(full_path)
                except:
                    log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))