def createOnComplete(self, media_id):
    # Factory returning a closure; media_id is captured so the callback can be
    # passed as on_complete to async events (see the add() flow below)

    def onComplete():
        try:
            media = fire_event('media.get', media_id, single=True)
            if media:
                event_name = '%s.searcher.single' % media.get('type')
                fire_event_async(event_name, media, on_complete=self.createNotifyFront(media_id), manual=True)
        except:
            log.error('Failed creating onComplete: %s', traceback.format_exc())

    return onComplete
def refresh(self, id='', **kwargs):
    handlers = []
    ids = split_string(id)

    for x in ids:
        refresh_handler = self.createRefreshHandler(x)
        if refresh_handler:
            handlers.append(refresh_handler)

    fire_event('notify.frontend', type='media.busy', data={'_id': ids})
    fire_event_async('schedule.queue', handlers=handlers)

    return {
        'success': True,
    }
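# Minimal usage sketch (ids are hypothetical): `id` may be a single media id or a
# comma-separated list. This assumes split_string splits on ',' and strips
# whitespace, so both calls below queue one refresh handler per id:
#
#   self.refresh(id='52d13e17c')
#   self.refresh(id='52d13e17c, 52d13f2ab')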
def do_update_view(self, **kwargs):
    self.check()

    if not self.updater.update_version:
        log.error('Trying to update when no update is available.')
        success = False
    else:
        success = self.updater.do_update()
        if success:
            fire_event_async('app.restart')

        # Assume the updater handles things
        if not success:
            success = True

    return {'success': success}
def edit(self, id='', **kwargs):
    try:
        db = get_db()

        ids = split_string(id)
        for media_id in ids:

            try:
                m = db.get('id', media_id)
                m['profile_id'] = kwargs.get('profile_id') or m['profile_id']

                cat_id = kwargs.get('category_id')
                if cat_id is not None:
                    m['category_id'] = cat_id if len(cat_id) > 0 else m['category_id']

                # Remove releases that are still available; '==', not 'is',
                # since identity comparison on strings is unreliable
                for rel in fire_event('release.for_media', m['_id'], single=True):
                    if rel['status'] == 'available':
                        db.delete(rel)

                # Default title
                if kwargs.get('default_title'):
                    m['title'] = kwargs.get('default_title')

                db.update(m)

                fire_event('media.restatus', m['_id'], single=True)

                m = db.get('id', media_id)

                movie_dict = fire_event('media.get', m['_id'], single=True)
                fire_event_async('movie.searcher.single', movie_dict, on_complete=self.createNotifyFront(media_id))

            except:
                log.error('Can\'t edit non-existing media: %s', traceback.format_exc())

        return {
            'success': True,
        }
    except:
        log.error('Failed editing media: %s', traceback.format_exc())

    return {
        'success': False,
    }
def startup_compact(self):
    from couchpotato import Env

    db = self.get_database()

    # Try fix for migration failures on desktop
    if Env.get('desktop'):
        try:
            list(db.all('profile', with_doc=True))
        except RecordNotFound:

            failed_location = '%s_failed' % db.path
            old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')

            if not os.path.isdir(failed_location) and os.path.isfile(old_db):
                log.error('Corrupt database, trying migrate again')

                db.close()

                # Rename database folder
                os.rename(db.path, '%s_failed' % db.path)

                # Rename .old database to try another migrate
                os.rename(old_db, old_db[:-4])

                fire_event_async('app.restart')
            else:
                log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
                db.reindex()

            return

    # Check size and compact if needed
    size = db.get_db_details().get('size')
    prop_name = 'last_db_compact'
    last_check = int(Env.prop(prop_name, default=0))

    if last_check < time.time() - 604800:  # 7 days
        self.compact()
        Env.prop(prop_name, value=int(time.time()))
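# The same persisted-throttle pattern appears here and in auto_update below: read a
# stored timestamp, act only if it is older than the interval, then store the new
# timestamp. A minimal standalone sketch, where prop_store (a plain dict here) is
# an illustrative stand-in for Env.prop:
import time

def throttled(prop_store, key, interval, action):
    last = int(prop_store.get(key, 0))
    if last < time.time() - interval:
        action()
        prop_store[key] = int(time.time())

# e.g. throttled(props, 'last_db_compact', 604800, db.compact)  # at most every 7 days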
def getFromPutio(self, **kwargs):
    try:
        file_id = str(kwargs.get('file_id'))
    except:
        return {
            'success': False,
        }

    log.info('Put.io download has been called, file_id is %s', file_id)

    # Guard against firing a duplicate download for an id that is already queued
    if file_id not in self.downloading_list:
        self.downloading_list.append(file_id)
        fire_event_async('putio.download', fid=file_id)
        return {
            'success': True,
        }

    return {
        'success': False,
    }
def auto_update(self):
    do_check = True

    try:
        last_check = try_int(Env.prop(self.last_check, default=0))
        now = try_int(time.time())
        do_check = last_check < now - 43200  # check at most once every 12 hours

        if do_check:
            Env.prop(self.last_check, value=now)
    except:
        log.error('Failed checking last time to update: %s', traceback.format_exc())

    if do_check and self.is_enabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
        if self.updater.do_update():

            # Notify before restarting
            try:
                if self.conf('notification'):
                    info = self.updater.info()
                    version_date = datetime.fromtimestamp(info['update_version']['date'])
                    fire_event('updater.updated', 'CouchPotato: Updated to a new version with hash "%s", this version is from %s' % (info['update_version']['hash'], version_date), data=info)
            except:
                log.error('Failed notifying for update: %s', traceback.format_exc())

            fire_event_async('app.restart')

            return True

    return False
def searchAllView(self, **kwargs):
    fire_event_async('movie.searcher.all', manual=True)

    return {'success': not self.in_progress}
def runCouchPotato(options, base_path, args, data_dir=None, log_dir=None, Env=None, desktop=None):
    try:
        locale.setlocale(locale.LC_ALL, '')
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    Env.set('encoding', encoding)

    # Do db stuff
    db_path = sp(os.path.join(data_dir, 'database'))
    old_db_path = os.path.join(data_dir, 'couchpotato.db')

    # Remove database folder if both exist
    if os.path.isdir(db_path) and os.path.isfile(old_db_path):
        db = SuperThreadSafeDatabase(db_path)
        db.open()
        db.destroy()

    # Check if database exists
    db = SuperThreadSafeDatabase(db_path)
    db_exists = db.exists()
    if db_exists:

        # Backup before start and cleanup old backups
        backup_path = sp(os.path.join(data_dir, 'db_backup'))
        backup_count = 5
        existing_backups = []
        if not os.path.isdir(backup_path):
            os.makedirs(backup_path)

        for root, dirs, files in os.walk(backup_path):

            # Only consider files being a direct child of the backup_path
            if root == backup_path:
                for backup_file in sorted(files):
                    ints = re.findall(r'\d+', backup_file)

                    # Delete non-zip files
                    if len(ints) != 1:
                        try:
                            os.remove(os.path.join(root, backup_file))
                        except:
                            pass
                    else:
                        existing_backups.append((int(ints[0]), backup_file))
            else:
                # Delete stray directories
                shutil.rmtree(root)

        # Remove all but the newest backup_count backups
        # (see the standalone rotation sketch after this function)
        for eb in existing_backups[:-backup_count]:
            os.remove(os.path.join(backup_path, eb[1]))

        # Create new backup
        new_backup = sp(os.path.join(backup_path, '%s.tar.gz' % int(time.time())))
        zipf = tarfile.open(new_backup, 'w:gz')
        for root, dirs, files in os.walk(db_path):
            for zfilename in files:
                zipf.add(os.path.join(root, zfilename), arcname='database/%s' % os.path.join(root[len(db_path) + 1:], zfilename))
        zipf.close()

        # Open last
        db.open()
    else:
        db.create()

    # Force creation of cachedir
    log_dir = sp(log_dir)
    cache_dir = sp(os.path.join(data_dir, 'cache'))
    python_cache = sp(os.path.join(cache_dir, 'python'))

    if not os.path.exists(cache_dir):
        os.mkdir(cache_dir)
    if not os.path.exists(python_cache):
        os.mkdir(python_cache)

    session = requests.Session()
    session.max_redirects = 5

    # Register environment settings
    Env.set('app_dir', sp(base_path))
    Env.set('data_dir', sp(data_dir))
    Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log')))
    Env.set('db', db)
    Env.set('http_opener', session)
    Env.set('cache_dir', cache_dir)
    Env.set('cache', FileSystemCache(python_cache))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default=False, type='bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default=False, type='bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in ['enzyme', 'guessit', 'subliminal', 'apscheduler', 'tornado', 'requests']:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Disable SSL warning
    disable_warnings()

    # Use reloader
    reloader = debug is True and development and not Env.get('desktop') and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10, encoding=Env.get('encoding'))
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    # noinspection PyUnresolvedReferences
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    # Check that the soft-chroot dir exists
    try:
        # Load Soft-Chroot
        soft_chroot = Env.get('softchroot')
        soft_chroot_dir = Env.setting('soft_chroot', section='core', default=None, type='unicode')
        soft_chroot.initialize(soft_chroot_dir)
    except SoftChrootInitError as exc:
        log.error(exc)
        return
    except:
        log.error('Unable to check whether SOFT-CHROOT is defined')
        return

    # Check available space
    try:
        total_space, available_space = get_free_space(data_dir)
        if available_space < 100:
            log.error('Shutting down as CP needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left', available_space)
            return
    except:
        log.error('Failed getting diskspace: %s', traceback.format_exc())

    def customwarn(message, category, filename, lineno, file=None, line=None):
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))
    warnings.showwarning = customwarn

    # Create app
    from couchpotato import WebHandler
    web_base = ('/' + Env.setting('url_base').lstrip('/') + '/') if Env.setting('url_base') else '/'
    Env.set('web_base', web_base)

    api_key = Env.setting('api_key')
    if not api_key:
        api_key = uuid4().hex
        Env.setting('api_key', value=api_key)

    api_base = r'%sapi/%s/' % (web_base, api_key)
    Env.set('api_base', api_base)

    # Basic config
    host = Env.setting('host', default='0.0.0.0')
    host6 = Env.setting('host6', default='::')

    config = {
        'use_reloader': reloader,
        'port': try_int(Env.setting('port', default=5050)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'host6': host6 if host6 and len(host6) > 0 else '::',
        'ssl_cert': Env.setting('ssl_cert', default=None),
        'ssl_key': Env.setting('ssl_key', default=None),
    }

    # Load the app
    application = Application(
        [],
        log_function=lambda x: None,
        debug=config['use_reloader'],
        gzip=True,
        cookie_secret=api_key,
        login_url='%slogin/' % web_base,
    )
    Env.set('app', application)

    # Request handlers
    application.add_handlers('.*$', [
        (r'%snonblock/(.*)(/?)' % api_base, NonBlockHandler),

        # API handlers
        (r'%s(.*)(/?)' % api_base, ApiHandler),  # Main API handler
        (r'%sgetkey(/?)' % web_base, KeyHandler),  # Get API key
        (r'%s' % api_base, RedirectHandler, {'url': web_base + 'docs/'}),  # API docs

        # Login handlers
        (r'%slogin(/?)' % web_base, LoginHandler),
        (r'%slogout(/?)' % web_base, LogoutHandler),

        # Catch-all webhandlers
        (r'%s(.*)(/?)' % web_base, WebHandler),
        (r'(.*)', WebHandler),
    ])

    # Static paths
    static_path = '%sstatic/' % web_base
    for dir_name in ['fonts', 'images', 'scripts', 'style']:
        application.add_handlers('.*$', [
            ('%s%s/(.*)' % (static_path, dir_name), StaticFileHandler, {'path': sp(os.path.join(base_path, 'couchpotato', 'static', dir_name))})
        ])
    Env.set('static_path', static_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root=sp(base_path))
    loader.run()

    # Fill database with needed stuff
    fire_event('database.setup')
    if not db_exists:
        fire_event('app.initialize', in_order=True)
    fire_event('app.migrate')

    # Go go go!
    from tornado.ioloop import IOLoop
    from tornado.autoreload import add_reload_hook
    loop = IOLoop.current()

    # Reload hook
    def reload_hook():
        fire_event('app.shutdown')

    add_reload_hook(reload_hook)

    # Some logging and fire load event
    try:
        log.info('Starting server on port %(port)s', config)
    except:
        pass
    fire_event_async('app.load')

    ssl_options = None
    if config['ssl_cert'] and config['ssl_key']:
        ssl_options = {
            'certfile': config['ssl_cert'],
            'keyfile': config['ssl_key'],
        }

    server = HTTPServer(application, no_keep_alive=True, ssl_options=ssl_options)

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            if config['host'].startswith('unix:'):
                server.add_socket(bind_unix_socket(config['host'][5:]))
            else:
                server.listen(config['port'], config['host'])

                if Env.setting('ipv6', default=False):
                    try:
                        server.listen(config['port'], config['host6'])
                    except:
                        log.info2('Tried to bind to IPV6 but failed')

            loop.start()
            server.close_all_connections()
            server.stop()
            loop.close(all_fds=True)
        except Exception as e:
            log.error('Failed starting: %s', traceback.format_exc())
            try:
                # Unpack via .args; tuple-unpacking the exception itself fails on Python 3
                nr, msg = e.args
                if nr == 48:  # EADDRINUSE on BSD/macOS
                    log.info('Port (%s) needed for CouchPotato is already in use, trying %s more time(s) after a few seconds', (config.get('port'), restart_tries))
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except ValueError:
                return
            except:
                pass

            raise

        try_restart = False
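# Standalone sketch of the backup-rotation rule used in runCouchPotato above (the
# helper name is illustrative, not part of the codebase): backups are named
# '<unix timestamp>.tar.gz', collected as (timestamp, filename) pairs in ascending
# order, and everything except the newest `backup_count` entries is deleted, i.e.
# only the tail of the list survives.
def _rotation_survivors(existing_backups, backup_count=5):
    # existing_backups: ascending list of (timestamp, filename) pairs
    return existing_backups[-backup_count:]

assert _rotation_survivors([(1, '1.tar.gz'), (2, '2.tar.gz'), (3, '3.tar.gz')], backup_count=2) == \
    [(2, '2.tar.gz'), (3, '3.tar.gz')]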
def updateLibraryView(self, full=1, **kwargs):
    # API parameters arrive as strings, so accept both the string '1' and the
    # integer default when deciding on a full update
    fire_event_async('manage.update', full=full in ('1', 1))

    return {'progress': self.in_progress, 'success': True}
def add(self, params=None, force_readd=True, search_after=True, update_after=True, notify_after=True, status=None):
    if not params:
        params = {}

    # Make sure it's a correctly zero-filled imdb id
    params['identifier'] = get_imdb(params.get('identifier', ''))

    if not params.get('identifier'):
        msg = 'Can\'t add movie without imdb identifier.'
        log.error(msg)
        fire_event('notify.frontend', type='movie.is_tvshow', message=msg)
        return False
    elif not params.get('info'):
        try:
            is_movie = fire_event('movie.is_movie', identifier=params.get('identifier'), adding=True, single=True)
            if not is_movie:
                msg = 'Can\'t add movie, seems to be a TV show.'
                log.error(msg)
                fire_event('notify.frontend', type='movie.is_tvshow', message=msg)
                return False
        except:
            pass

    info = params.get('info')
    if not info or (info and len(info.get('titles', [])) == 0):
        info = fire_event('movie.info', merge=True, extended=False, identifier=params.get('identifier'))

    # Allow force re-add overwrite from param
    if 'force_readd' in params:
        fra = params.get('force_readd')
        force_readd = fra.lower() not in ['0', '-1'] if not isinstance(fra, bool) else fra

    # Set default title
    def_title = self.getDefaultTitle(info)

    # Default profile and category
    default_profile = {}
    if (not params.get('profile_id') and status != 'done') or params.get('ignore_previous', False):
        default_profile = fire_event('profile.default', single=True)
    cat_id = params.get('category_id')

    try:
        db = get_db()

        media = {
            '_t': 'media',
            'type': 'movie',
            'title': def_title,
            'identifiers': {
                'imdb': params.get('identifier')
            },
            'status': status if status else 'active',
            'profile_id': params.get('profile_id') or default_profile.get('_id'),
            'category_id': cat_id if cat_id is not None and len(cat_id) > 0 and cat_id != '-1' else None,
        }

        # Update movie info
        try: del info['in_wanted']
        except: pass
        try: del info['in_library']
        except: pass
        media['info'] = info

        new = False
        previous_profile = None
        try:
            m = db.get('media', 'imdb-%s' % params.get('identifier'), with_doc=True)['doc']

            try:
                db.get('id', m.get('profile_id'))
                previous_profile = m.get('profile_id')
            except RecordNotFound:
                pass
            except:
                log.error('Failed getting previous profile: %s', traceback.format_exc())
        except:
            new = True
            m = db.insert(media)

        # Update dict to be usable
        m.update(media)

        added = True
        do_search = False
        search_after = search_after and self.conf('search_on_add', section='moviesearcher')
        onComplete = None

        if new:
            if search_after:
                onComplete = self.createOnComplete(m['_id'])
            search_after = False
        elif force_readd:

            # Clean snatched history
            for release in fire_event('release.for_media', m['_id'], single=True):
                if release.get('status') in ['downloaded', 'snatched', 'seeding', 'done']:
                    if params.get('ignore_previous', False):
                        fire_event('release.update_status', release['_id'], status='ignored')
                    else:
                        fire_event('release.delete', release['_id'], single=True)

            m['profile_id'] = (params.get('profile_id') or default_profile.get('_id')) if not previous_profile else previous_profile
            m['category_id'] = cat_id if cat_id is not None and len(cat_id) > 0 else (m.get('category_id') or None)
            m['last_edit'] = int(time.time())
            m['tags'] = []

            do_search = True
            db.update(m)
        else:
            try: del params['info']
            except: pass
            log.debug('Movie already exists, not updating: %s', params)
            added = False

        # Trigger update info
        if added and update_after:
            # Do full update to get images etc
            fire_event_async('movie.update', m['_id'], default_title=params.get('title'), on_complete=onComplete)

        # Remove releases that are still available; '==', not 'is'
        for rel in fire_event('release.for_media', m['_id'], single=True):
            if rel['status'] == 'available':
                db.delete(rel)

        movie_dict = fire_event('media.get', m['_id'], single=True)
        if not movie_dict:
            log.debug('Failed adding media, can\'t find it anymore')
            return False

        if do_search and search_after:
            onComplete = self.createOnComplete(m['_id'])
            onComplete()

        if added and notify_after:

            if params.get('title'):
                message = 'Successfully added "%s" to your wanted list.' % params.get('title', '')
            else:
                title = get_title(m)
                if title:
                    message = 'Successfully added "%s" to your wanted list.' % title
                else:
                    message = 'Successfully added to your wanted list.'

            fire_event('notify.frontend', type='movie.added', data=movie_dict, message=message)

        return movie_dict
    except:
        log.error('Failed adding media: %s', traceback.format_exc())
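# Hedged helper sketch mirroring the force_readd normalization in add() above (the
# name parse_flag is illustrative, not part of the codebase): API flags arrive as
# strings, so '0' and '-1' mean False and anything else means True, while real
# booleans pass through unchanged.
def parse_flag(value, default=True):
    if isinstance(value, bool):
        return value
    if value is None:
        return default
    return str(value).lower() not in ('0', '-1')

assert parse_flag('0') is False and parse_flag('1') is True and parse_flag(False) is False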
def do_restart(e):
    if e['status'] == 'done':
        fire_event_async('app.restart')
    elif e['status'] == 'error':
        log.error('Failed updating desktop: %s', e['exception'])
        self.update_failed = True
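# Hypothetical call shapes, based only on the keys the callback reads (`self` is
# captured from the enclosing updater method, so this is a nested callback):
#
#   do_restart({'status': 'done'})                          # schedules 'app.restart'
#   do_restart({'status': 'error', 'exception': 'trace'})   # logs and sets update_failed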