def process_drive():
    file_title = Settings.get_settings('google_drive').get('file_title')
    if not file_title:
        return
    credentials = Settings.get_settings('google_api_credentials').get('credentials')
    if not credentials:
        return

    drive = DriveClient(credentials)
    try:
        file_ = drive.get_file_by_title(file_title)
        if not file_:
            return
        if not check_modified(file_['modifiedDate'], delta=MODIFIED_DELTA):
            return
        body = drive.get_file_content(file_, mime_type='text/html')
        patterns = process_file_queries(body)
        if patterns:
            # Re-read the document so the substitutions apply to the
            # latest revision (processing the queries may take a while)
            body = drive.get_file_content(file_, mime_type='text/html')
            for pattern, repl in patterns:
                body = re.sub(pattern, repl, body, 1)
            drive.set_file_content(file_, body=body, mime_type='text/html')
    except AccessTokenRefreshError:
        logger.error('revoked or expired google API credentials %s', credentials)
    except (DriveError, errors.HttpError), e:
        logger.error('unexpected error: %s', str(e))

def run():
    path = Settings.get_settings('paths')['finished_download']
    if os.path.exists(path):
        media_paths = Settings.get_settings('paths')['media']
        for download in downloads(path):
            if not check_download(download.file):
                if remove_file(download.file):
                    logger.info('removed %s (bad download)', download.filename)
                continue

            # Move the download
            if download.type not in media_paths:
                download.type = 'misc'
            dst = media_paths[download.type]
            res = move_file(download.file, dst)
            if res:
                Media.add_file(res)
                Download.insert({
                        'name': download.filename,
                        'category': download.type,
                        'path': dst,
                        'created': datetime.utcnow(),
                        }, safe=True)
                logger.info('moved %s to %s', download.filename, dst)

def _search_url(self):
    date = self.session['last_url_search']
    if date and date > datetime.utcnow() - DELTA_URL_SEARCH:
        return False
    if self.category not in NETFLIX_CATEGORIES:
        return False
    netflix_ = Settings.get_settings('netflix')
    if not netflix_['username'] or not netflix_['password']:
        return False
    netflix = get_netflix_object(netflix_['username'], netflix_['password'])
    if not netflix:
        return False

    res = netflix.get_info(self.name, self.category)
    if res:
        Media.add_url(url=res['url'], name=res['title'],
                category=self.category)
        logger.info('found "%s" on netflix (%s)', res['title'], res['url'])
        if self.category == 'movies':
            MSearch.remove({'_id': self._id}, safe=True)
            logger.info('removed %s search "%s": found url %s',
                    self.category, self.name, res['url'])
            return True

    MSearch.update({'_id': self._id},
            {'$set': {'session.last_url_search': datetime.utcnow()}},
            safe=True)
    return False

def create_media():
    data = request.json
    type = data.get('type')
    if not type:
        return jsonify(error='missing type')
    langs = data.get('langs') or []

    if 'id' in data:
        if not data.get('mode'):
            return jsonify(error='missing mode')
        id = ObjectId(data['id'])
        search = _get_object_search(id, type)
        if not search:
            return jsonify(error='%s %s does not exist' % (type, id))
        search['langs'] = langs
        search['mode'] = data['mode']
        search['safe'] = False
        if not Search.add(**search):
            return jsonify(error='failed to create search %s' % search)
        return jsonify(result=True)

    name = data.get('name')
    if not name:
        return jsonify(error='missing name')
    if type == 'url':
        dst = Settings.get_settings('paths')['finished_download']
        try:
            Transfer.add(name, dst)
        except Exception, e:
            return jsonify(error='failed to create transfer: %s' % str(e))

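# A minimal sketch of the JSON payloads create_media() accepts, assuming the
# endpoint is called with Content-Type: application/json; the field values
# below are illustrative, not taken from actual API docs:
#
#   {"type": "movies", "id": "521b...", "mode": "once", "langs": ["en"]}
#       -> creates a search from an existing object
#   {"type": "url", "name": "http://example.com/file.torrent"}
#       -> queues a transfer into the finished_download path
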
def update_obj_extra(objtype, objmodel, objid):
    model = get_model(objtype, objmodel)
    if not model:
        return
    obj = model.find_one({'_id': objid})
    if not obj:
        return
    # Check dates in case the object has been updated by another worker
    if not validate_object(obj['created'], obj.get('updated')):
        return

    category = obj.get('info', {}).get('subtype') or obj.get('category')
    spec = {'_id': obj['_id']}
    doc = {'updated': datetime.utcnow()}
    extra = search_extra(obj)
    if extra:
        doc['extra'] = extra
        doc['rating'] = _get_rating(extra, category)

    if category == 'tv':
        if objtype == 'media':
            spec = {'info.name': obj['info']['name']}
        else:
            spec = {'name': obj['name']}

    media_filters = Settings.get_settings('media_filters')
    doc['valid'] = validate_extra(extra or {}, media_filters)
    model.update(spec, {'$set': doc}, multi=True, safe=True)

    name = model.get_query(obj) if objtype == 'search' else obj['name']
    logger.info('updated %s %s "%s"', category, objtype, name)

def process_media():
    count = 0
    root_path = Settings.get_settings('paths')['media']['video'].rstrip('/') + '/'
    for media in Media.find({
            'type': 'video',
            'files': {'$exists': True},
            '$or': [
                {'updated_subs': {'$exists': False}},
                {'updated_subs': {'$lt': datetime.utcnow() - DELTA_UPDATE_DEF[-1][1]}},
                ],
            },
            sort=[('updated_subs', ASCENDING)]):
        if not [f for f in media['files'] if f.startswith(root_path)]:
            continue
        if not validate_media(media):
            continue
        target = '%s.workers.subtitles.search_subtitles' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(media['_id'],), timeout=TIMEOUT_SEARCH)
        count += 1
        if count == WORKERS_LIMIT:
            return

def get_searches(query):
    parts = [v.strip() for v in query.split(',')]
    if len(parts) < 2:
        return []
    category, is_artist = get_category_info(clean(parts.pop(0)))
    if category is None:
        return []
    name = clean(parts.pop(0), 1)
    if not name:
        return []
    artist = name if is_artist else None

    langs = Settings.get_settings('media_langs').get(category, [])
    search = {
        'name': name,
        'category': category,
        'mode': 'once',
        'langs': langs,
        }

    if category == 'music':
        if not parts:
            artist = name
        if artist:
            try:
                return get_music_searches(artist)
            except InfoError, e:
                raise QueryError('failed to find albums from "%s": %s',
                        artist, str(e))
        search['album'] = clean(parts.pop(0), 1)
        if not search['album']:
            raise QueryError('failed to parse query "%s": album name is missing',
                    query)

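# Hedged examples of the comma-separated query format get_searches() parses;
# the exact category names depend on get_category_info() and are assumptions
# here:
#
#   get_searches('movies, the big lebowski')
#       -> one 'movies' search named 'the big lebowski'
#   get_searches('music, some artist, some album')
#       -> one 'music' search with search['album'] set
#   get_searches('music, some artist')
#       -> albums expanded via get_music_searches(artist)
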
def list_settings():
    settings = {}
    for section in ('media_filters', 'search_filters', 'media_langs',
            'subtitles_langs', 'sync', 'paths', 'opensubtitles', 'netflix',
            'filestube', 'rutracker', 'email', 'google_api',
            'google_api_credentials', 'google_drive'):
        settings[section] = Settings.get_settings(section)
    _set_default_settings(settings)
    return serialize({'result': settings})

def _get_plugin_object(plugin):
    module_ = _get_module(plugin)
    if not module_:
        return None
    args = Settings.get_settings(plugin)
    try:
        object_ = getattr(module_, plugin.capitalize())(**args)
    except Exception, e:
        logger.error('failed to get %s object: %s',
                plugin.capitalize(), str(e))
        return None
    return object_

def process_query(query):
    '''Process the query and return the number of results.
    '''
    if urlparse(query).scheme:
        dst = Settings.get_settings('paths')['finished_download']
        try:
            Transfer.add(query, dst)
        except Exception, e:
            logger.error('failed to create transfer for %s: %s',
                    query, str(e))
            return -1
        count = 1

def download_torrent(url):
    res = parse_qs(urlparse(url).query).get('t')
    if not res:
        raise DownloadError('failed to get torrent id from %s' % url)
    id_ = res[0]

    settings = Settings.get_settings('rutracker')
    if not settings.get('username') or not settings.get('password'):
        raise DownloadError('missing username and password')
    try:
        rutracker = Rutracker(settings['username'], settings['password'])
    except LoginError, e:
        raise DownloadError('failed to get torrent from %s: %s' % (url, str(e)))

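# download_torrent() expects a rutracker-style URL carrying the torrent id in
# the "t" query parameter, e.g. (hypothetical id):
#
#   download_torrent('http://rutracker.org/forum/viewtopic.php?t=123456')
#
# parse_qs(urlparse(url).query)['t'] then yields ['123456'].
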
def get_auth_flow():
    settings = Settings.get_settings('google_api')
    if settings.get('client_id') \
            and settings.get('client_secret') \
            and settings.get('oauth_scope') \
            and settings.get('redirect_uri'):
        return OAuth2WebServerFlow(
                client_id=settings['client_id'],
                client_secret=settings['client_secret'],
                scope=settings['oauth_scope'],
                redirect_uri=settings['redirect_uri'],
                access_type='offline',
                approval_prompt='force',
                )

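# A sketch of the 'google_api' settings section get_auth_flow() reads; all
# values are placeholders, and the Drive scope shown is an assumption:
#
#   {
#       'client_id': '1234.apps.googleusercontent.com',
#       'client_secret': 'xxxx',
#       'oauth_scope': 'https://www.googleapis.com/auth/drive',
#       'redirect_uri': 'http://localhost/callback',
#   }
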
def process(self):
    query = self._get_query()
    dst = Settings.get_settings('paths')['finished_download']
    logger.info('processing %s search "%s"', self.category, query)

    self._search_url()

    for result in results(query,
            category=self.category,
            sort=self.session['sort_results'],
            pages_max=self.session['pages_max'],
            **self._get_filters(query)):
        if not result:
            self.session['nb_errors'] += 1
            continue
        Result.add_result(result, search_id=self._id)
        if not result.auto:
            continue
        if self.safe and not result.safe:
            continue
        if result.get('hash'):
            spec = {'info.hash': result.hash}
        else:
            spec = {'src': result.url}
        if Transfer.find_one(spec):
            continue
        self.session['nb_results'] += 1
        if not self._validate_result(result):
            self.session['nb_pending'] += 1
            continue
        if self.mode == 'inc':
            self._add_next('episode')
        transfer_id = Transfer.add(result.url, dst, type=result.type)
        self.transfers.insert(0, transfer_id)
        self.session['nb_downloads'] += 1
        logger.info('found "%s" on %s (%s)',
                result.title, result.plugin, result.url)
        if self.mode != 'ever':
            break

def process_email():
    email_ = Settings.get_settings('email')
    if not email_.get('host') \
            or not email_.get('port') \
            or not email_.get('username') \
            or not email_.get('password') \
            or not email_.get('from_email'):
        return

    client = GmailClient(host=email_['host'],
            port=email_['port'],
            username=email_['username'],
            password=email_['password'])
    for message in client.iter_messages(email_['from_email']):
        if process_query(message['subject']) != -1:
            client.delete(message['num'])
    client.close()

def process_releases():
    media_langs = Settings.get_settings('media_langs')
    for release in Release.find({
            'processed': False,
            'valid': {'$exists': True},
            }):
        valid = release['valid']
        subtype = release['info'].get('subtype')
        if not valid and subtype == 'music' \
                and _media_exists(name=release['artist'], category='music'):
            valid = True
        if valid is None:
            continue
        if valid:
            search = Release.get_search(release)
            search['langs'] = media_langs.get(subtype, [])
            add_search(**search)
        Release.update({'_id': release['_id']},
                {'$set': {'processed': datetime.utcnow()}}, safe=True)

def update_path():
    paths = Settings.get_settings('paths')
    excl = paths['media_root_exclude']
    re_excl = re.compile(r'^(%s)/' % '|'.join([re.escape(p.rstrip('/'))
            for p in excl]))

    for file in iter_files(str(paths['media_root'])):
        if not re_excl.search(file):
            Media.add_file(file)
            time.sleep(.05)

    for media in Media.find({'files': {'$exists': True}}, timeout=False):
        files_orig = media['files'][:]
        for file in files_orig:
            if not os.path.exists(file) or re_excl.search(file):
                media['files'].remove(file)
        if not media['files'] and not media.get('urls'):
            Media.remove({'_id': media['_id']}, safe=True)
        elif media['files'] != files_orig:
            Media.save(media, safe=True)

    Work.set_info(NAME, 'updated', datetime.utcnow())

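# How the exclusion pattern behaves, assuming media_root_exclude contains
# ['/data/media/tmp']: re_excl becomes r'^(/data/media/tmp)/' and matches
# any file under that directory:
#
#   re_excl.search('/data/media/tmp/foo.mkv')    -> match (skipped)
#   re_excl.search('/data/media/movies/foo.mkv') -> None (indexed)
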
def list_syncs():
    now = datetime.utcnow()
    sync_recurrence = timedelta(minutes=Settings.get_settings('sync')['recurrence'])

    items = []
    for res in Sync.find():
        date_ = res.get('processed')
        if date_ and date_ + sync_recurrence > now:
            res['status'] = 'ok'
        else:
            res['status'] = 'pending'

        media_id = res['parameters'].get('id')
        if not media_id:
            src = res['category']
        else:
            media = Media.get(media_id)
            src = media['name'] if media else media_id

        user = get_user(res['user'])
        dst = user['name'] if user else res['user']
        res['name'] = '%s to %s' % (src, dst)
        items.append(res)

    return serialize({'result': items})

def __init__(self, doc):
    super(Similar, self).__init__(doc)
    self.media_filters = Settings.get_settings('media_filters')

def _get_filters(self, query):
    filters = Settings.get_settings('search_filters')
    res = copy(filters.get(self.category, {}))
    res['include'] = Title(query).get_search_re(auto=True)
    res['langs'] = self.langs
    return res

def search_subtitles(media_id):
    media = Media.get(media_id)
    if not media:
        return
    search_langs = Settings.get_settings('subtitles_langs')
    temp_dir = Settings.get_settings('paths')['tmp']
    if not search_langs:
        logger.error('missing subtitles search langs')
        return
    root_path = Settings.get_settings('paths')['media']['video'].rstrip('/') + '/'

    info = media['info']
    if info['subtype'] == 'tv':
        name = clean(info.get('name'), 6)
        season = info.get('season')
        episode = info.get('episode')
        date = None
    else:
        name = info.get('full_name')
        season = None
        episode = None
        date = media.get('extra', {}).get('imdb', {}).get('date')

    subtitles_langs = []
    plugins = {
        'subscene': Subscene(),
        'opensubtitles': Opensubtitles(**Settings.get_settings('opensubtitles')),
        }
    stat = []
    for file in media['files']:
        if not validate_file(file, root_path):
            continue
        file_ = get_file(file)
        dst = file_.get_subtitles_path()

        processed = False
        for lang in search_langs:
            logger.debug('searching %s subtitles for "%s" (%s)',
                    lang, media['name'], file)
            for obj_name, obj in plugins.items():
                if not obj.accessible:
                    continue
                processed = True
                lang_ = LANGS_DEF[obj_name].get(lang)
                if not lang_:
                    continue
                for url in obj.results(name, season, episode, date, lang_):
                    doc = {'url': url, 'file': file_.file}
                    if Subtitles.find_one(doc):
                        continue
                    try:
                        files_dst = obj.download(url, dst, temp_dir)
                    except RateLimitReached:
                        break
                    if not files_dst:
                        continue
                    for file_dst in files_dst:
                        logger.info('downloaded %s on %s', file_dst, obj_name)
                    doc['created'] = datetime.utcnow()
                    Subtitles.insert(doc, safe=True)

        for lang in search_langs:
            if file_.set_subtitles(lang):
                subtitles_langs.append(lang)
        stat.append(processed)

    if False not in stat:
        media['updated_subs'] = datetime.utcnow()
        media['subtitles'] = sorted(list(set(subtitles_langs)))
        Media.save(media, safe=True)

def update_settings():
    data = request.json
    _sanitize_settings(data)
    for section, settings in data.items():
        Settings.set_settings(section, settings, overwrite=True)
    return jsonify(result=True)

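# A hypothetical request body for update_settings(); each top-level key is a
# settings section (see list_settings() above) and replaces the stored
# section wholesale (overwrite=True):
#
#   {"email": {"host": "imap.gmail.com", "port": 993},
#    "paths": {"tmp": "/tmp"}}
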
def get_auth_url():
    flow = get_auth_flow()
    return flow.step1_get_authorize_url() if flow else None

def set_credentials(token):
    flow = get_auth_flow()
    if not flow:
        return False
    try:
        credentials = flow.step2_exchange(token)
    except Exception, e:
        logger.error('failed to validate google auth token: %s', str(e))
        return False
    Settings.set_setting('google_api_credentials', 'credentials',
            Credentials.to_json(credentials))
    # refresh_token = credentials.refresh_token
    # if refresh_token:
    #     Settings.set_setting('google_api_credentials',
    #             'refresh_token', refresh_token)
    return True

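# A minimal usage sketch combining the two helpers above; 'token' stands for
# the authorization code the user copies after visiting the consent URL:
#
#   url = get_auth_url()
#   # ... the user visits url, grants access and retrieves the code ...
#   if set_credentials(token):
#       # credentials are now stored under 'google_api_credentials'
#       # and can be picked up by process_drive()
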
def get_netflix_object(username, password):
    path_tmp = Settings.get_settings('paths')['tmp']
    res = Netflix(username, password,
            cookie_file=os.path.join(path_tmp, 'netflix_cookies.txt'))
    if res.logged:
        return res