def _get_object_search(id, type):
    """Resolve an object by id/type and build its search dict.

    Returns the search dict (with the object's 'extra' merged in) or None
    when the object does not exist or yields no search.
    """
    obj = None
    search = None
    if type == 'media':
        obj = Media.get(id)
        search = Media.get_search(obj) if obj else None
    elif type == 'release':
        obj = Release.get(id)
        search = Release.get_search(obj) if obj else None
    elif type == 'search':
        obj = Search.get(id)
        if obj:
            search = {
                'name': obj['name'],
                'category': obj['category'],
                'search_id': id,
            }
            # Album is optional and only copied when present
            if obj.get('album'):
                search['album'] = obj['album']
    if obj and search:
        search['extra'] = obj.get('extra', {})
    return search
def run():
    """Process the finished-downloads directory.

    Bad downloads are deleted; valid ones are moved into the media path
    matching their type (falling back to 'misc') and recorded in the
    Download collection.
    """
    path = Settings.get_settings('paths')['finished_download']
    if not os.path.exists(path):
        return
    media_paths = Settings.get_settings('paths')['media']
    for download in downloads(path):
        # Invalid downloads are removed instead of moved
        if not check_download(download.file):
            if remove_file(download.file):
                logger.info('removed %s (bad download)', download.filename)
            continue
        # Unknown types land in the 'misc' media path
        if download.type not in media_paths:
            download.type = 'misc'
        dst = media_paths[download.type]
        res = move_file(download.file, dst)
        if not res:
            continue
        Media.add_file(res)
        Download.insert({
            'name': download.filename,
            'category': download.type,
            'path': dst,
            'created': datetime.utcnow(),
        }, safe=True)
        logger.info('moved %s to %s', download.filename, dst)
def _search_file(self):
    """Look for already-downloaded files matching this search.

    When matching media with files exist, the search is removed (after
    optionally propagating 'src' and queuing the next episode in 'inc'
    mode) and True is returned. Otherwise the last-search timestamp is
    refreshed and None is returned.
    """
    if self.mode == 'ever':
        return
    last_search = self.session['last_file_search']
    # Throttle: skip if a file search ran recently
    if last_search and last_search > datetime.utcnow() - DELTA_FILE_SEARCH:
        return
    matches = Media.search(**self)
    found_files = [f for m in matches for f in m.get('files', [])]
    if matches and found_files:
        # Incremental mode moves on to the next episode
        if self.mode == 'inc':
            self._add_next('episode')
        src = self.get('src')
        if src:
            Media.update({'_id': {'$in': [m['_id'] for m in matches]}},
                         {'$set': {'src': src}}, safe=True)
        MSearch.remove({'_id': self._id}, safe=True)
        logger.info('removed %s search "%s": found files %s',
                    self.category, self._get_query(), found_files)
        return True
    MSearch.update({'_id': self._id},
                   {'$set': {'session.last_file_search': datetime.utcnow()}},
                   safe=True)
def _search_url(self):
    """Try to find a streaming URL for this search on netflix.

    Returns True when a URL was found for a movie (the search is then
    removed); False otherwise. The last-url-search timestamp is refreshed
    unless a movie URL ended the search.
    """
    last_search = self.session['last_url_search']
    # Throttle: skip if a URL search ran recently
    if last_search and last_search > datetime.utcnow() - DELTA_URL_SEARCH:
        return False
    if self.category not in NETFLIX_CATEGORIES:
        return False
    creds = Settings.get_settings('netflix')
    if not (creds['username'] and creds['password']):
        return False
    netflix = get_netflix_object(creds['username'], creds['password'])
    if not netflix:
        return False
    res = netflix.get_info(self.name, self.category)
    if res:
        Media.add_url(url=res['url'], name=res['title'], category=self.category)
        logger.info('found "%s" on netflix (%s)', res['title'], res['url'])
        # Movies are one-shot: a single URL completes the search
        if self.category == 'movies':
            MSearch.remove({'_id': self._id}, safe=True)
            logger.info('removed %s search "%s": found url %s',
                        self.category, self.name, res['url'])
            return True
    MSearch.update({'_id': self._id},
                   {'$set': {'session.last_url_search': datetime.utcnow()}},
                   safe=True)
    return False
def list_media(type, skip, limit):
    """Return a serialized, paginated listing of objects of the given type.

    Supported types: 'media', 'release', 'search', 'similar'. Optional
    request args: 'category', 'query' (full-text spec) and 'sort'
    ('name', 'rating' or the default date ordering).
    """
    cache = {}
    spec = {}
    category = request.args.get('category')
    if category:
        # Searches store the category directly; media/releases nest it
        field = 'category' if type in ('search', 'similar') else 'info.subtype'
        spec[field] = category
    query = request.args.get('query')
    if query:
        spec.update(_get_search_spec(query))
    sort_arg = request.args.get('sort', 'date')
    if sort_arg == 'name':
        sort = [('name', ASCENDING)]
    elif sort_arg == 'rating':
        sort = [('rating', DESCENDING)]
    else:
        sort = [('date', DESCENDING), ('created', DESCENDING)]
    params = {'sort': sort, 'skip': skip, 'limit': limit}
    items = []
    if type == 'media':
        for res in Media.find(spec, **params):
            search = Media.get_search(res)
            items.append(_get_object(res, type=type,
                                     has_search=_has_search(cache, search),
                                     has_similar=_has_similar(cache, search)))
    elif type == 'release':
        for res in Release.find(spec, **params):
            search = Release.get_search(res)
            items.append(_get_object(res, type=type,
                                     has_search=_has_search(cache, search),
                                     has_similar=_has_similar(cache, search)))
    elif type == 'search':
        for res in Search.find(spec, **params):
            items.append(_get_object(res, type=type, has_search=True,
                                     has_similar=_has_similar(cache, res)))
    elif type == 'similar':
        for res in SimilarSearch.find(spec, **params):
            items.append(_get_object(res, type=type, has_similar=True))
    return serialize({'result': items})
def process_media():
    """Queue subtitle-search workers for videos with stale subtitle data.

    Oldest-updated media go first; at most WORKERS_LIMIT workers are
    queued per run.
    """
    count = 0
    root_path = Settings.get_settings('paths')['media']['video'].rstrip('/') + '/'
    spec = {
        'type': 'video',
        'files': {'$exists': True},
        # Never-updated media, or media older than the largest update delta
        '$or': [
            {'updated_subs': {'$exists': False}},
            {'updated_subs': {'$lt': datetime.utcnow() - DELTA_UPDATE_DEF[-1][1]}},
        ],
    }
    for media in Media.find(spec, sort=[('updated_subs', ASCENDING)]):
        # Only media with at least one file under the video root
        if not any(f.startswith(root_path) for f in media['files']):
            continue
        if not validate_media(media):
            continue
        target = '%s.workers.subtitles.search_subtitles' % settings.PACKAGE_NAME
        get_factory().add(target=target, args=(media['_id'],),
                          timeout=TIMEOUT_SEARCH)
        count += 1
        if count == WORKERS_LIMIT:
            return
def remove_media():
    """Remove media/search/similar objects listed in the request payload.

    Expects JSON with 'ids' (one id or a list) and 'type' ('media',
    'search' or 'similar'). For media, the underlying files are removed
    from disk as well. Returns a JSON result or error.
    """
    data = request.json
    ids = data.get('ids')
    if not ids:
        return jsonify(error='missing ids')
    if not isinstance(ids, (tuple, list)):
        ids = [ids]
    spec = {'_id': {'$in': [ObjectId(i) for i in ids]}}
    type = data.get('type')
    if type == 'media':
        for id in ids:
            # BUGFIX: the original used map(remove_file, ...); under
            # Python 3 map() is lazy, so the files were never removed.
            # An explicit loop actually performs the deletions.
            for base in Media.get_bases(id):
                remove_file(base)
        Media.remove(spec)
    elif type == 'search':
        Search.remove(spec)
    elif type == 'similar':
        SimilarSearch.remove(spec)
    else:
        return jsonify(error='unknown type %s' % type)
    return jsonify(result=True)
def share_media():
    """Create a sync sharing a media item with another user.

    Expects JSON with 'id' (media id), 'user' (target user id) and an
    optional 'path'. Returns a JSON result or error.
    """
    data = request.json
    if not data.get('id'):
        return jsonify(error='missing id')
    id = ObjectId(data['id'])
    media = Media.get(id)
    if not media:
        return jsonify(error='media %s not found' % id)
    user = data.get('user')
    if not user:
        # BUGFIX: the original message ('user %s not found' % user) always
        # rendered "user None not found"; this branch means the parameter
        # is missing, so report it like the 'missing id' case above.
        return jsonify(error='missing user')
    parameters = {
        'id': id,
        'path': data.get('path'),
    }
    if not Sync.add(user=ObjectId(user),
                    category=media['info']['subtype'],
                    parameters=parameters):
        return jsonify(error='failed to create sync')
    return jsonify(result=True)
def list_syncs():
    """Return all syncs, each annotated with a status and display name.

    A sync is 'ok' when it was processed within the configured recurrence
    window, 'pending' otherwise. The name is '<source> to <user>'.
    """
    now = datetime.utcnow()
    recurrence = timedelta(minutes=Settings.get_settings('sync')['recurrence'])
    items = []
    for sync in Sync.find():
        processed = sync.get('processed')
        sync['status'] = ('ok' if processed and processed + recurrence > now
                          else 'pending')
        media_id = sync['parameters'].get('id')
        if media_id:
            media = Media.get(media_id)
            src = media['name'] if media else media_id
        else:
            # Category-wide syncs have no single media source
            src = sync['category']
        user = get_user(sync['user'])
        dst = user['name'] if user else sync['user']
        sync['name'] = '%s to %s' % (src, dst)
        items.append(sync)
    return serialize({'result': items})
def update_path():
    """Index files under the media root and prune stale media entries.

    First pass registers every non-excluded file; second pass drops file
    references that vanished or became excluded, removing media documents
    left with no files and no urls.
    """
    paths = Settings.get_settings('paths')
    excluded = paths['media_root_exclude']
    pattern = '|'.join(re.escape(p.rstrip('/')) for p in excluded)
    re_excl = re.compile(r'^(%s)/' % pattern)
    for file in iter_files(str(paths['media_root'])):
        if not re_excl.search(file):
            Media.add_file(file)
        # Small pause to avoid hammering the disk / database
        time.sleep(.05)
    for media in Media.find({'files': {'$exists': True}}, timeout=False):
        files_orig = media['files'][:]
        media['files'] = [f for f in files_orig
                          if os.path.exists(f) and not re_excl.search(f)]
        if not media['files'] and not media.get('urls'):
            Media.remove({'_id': media['_id']}, safe=True)
        elif media['files'] != files_orig:
            Media.save(media, safe=True)
    Work.set_info(NAME, 'updated', datetime.utcnow())
def update_media():
    """Refresh each media document's 'date' from its files' mtime."""
    for media in Media.find({'files': {'$exists': True}}, timeout=False):
        mtime = get_mtime(media['files'])
        if not mtime:
            continue
        Media.update({'_id': media['_id']},
                     {'$set': {'date': mtime}}, safe=True)
def search_subtitles(media_id):
    """Search and download subtitles for every file of a media document.

    For each valid file under the video root, each configured language is
    searched across every accessible plugin (subscene, opensubtitles).
    Newly downloaded subtitles are recorded in the Subtitles collection.
    The media document is only stamped as updated when every file was
    handled by at least one accessible plugin.
    """
    media = Media.get(media_id)
    if not media:
        return
    search_langs = Settings.get_settings('subtitles_langs')
    temp_dir = Settings.get_settings('paths')['tmp']
    if not search_langs:
        logger.error('missing subtitles search langs')
        return
    root_path = Settings.get_settings('paths')['media']['video'].rstrip('/') + '/'
    info = media['info']
    if info['subtype'] == 'tv':
        # TV: search by cleaned show name + season/episode, no release date
        name = clean(info.get('name'), 6)
        season = info.get('season')
        episode = info.get('episode')
        date = None
    else:
        # Movies: search by full name + imdb release date when available
        name = info.get('full_name')
        season = None
        episode = None
        date = media.get('extra', {}).get('imdb', {}).get('date')
    subtitles_langs = []
    plugins = {
        'subscene': Subscene(),
        'opensubtitles': Opensubtitles(**Settings.get_settings('opensubtitles')),
    }
    # One boolean per file: whether any accessible plugin processed it
    stat = []
    for file in media['files']:
        if not validate_file(file, root_path):
            continue
        file_ = get_file(file)
        dst = file_.get_subtitles_path()
        processed = False
        for lang in search_langs:
            logger.debug('searching %s subtitles for "%s" (%s)',
                    lang, media['name'], file)
            for obj_name, obj in plugins.items():
                if not obj.accessible:
                    continue
                processed = True
                # Map the generic lang code to the plugin's own code
                lang_ = LANGS_DEF[obj_name].get(lang)
                if not lang_:
                    continue
                for url in obj.results(name, season, episode, date, lang_):
                    doc = {'url': url, 'file': file_.file}
                    # Skip urls already downloaded for this file
                    if Subtitles.find_one(doc):
                        continue
                    try:
                        files_dst = obj.download(url, dst, temp_dir)
                    except RateLimitReached:
                        # Stop querying this plugin's result urls;
                        # other plugins/languages still run
                        break
                    if not files_dst:
                        continue
                    for file_dst in files_dst:
                        logger.info('downloaded %s on %s', file_dst, obj_name)
                    doc['created'] = datetime.utcnow()
                    Subtitles.insert(doc, safe=True)
        # Record which languages now have subtitles on disk for this file
        for lang in search_langs:
            if file_.set_subtitles(lang):
                subtitles_langs.append(lang)
        stat.append(processed)
    # Only mark the media updated when no file was left unprocessed
    # (i.e. every file was seen by at least one accessible plugin)
    if False not in stat:
        media['updated_subs'] = datetime.utcnow()
        media['subtitles'] = sorted(list(set(subtitles_langs)))
        Media.save(media, safe=True)
def _media_exists(**kwargs):
    """Return True when enough files already exist for the given media.

    The per-category minimum comes from settings.FILES_COUNT_MIN
    (defaulting to 1).
    """
    category = kwargs.get('category')
    files = Media.search_files(name=kwargs.get('name'),
                               category=category,
                               album=kwargs.get('album'))
    minimum = settings.FILES_COUNT_MIN.get(category, 1)
    return len(files) >= minimum