def process_media():
    count = 0
    root_path = Settings.get_settings('paths')['media']['video'].rstrip('/') + '/'

    # Queue a subtitles search for videos whose subtitles were never updated
    # or were last updated before the longest refresh interval
    for media in Media.find({
            'type': 'video',
            'files': {'$exists': True},
            '$or': [
                {'updated_subs': {'$exists': False}},
                {'updated_subs': {'$lt': datetime.utcnow() - DELTA_UPDATE_DEF[-1][1]}},
            ],
            },
            sort=[('updated_subs', ASCENDING)]):
        # Skip media whose files live outside the configured video directory
        if not [f for f in media['files'] if f.startswith(root_path)]:
            continue
        if not validate_media(media):
            continue

        target = '%s.workers.subtitles.search_subtitles' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(media['_id'],), timeout=TIMEOUT_SEARCH)

        count += 1
        if count == WORKERS_LIMIT:
            return
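# Note on the '$lt' pre-filter above: the only thing the code requires of
# DELTA_UPDATE_DEF[-1][1] is that it is a timedelta, presumably the coarsest
# interval of an age-tiered refresh schedule (see the hypothetical sketch after
# update_extra() below). validate_media() can then apply a finer-grained rule
# per item without the query ever excluding something that is actually due.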
def run():
    # Refresh the media root path when needed, then re-scan the media files
    if validate_update_path():
        target = '%s.workers.file.update_path' % settings.PACKAGE_NAME
        get_factory().add(target=target, timeout=TIMEOUT_UPDATE)

    target = '%s.workers.file.update_media' % settings.PACKAGE_NAME
    get_factory().add(target=target, timeout=TIMEOUT_UPDATE)
def process_similars():
    count = 0
    for search in SimilarSearch.find(sort=[('processed', ASCENDING)]):
        # Skip searches processed more recently than their recurrence interval
        processed = search.get('processed')
        delta = timedelta(hours=search.get('recurrence', DEFAULT_RECURRENCE))
        if processed and processed > datetime.utcnow() - delta:
            continue

        target = '%s.workers.dig.process_similar' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(search['_id'],), timeout=TIMEOUT_SEEK)

        count += 1
        if count == WORKERS_LIMIT:
            break
def process_searches():
    count = 0
    for search in MSearch.find(
            sort=[('session.last_search', ASCENDING)]):
        search = Search(search)
        if not search.validate():
            continue

        target = '%s.workers.search.process_search' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(search._id,), timeout=TIMEOUT_SEARCH)

        count += 1
        if count == WORKERS_LIMIT:
            break
def run():
    if Google().accessible:
        factory = get_factory()
        # Queue one import worker per release source
        for type in ('imdb', 'metacritic', 'rottentomatoes',
                'vcdquality', 'tvrage', 'sputnikmusic'):
            target = '%s.workers.release.import_releases' % settings.PACKAGE_NAME
            factory.add(target=target, args=(type,), timeout=TIMEOUT_IMPORT)

    # Prune releases older than the retention window
    Release.remove({'date': {'$lt': datetime.utcnow() - DELTA_RELEASE}},
            safe=True)
def update_extra(objtype, objmodel):
    count = 0
    sort = [('date', DESCENDING)] if objtype == 'release' else [('created', DESCENDING)]
    model = get_model(objtype, objmodel)
    if not model:
        return

    # Queue extra-info updates for objects never updated or last updated
    # before the longest refresh interval
    for obj in model.find({
            '$or': [
                {'updated': {'$exists': False}},
                {'updated': {'$lt': datetime.utcnow() - DELTA_UPDATE_DEF[-1][1]}},
            ],
            }, sort=sort):
        if not validate_object(obj['created'], obj.get('updated')):
            continue

        target = '%s.workers.extra.update_obj_extra' % settings.PACKAGE_NAME
        get_factory().add(target=target,
                args=(objtype, objmodel, obj['_id']), timeout=TIMEOUT_UPDATE)

        count += 1
        if count == WORKERS_LIMIT:
            break
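# validate_object() is given the object's creation and last-update times, which
# suggests an age-tiered refresh policy on top of the coarse '$lt' pre-filter.
# A hypothetical sketch of such a check (not the project's actual
# validate_object), assuming a schedule of (max_age, refresh_interval) pairs:
from datetime import datetime, timedelta

def _due_for_update(created, updated,
        schedule=((timedelta(days=30), timedelta(days=1)),
                  (timedelta(days=365), timedelta(days=15)))):
    # Objects that were never updated are always due
    if not updated:
        return True
    now = datetime.utcnow()
    age = now - created
    for max_age, interval in schedule:
        if age <= max_age:
            return now - updated > interval
    # Older than every tier: fall back to the coarsest interval
    return now - updated > schedule[-1][1]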
def main():
    if not check_commands(CMDS):
        sys.exit(1)

    factory = get_factory()
    factory.remove(daemon=True)

    # Logging handlers
    fh = RotatingFileHandler(settings.LOG_FILE, 'a',
            settings.LOG_SIZE, settings.LOG_COUNT)
    fh.setFormatter(logging.Formatter(settings.LOG_FORMAT))
    dh = DbHandler(logging.ERROR)
    factory.logging_handlers = (fh, dh)

    # Register every worker module's run() entry point as a daemon task
    for module in get_package_modules(WORKERS_DIR):
        if module != '__init__':
            target = '%s.%s.%s.run' % (settings.PACKAGE_NAME,
                    WORKERS_DIR, module)
            factory.add(target=target, daemon=True)

    factory.run()
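# main() discovers every module under WORKERS_DIR and schedules its run()
# callable as a daemon task, so each scheduler in this file only has to expose
# a module-level run(). A minimal sketch of such a worker module, assuming the
# factory simply imports the dotted target and calls it periodically (module
# and message below are made up):
#
# <package>/workers/example.py
import logging

logger = logging.getLogger(__name__)

def run():
    # Periodic entry point invoked by the factory daemon loop
    logger.info('example worker tick')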
        return jsonify(error='failed to create search %s' % search)
    return jsonify(result=True)

    name = data.get('name')
    if not name:
        return jsonify(error='missing name')

    # Dispatch on the requested search type
    if type == 'url':
        dst = Settings.get_settings('paths')['finished_download']
        try:
            Transfer.add(name, dst)
        except Exception as e:
            return jsonify(error='failed to create transfer: %s' % str(e))
    elif type == 'movies_artist':
        get_factory().add(target='mediacore.model.search.add_movies',
                args=(clean(name, 1), langs))
    elif type == 'music_artist':
        get_factory().add(target='mediacore.model.search.add_music',
                args=(clean(name, 1),))
    else:
        if not data.get('mode'):
            return jsonify(error='missing mode')
        search = {
            'name': clean(name, 1),
            'category': type,
            'mode': data['mode'],
            'langs': langs,
            'safe': False,
            }
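    # Illustrative request bodies for the branches above. Field names come from
    # the data.get() calls; everything else (type and mode values, the langs
    # list) is an assumption about the payload shape, not the actual API:
    #
    #   {"type": "url", "name": "http://example.com/file.torrent"}
    #   {"type": "movies_artist", "name": "some director", "langs": ["en", "fr"]}
    #   {"type": "music_artist", "name": "some band"}
    #   {"type": "movies", "name": "some title", "mode": "once", "langs": ["en"]}
    #
    # The last form is the generic case: it requires a "mode" and is turned into
    # the `search` document built above.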