def compute_trending(test, **kwargs):
    """Build IMDb objects for the currently trending movie torrents and persist them.

    Args:
        test: when truthy, log the task name and exit (dry-run guard).
        **kwargs: ignored; accepted for task-runner compatibility.
    """
    if test:
        # fixed: previously logged "get_trending task", the wrong task name
        logger.debug("compute_trending task")
        return
    trendings_movies = []
    torrents_manager = TorrentsManager(config, PDbManager, CATEGORY_NAME['movies'])
    try:
        with db_factory.get_instance():
            trendings_torrents = torrents_manager.torrents_trending
            # Only the torrent objects are needed; score and date are discarded.
            trendings_movies = movies_from_torrents(
                [t for t, _, _ in trendings_torrents])
    except ValueError as err:
        logger.warning(err)
        return
    with db_factory.get_instance():
        for movie in trendings_movies:
            try:
                PDbManager.imdb_object_to_db(movie, update=True)
            except Exception as err:
                # Best-effort: one failing movie must not abort the whole batch.
                logger.error('Error during imdb object creation (%s): %s',
                             movie.imdb_id, err)
def torrents_stats_with_tracker(tracker: Tracker, category: list = None):
    """Scrape current stats for a tracker's NEW/FOLLOW torrents and save them.

    Args:
        tracker: tracker whose torrents are scraped.
        category: optional category filter passed to the DB query.

    Returns:
        Number of newly created stats rows.
    """
    nb_stats = 0
    with db_factory.get_instance():
        torrents = PDbManager.get_torrents_by_tracker(
            tracker,
            status=[Torrent._STATUS_NEW, Torrent._STATUS_FOLLOW],
            category=category)
    logger.debug("Torrents number: %s", len(torrents))
    stats_scraper = stats.StatsScraper(tracker)
    stats_scraper.torrents = torrents
    stats_scraper.run_by_batch()
    stats_collection = stats_scraper.stats_collection
    # Hoisted: the original called stats_collection.count() three times.
    nb_collected = stats_collection.count()
    logger.debug("Stats number: %s", nb_collected)
    if nb_collected != len(torrents):
        logger.warning("Statistics count is wrong %s/%s",
                       nb_collected, len(torrents))
    with db_factory.get_instance():
        for tracker_stats in stats_collection.stats:
            _, created = PDbManager.save_stats(tracker_stats)
            nb_stats += created
    return nb_stats
def get_trending(test, category: list = None, mindate=None, maxdate=None, delta_hours=1, **kwargs):
    """Print and return trending torrents for the given categories and window.

    Args:
        test: dry-run guard; log and return immediately when truthy.
        category: optional list of category names, mapped through CATEGORY_NAME.
        mindate / maxdate / delta_hours: time-window parameters forwarded to the query.

    Returns:
        The query results, or None when nothing ran or a ValueError occurred.
    """
    if category is not None:
        category = [CATEGORY_NAME.get(name) for name in category]
    results = None
    if test:
        logger.debug("get_trending task")
        return
    try:
        with db_factory.get_instance():
            results = PDbManager.get_trending_torrents_by_category(
                category, mindate, maxdate, delta_hours)
            for torrent, score, valid_date in results:
                print("%s / %s / %s" % (torrent, score, valid_date))
    except ValueError as err:
        logger.warning(err)
    return results
def create_database(test, **kwargs):
    """Create every application table registered in _TABLES_MODELS.

    Args:
        test: dry-run guard; log and return immediately when truthy.
    """
    if test:
        # fixed: previously logged "reset_database task", the wrong task name
        logger.debug("create_database task")
        return
    # list(...) instead of an identity comprehension
    models = list(_TABLES_MODELS.values())
    with db_factory.get_instance() as db:
        db.create_tables(models)
def load_sqlite_backup(backup_date: str, test, **kwargs):
    """Restore the live SQLite database from a dated backup file.

    Args:
        backup_date: date string interpolated into _BACKUP_FORMAT to locate the file.
        test: dry-run guard; log and return immediately when truthy.
    """
    if test:
        logger.debug("load_sqlite_backup task")
        return
    with db_factory.get_instance() as db:
        assert isinstance(db, CSqliteExtDatabase)
        backup_dir = config.get('sqlite', 'backup_dir')
        backup_path = os.path.join(backup_dir, _BACKUP_FORMAT % (backup_date))
        # Copy the backup's contents over the live database.
        CSqliteExtDatabase(backup_path).backup(db)
def sqlite_backup(test, **kwargs):
    """Dump the live SQLite database to a timestamped file in the backup dir.

    Args:
        test: dry-run guard; log and return immediately when truthy.
    """
    if test:
        logger.debug("sqlite_backup task")
        return
    with db_factory.get_instance() as db:
        assert isinstance(db, CSqliteExtDatabase)
        timestamp = datetime.datetime.now().strftime(_BACKUP_DATE_FORMAT)
        target = os.path.join(config.get('sqlite', 'backup_dir'),
                              _BACKUP_FORMAT % (timestamp))
        db.backup_to_file(target)
def compute_trending(test, category: list = None, **kwargs):
    """Evaluate trend scores for the given categories and persist them.

    Args:
        test: dry-run guard; log and return immediately when truthy.
        category: optional list of category names, mapped through CATEGORY_NAME.
    """
    if category is not None:
        category = [CATEGORY_NAME.get(name) for name in category]
    if test:
        logger.debug("compute_trending task")
        return
    with db_factory.get_instance():
        manager = TrendsManager(config, PDbManager, category)
        manager.evaluate(NormalizedTrendsEngine(config))
        manager.save_trends()
def update_status(test, category: list = None, **kwargs):
    """Refresh the status of stored torrents for the given categories.

    Args:
        test: dry-run guard; log and return immediately when truthy.
        category: optional list of category names, mapped through CATEGORY_NAME.
    """
    if test:
        # fixed: previously logged "get_trending task", the wrong task name
        logger.debug("update_status task")
        return
    if category is not None:
        category = [CATEGORY_NAME.get(c) for c in category]
    torrents_manager = TorrentsManager(config, PDbManager, category)
    with db_factory.get_instance():
        torrents_manager.update_torrents_status()
    return
def setUpClass(cls):
    """Patch indexer/tracker configuration for the tests and build the test DB."""
    indexers_config = {
        "indexer_1": {
            "movies": {
                "active": True,
                "action": "search",
                "params": {"cat": 102183},
            },
            "wrong_cat": {
                "active": False,
                "action": "search",
            },
        },
        "indexer_2": {
            "series": {
                "active": True,
                "action": "search",
                "params": {"cat": 102185},
            },
        },
    }
    cls.indexers_patch = patch.dict('mediastrends.indexers_config', indexers_config, clear=True)
    cls.indexers_mock = cls.indexers_patch.start()

    trackers_config = {
        'tracker_1': {'active': True, 'scheme': 'http', 'netloc': 'netloc:8080'},
        'tracker_2': {'active': True, 'scheme': 'udp', 'netloc': 'netloc:6060'},
        'tracker_3': {'active': False, 'scheme': 'udp', 'netloc': 'netloc:5217'},
    }
    cls.trackers_patch = patch.dict('mediastrends.trackers_config', trackers_config, clear=True)
    cls.trackers_mock = cls.trackers_patch.start()

    # Point the factory at the test sqlite instance and create the schema.
    # NOTE(review): 'defaut_instance' spelling matches the project attribute name.
    db_factory.defaut_instance = 'sqlite-app-test'
    PDbManager.create_database(db_factory.get_instance())
def torrents_add(test, indexer: str, category: list = None, **kwargs):
    """Fetch torznab RSS results for each category and store new torrents.

    Args:
        test: dry-run guard; log and return 0 immediately when truthy.
        indexer: key into indexers_config identifying the jackett indexer.
        category: required list of category names ('movies' / 'series').

    Returns:
        Number of torrent/tracker associations newly created.
    """
    nb_torrent = 0
    if test:
        logger.debug("torrents_add task")
        return nb_torrent
    assert category is not None
    assert indexer in indexers_config
    for cat in category:
        logger.debug("Category: %s", cat)
        logger.debug("Indexer: %s", indexer)
        if cat not in ['movies', 'series']:
            # fixed: original formatted "…series " % cat with no %s placeholder,
            # which raises TypeError instead of logging
            logger.error("category must be movies or series: %s", cat)
            break
        try:
            client = create_torznab_from_cli_params(indexer, cat)
        except Exception as err:
            logger.error("Error during jacket creation: %s", str(err))
            break
        try:
            rss_content = client.get_rss_content()
            rss_parser = TorznabJackettRSS(rss_content)
            rss_parser.process_items()
        except Exception as err:
            logger.error("Error while contacting jackett: %s", str(err))
            # fixed: original fell through here, leaving rss_parser unbound
            # and raising NameError on the next line
            break
        if len(rss_parser.items) == 0:
            logger.warning('RSS feed is empty')
            break
        db = db_factory.get_instance()
        for item in rss_parser.items:
            try:
                torznab_result = elements_from_torznab_result(item)
                if not torznab_result['keep']:
                    # fixed: original had `pass` here, so rejected items were
                    # still written to the database
                    continue
                with db:
                    for tracker in torznab_result['trackers']:
                        _, _, to_created, _ = PDbManager.save_torrent_tracker(
                            torrent=torznab_result['torrent'],
                            tracker=tracker)
                        nb_torrent += to_created
            except Exception as err:
                # Best-effort: one bad item must not abort the whole feed.
                logger.error("Error during elements creation: %s", str(err))
    logger.debug("%s torrents added", nb_torrent)
    return nb_torrent
def main():
    """One-off migration: add genre/language columns to the pimdbobject table."""
    db = db_factory.get_instance()
    migrator = SqliteMigrator(db)
    genres_column = TextField(null=True)
    languages_column = TextField(null=True)
    with db:
        migrate(
            migrator.add_column('pimdbobject', 'genres', genres_column),
            migrator.add_column('pimdbobject', 'language_codes', languages_column),
        )
def reset_database(test, no_backup=False, **kwargs):
    """Back up the database, then drop and recreate every table.

    Args:
        test: dry-run guard; log and return immediately when truthy.
        no_backup: refusing to run without a backup, this flag aborts the task.
    """
    if test:
        logger.debug("reset_database task")
        return
    if no_backup:
        # Deliberate refusal: never wipe without a safety copy.
        logger.warning("No way you don't backup")
        return
    sqlite_backup(test)
    models = [model for model in _TABLES_MODELS.values()]
    with db_factory.get_instance() as db:
        db.drop_tables(models, safe=True)
        db.create_tables(models)
def reset_table_(model_name, no_backup, test, **kwargs):
    """Drop and recreate the single table identified by model_name.

    Args:
        model_name: key into _TABLES_MODELS selecting the model.
        no_backup: backup-before-reset path; currently unimplemented, aborts.
        test: dry-run guard; log and return immediately when truthy.
    """
    assert model_name in _TABLES_MODELS.keys()
    if no_backup:
        # fixed typo in log message: "yey" -> "yet"
        logger.debug("Not implemented yet")
        return
    # Membership asserted above, so index directly instead of .get(..., None).
    model = _TABLES_MODELS[model_name]
    if test:
        logger.debug("reset_table_ task")
        return
    with db_factory.get_instance():
        model.drop_table()
        model.create_table()
def get_trending(test, mindate=None, maxdate=None, delta_hours=1, **kwargs):
    """Print and return trending movies for the given time window.

    Args:
        test: dry-run guard; log and return immediately when truthy.
        mindate / maxdate / delta_hours: window parameters forwarded to the query.

    Returns:
        The query results, or None when nothing ran or a ValueError occurred.
    """
    results = None
    if test:
        logger.debug("get_trending task")
        return
    try:
        with db_factory.get_instance():
            results = PDbManager.get_trending_movies(mindate, maxdate, delta_hours)
            for item in results:
                print(item)
    except ValueError as err:
        logger.warning(err)
    return results
def setUpModule():
    # Module-level test fixture: point the factory at the test sqlite
    # instance and create all tables before any test in this module runs.
    # NOTE(review): 'defaut_instance' spelling matches the project attribute name.
    db_factory.defaut_instance = 'sqlite-app-test'
    PDbManager.create_database(db_factory.get_instance())
def tearDownModule():
    # Module-level test fixture: drop the test database once all tests finish.
    PDbManager.drop_database(db_factory.get_instance())
def tearDownClass(cls):
    # Stop every patch started in setUpClass, then drop the test database.
    patch.stopall()
    PDbManager.drop_database(db_factory.get_instance())