def update_trakt(release_key):
    """ Fetch metadata for a release from trakt and persist it.

    :param release_key: Release to look up (TV episode, daily episode or movie)
    :type release_key: BaseReleaseKey|TVReleaseKey|TVDailyReleaseKey|MovieReleaseKey
    :return: The stored media info row, or None for unknown media types / on error
    """
    session = Session()
    media_info = None
    try:
        if release_key.media_type == constants.MEDIA_TV:
            if release_key.daily:
                info = trakt.show_episode_summary_daily(
                    release_key.name, release_key.day, release_key.month, release_key.year)
            else:
                info = trakt.show_episode_summary(
                    release_key.name, release_key.season, release_key.episode)
            raise_unless(info, exceptions.ApiError,
                         "Failed to fetch metadata for: {}".format(release_key))
            media_info = _update_trakt_tv(session, info)
        elif release_key.media_type == constants.MEDIA_MOVIE:
            info = trakt.movie_summary(release_key.name)
            raise_unless(info, exceptions.ApiError,
                         "Failed to fetch metadata for: {}".format(release_key))
            media_info = _update_trakt_movie(session, info)
        else:
            # Unknown media type: nothing to fetch or persist
            return None
        session.commit()
    except DBAPIError:
        log.exception("Error querying media info")
        session.rollback()
    except exceptions.ApiError as e:
        log.warn(e.message)
    except Exception:
        # Catch-all boundary: log with traceback, return whatever we have
        log.exception("Could not update trakt info")
    return media_info
def update_imdb(media_info=None, release_key=None):
    """ Augment an existing media info row with IMDB rating, cover and people.

    :param media_info: Media info row to update; must have ``imdb_id`` set for
        anything to happen
    :param release_key: Unused here; kept for interface compatibility
    :return: The (possibly updated) media_info
    """
    session = Session()
    try:
        if media_info.imdb_id:
            movie_info = imdb.get_movie(media_info.imdb_id)
            if movie_info:
                media_info.imdb_score = movie_info['rating']
                media_info.imdb_votes = movie_info['votes']
                media_info.cover_url = movie_info['cover_url']
                for director in movie_info.get('director', []):
                    person = get_person_imdb(session, director['person_id'],
                                             name=director['name'])
                    # "person not in" is the idiomatic (and PEP 8) form
                    if person not in media_info.directors:
                        media_info.directors.append(person)
                for cast_member in movie_info.get('cast', []):
                    person = get_person_imdb(session, cast_member['person']['person_id'],
                                             name=cast_member['person']['name'])
                    if person not in media_info.cast:
                        media_info.cast.append(person)
                session.commit()
    except DBAPIError:
        session.rollback()
    except exceptions.ApiError as e:
        log.warn(e.message)
    return media_info
def test_geoip(self):
    """ Verify the GeoIP fixture imports and country lookups resolve. """
    session = Session()
    fixture_path = get_fixture("GeoIPCountryCSV.zip")
    # The zipped CSV fixture must import cleanly and yield exactly 10 rows
    self.assertTrue(geoip.update(session, fixture_path))
    row_count = session.query(geoip.models.GeoIP).count()
    self.assertEqual(10, row_count)
    # Lookups accept both an integer IP and its dotted-quad string form
    for address in (16777217, net.int2ip(16777217)):
        self.assertEqual("AU", geoip.find_country_code(session, address))
    # Addresses outside any imported range resolve to None
    self.assertIsNone(geoip.find_country_code(session, 1000))
def test_geoip(self):
    """ Exercise GeoIP database import and country lookup.

    NOTE(review): this variant calls ``geoip.find_country`` while the sibling
    test and client code call ``geoip.find_country_code`` — confirm which name
    the geoip service exposes in this revision and unify.
    """
    session = Session()
    db_file_path = get_fixture("GeoIPCountryCSV.zip")
    # Fixture import must succeed and load exactly 10 rows
    self.assertTrue(geoip.update(session, db_file_path))
    self.assertEqual(10, session.query(geoip.models.GeoIP).count())
    # Lookup by integer IP
    self.assertEqual("AU", geoip.find_country(session, 16777217))
    # Lookup by the equivalent dotted-quad string
    self.assertEqual("AU", geoip.find_country(session, net.int2ip(16777217)))
    # Addresses outside any imported range resolve to None
    self.assertIsNone(geoip.find_country(session, 1000))
def cmd_geoip(options):
    """ CLI entry point: ensure the schema exists and import a GeoIP database.

    :param options: Parsed CLI options; ``options.nodownload`` suppresses
        downloading a fresh database file
    """
    from tranny.service import geoip
    from tranny.app import Session
    from tranny.app import config

    engine = create_engine(config.get_db_uri())
    Session.configure(bind=engine)
    Base.metadata.create_all(bind=engine)
    # The CLI flag is the *negative* "nodownload", so it must be inverted
    # before being passed as the positive "download" argument.
    db_file_path = geoip.fetch_update(download=not options.nodownload)
    geoip.update(Session(), db_file_path)
def update_imdb(media_info=None, release_key=None):
    """ Augment an existing media info row with IMDB rating, cover and people.

    :param media_info: Media info row to update; must have ``imdb_id`` set for
        anything to happen
    :param release_key: Unused here; kept for interface compatibility
    :return: The (possibly updated) media_info
    """
    session = Session()
    try:
        if media_info.imdb_id:
            movie_info = imdb.get_movie(media_info.imdb_id)
            if movie_info:
                media_info.imdb_score = movie_info['rating']
                media_info.imdb_votes = movie_info['votes']
                media_info.cover_url = movie_info['cover_url']
                for director in movie_info.get('director', []):
                    person = get_person_imdb(session, director['person_id'],
                                             name=director['name'])
                    # Use "not in" consistently (the cast loop below already did)
                    if person not in media_info.directors:
                        media_info.directors.append(person)
                for cast_member in movie_info.get('cast', []):
                    person = get_person_imdb(session, cast_member['person']['person_id'],
                                             name=cast_member['person']['name'])
                    if person not in media_info.cast:
                        media_info.cast.append(person)
                session.commit()
    except DBAPIError:
        session.rollback()
    except exceptions.ApiError as e:
        log.warn(e.message)
    return media_info
def index():
    """ Show the home view which is mostly a dashboard type view so we
    are calculating metrics for various areas

    TODO remove the 2nd download query once we fix not having a source_id,
    or at least make sure its not actually a problem

    :return: Mostly metric data and newest releases
    :rtype: dict
    """
    session = Session()
    downloads = session.query(Download).filter(Download.source_id > 0).all()
    provider_totals = stats.count_totals(downloads, lambda v: v.source.source_name).items()
    section_totals = stats.count_totals(downloads, lambda v: v.section.section_name).items()
    provider_type_totals = stats.provider_type_counter(downloads).items()
    # Use the local session instance (not the scoped Session class) so every
    # query in this request goes through the same session.
    newest = session.query(Download).order_by(Download.entity_id.desc()).limit(25).all()
    return dict(
        newest=newest,
        provider_totals=provider_totals,
        section_totals=section_totals,
        provider_type_totals=provider_type_totals,
    )
def index():
    """ Show the home view which is mostly a dashboard type view so we
    are calculating metrics for various areas

    TODO remove the 2nd download query once we fix not having a source_id,
    or at least make sure its not actually a problem

    :return: Mostly metric data and newest releases
    :rtype: dict
    """
    session = Session()
    downloads = session.query(Download).filter(Download.source_id > 0).all()
    provider_totals = stats.count_totals(
        downloads, lambda v: v.source.source_name).items()
    section_totals = stats.count_totals(
        downloads, lambda v: v.section.section_name).items()
    provider_type_totals = stats.provider_type_counter(downloads).items()
    # Use the local session instance (not the scoped Session class) so every
    # query in this request goes through the same session.
    newest = session.query(Download).order_by(
        Download.entity_id.desc()).limit(25).all()
    return dict(newest=newest,
                provider_totals=provider_totals,
                section_totals=section_totals,
                provider_type_totals=provider_type_totals)
def find_matches(self):
    """ Generator yielding (session, [torrent, release_info]) pairs for
    releases fetched since the last update interval elapsed.

    Yields nothing when the service is disabled or the interval has not
    yet elapsed.

    :return: Generator of (session, [torrent, release_info]) tuples
    :rtype: generator
    """
    t0 = time()
    delta = t0 - self.last_update
    if not delta > self.interval or not self.enabled:
        # PEP 479: raising StopIteration inside a generator is converted to
        # RuntimeError on Python 3.7+; a plain return ends the generator safely.
        return
    self.last_update = t0
    session = Session()
    for torrent, release_info in self.fetch_releases(session):
        yield session, [torrent, release_info]
def login_perform():
    """ Handle a user login form

    :return: Redirect the user to the previous page or the home index
    :rtype: Response
    """
    try:
        user_name = request.values['user_name']
        user_password = request.values['user_password']
    except KeyError:
        # Missing form fields: fall through to the redirect below
        pass
    else:
        session = Session()
        user = session.query(User).filter_by(user_name=user_name).first()
        # NOTE(security): unsalted SHA1 is a weak password hash; consider
        # migrating stored credentials to bcrypt/scrypt/PBKDF2.
        if user is None or user.password != hashlib.sha1(user_password).hexdigest():
            flash("Invalid credentials", "alert")
            return redirect(url_for(".login"))
        try:
            remember = request.values['remember'].lower() == "on"
        except KeyError:
            remember = False
        login_user(user, remember=remember)
    return redirect(request.args.get("next") or url_for("home.index"))
def login_perform():
    """ Validate a submitted login form and start a user session.

    :return: Redirect to the "next" page when given, otherwise the home index
    :rtype: Response
    """
    try:
        submitted_name = request.values['user_name']
        submitted_password = request.values['user_password']
    except KeyError:
        # Form fields missing entirely; just bounce back
        pass
    else:
        session = Session()
        user = session.query(User).filter_by(user_name=submitted_name).first()
        password_hash = hashlib.sha1(submitted_password).hexdigest()
        # Reject unknown users and bad passwords with the same message
        if not (user and user.password == password_hash):
            flash("Invalid credentials", "alert")
            return redirect(url_for(".login"))
        remember = request.values.get('remember', '').lower() == "on"
        login_user(user, remember=remember)
    return redirect(request.args.get("next") or url_for("home.index"))
def db_init(username="******", password="******", wipe=False):
    """ Create the database schema and seed an initial admin user.

    :param username: Admin account name
    :param password: Admin account password
    :param wipe: Drop all existing tables before creating the schema
    :return: True when the admin user was created, False otherwise
    :rtype: bool
    """
    from tranny.app import Base, engine, Session

    Session.configure(bind=engine)
    try:
        if wipe:
            db_drop()
        Base.metadata.create_all(bind=engine)
    except DBAPIError:
        log.exception("Failed to initialize db schema")
    else:
        log.info("Initialized db schema successfully")
    # Seed the admin account regardless of schema-init outcome, matching
    # the original control flow
    session = Session()
    admin_user = User(user_name=username, password=password,
                      role=constants.ROLE_ADMIN)
    session.add(admin_user)
    try:
        session.commit()
    except DBAPIError:
        session.rollback()
    else:
        log.info("Created admin user successfully")
        return True
    return False
def torrent_peers(self, info_hash):
    """ Return per-peer stats for a torrent, including a geoip country code.

    :param info_hash: Torrent info hash to query
    :return: List of peer data mappings
    :rtype: list[client.ClientPeerData]
    """
    # An ordered list of (field, command) pairs: the multicall result columns
    # come back in the order the commands were passed, so the mapping order
    # must be fixed. The original dict version relied on dict iteration order
    # (arbitrary before Python 3.7) and hard-coded peer[3] as the IP column.
    field_commands = [
        ('client', 'p.get_client_version='),
        ('down_speed', 'p.get_down_rate='),
        ('progress', 'p.completed_percent='),
        ('ip', 'p.address='),
        ('up_speed', 'p.get_up_rate='),
    ]
    commands = [cmd for _, cmd in field_commands]
    ip_index = [field for field, _ in field_commands].index('ip')
    parray = self._server.p.multicall(info_hash, '+0', *commands)
    pdata = []
    session = Session()
    for peer in parray:
        country_code = geoip.find_country_code(session, peer[ip_index])
        peer_dict = client.ClientPeerData({'country': country_code})
        for index, (field, _) in enumerate(field_commands):
            peer_dict[field] = peer[index]
        pdata.append(peer_dict)
    return pdata
def torrent_peers(self, info_hash):
    """ Return per-peer stats for a torrent, with a geoip country lookup.

    :param info_hash: Torrent info hash to query
    :return: List of peer stat dicts
    :rtype: list[dict]
    """
    torrent = self.client.get_torrent(
        info_hash, arguments=['id', 'hashString', 'peers'])
    session = Session()
    # Map each transmission peer record to our common peer-stat shape,
    # resolving the country code from the peer's address
    return [
        {
            'client': peer['clientName'],
            'down_speed': peer['rateToClient'],
            'up_speed': peer['rateToPeer'],
            'progress': peer['progress'],
            'ip': peer['address'],
            'country': geoip.find_country_code(session, peer['address'])
        }
        for peer in torrent.peers
    ]
def handler():
    """ Called when a user used the popup modal to upload a torrent manually

    :return: Redirect response
    :rtype: Response
    """
    form = forms.UploadForm()
    if form.validate_on_submit():
        file_data = request.files['torrent_file'].stream.read()
        # The uploaded filename is not needed: the display name below comes
        # from the parsed torrent metadata itself.
        try:
            torrent_struct = torrent.Torrent.from_str(file_data)
            tor_data = release.TorrentData(
                torrent_struct.name, file_data, form.section.data)
            if ServiceManager.add(Session(), tor_data, WebProvider()):
                flash("Torrent {} uploaded successfully".format(
                    torrent_struct.name), "success")
            else:
                flash("Failed to upload torrent", "alert")
        except exceptions.TrannyException as err:
            flash(err.message, "alert")
    elif form.errors:
        for field, error in form.errors.items():
            try:
                flash("[{}] {}".format(field, ' && '.join(error)), "alert")
            except Exception:
                # A malformed error payload must not break the response, but
                # a bare except would also swallow SystemExit/KeyboardInterrupt
                pass
    try:
        next_url = request.form['next_url']
    except KeyError:
        next_url = url_for("home.index")
    return redirect(next_url)
def init_db(self, uri="sqlite://"):
    """ Rebind the scoped session to a fresh engine and create all tables.

    :param uri: SQLAlchemy connection URI; defaults to an in-memory sqlite db
    """
    # Drop any session left over from a previous test before rebinding
    Session.remove()
    db_engine = create_engine(uri)
    Session.configure(bind=db_engine)
    Base.metadata.create_all(bind=db_engine)
def init_db(self):
    """ Bind our sqlalchemy engine and create any missing tables """
    # Bind the scoped session factory to the module-level engine, then
    # create any tables not already present (create_all skips existing ones)
    Session.configure(bind=engine)
    Base.metadata.create_all(bind=engine)