def get_data(url, params):
    """fetch data from TheAudioDB and return the parsed JSON response"""
    params = {k: unicode(v).encode('utf-8') for k, v in params.iteritems() if v}
    url = "%s%s.php?%s" % (BASE_URL, url, urllib.urlencode(params))
    return utils.get_JSON_response(url=url,
                                   folder="TheAudioDB")
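# Hedged usage sketch for the TheAudioDB wrapper above: "searchalbum" and its
# "s"/"a" query parameters are assumptions about the TheAudioDB endpoints, not
# something defined in this module; BASE_URL must already point at the API root.
albums = get_data(url="searchalbum",
                  params={"s": "Daft Punk", "a": "Discovery"})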
def get_movies(movie_type):
    """fetch a Rotten Tomatoes movie list and return it as ListItems merged with the local library"""
    movies = []
    url = '%s.json?apikey=%s' % (movie_type, RT_KEY)
    results = utils.get_JSON_response(BASE_URL + url, folder="RottenTomatoes")
    if not results or "movies" not in results:
        return []
    for item in results["movies"]:
        if "alternate_ids" not in item:
            continue
        imdb_id = str(item["alternate_ids"]["imdb"])
        if addon.bool_setting("infodialog_onclick"):
            path = PLUGIN_BASE + 'extendedinfo&&imdb_id=%s' % imdb_id
        else:
            search_string = "%s %s trailer" % (item["title"], item["year"])
            path = PLUGIN_BASE + "playtrailer&&title=%s&&imdb_id=%s" % (search_string, imdb_id)
        movie = ListItem(label=item.get('title'),
                         path=path)
        movie.set_infos({'title': item["title"],
                         'mediatype': "movie",
                         'duration': item["runtime"] * 60,
                         'year': item["year"],
                         'premiered': item["release_dates"].get("theater", ""),
                         'rating': item["ratings"]["audience_score"] / 10.0,
                         'plot': item["synopsis"],
                         'imdbnumber': imdb_id,
                         'mpaa': item["mpaa_rating"]})
        movie.set_properties({'imdb_id': imdb_id,
                              'duration(h)': utils.format_time(item["runtime"], "h"),
                              'duration(m)': utils.format_time(item["runtime"], "m")})
        movie.set_artwork({'thumb': item["posters"]["original"],
                           'poster': item["posters"]["original"]})
        movies.append(movie)
    return local_db.merge_with_local(media_type="movie",
                                     items=movies,
                                     library_first=False)
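# Hedged usage sketch for get_movies() above: movie_type is joined directly onto
# BASE_URL, so a list name such as "box_office" or "in_theaters" is assumed here;
# RT_KEY must hold a valid Rotten Tomatoes API key.
box_office_items = get_movies("box_office")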
def get_data(url, params=None, cache_days=10):
    """fetch data from the Trakt API and return the parsed JSON response"""
    params = params if params else {}
    params["limit"] = 10
    url = "%s%s?%s" % (BASE_URL, url, urllib.urlencode(params))
    return utils.get_JSON_response(url=url,
                                   folder="Trakt",
                                   headers=HEADERS,
                                   cache_days=cache_days)
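# Hedged usage sketch for the Trakt wrapper above: "movies/trending" and the
# "extended" parameter follow Trakt v2 conventions but are assumptions as far as
# this module is concerned; HEADERS must already carry the client id.
trending = get_data(url="movies/trending",
                    params={"extended": "full"},
                    cache_days=1)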
def get_movie_info(imdb_id):
    """fetch OMDb info for *imdb_id*, dropping "N/A" placeholder values"""
    try:
        url = 'i=%s' % imdb_id
        results = utils.get_JSON_response(BASE_URL + url, 20, "OMDB")
        return dict((k, v) for (k, v) in results.iteritems() if v != "N/A")
    except Exception:
        utils.log("Exception: Error when fetching OMDb data from the web")
        return {}
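# Usage sketch for get_movie_info() above: OMDb is queried by IMDb id; the id
# below (The Matrix) is only an illustration.
info = get_movie_info("tt0133093")
imdb_rating = info.get("imdbRating")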
def get_near_events(artists):
    """fetch BandsInTown events near the user for up to 50 *artists*"""
    # not possible with api 2.0
    arts = [urllib.quote(art['artist'].encode("utf-8")) for art in artists[:50]]
    artist_str = 'artists[]=' + '&artists[]='.join(arts)
    url = BASE_URL + 'location=use_geoip&radius=50&per_page=100&%s' % artist_str
    results = utils.get_JSON_response(url, folder="BandsInTown")
    if results:
        return handle_events(results)
    return []
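# Usage sketch for get_near_events() above: the function expects a list of dicts
# that each carry an "artist" key (e.g. built from the local music library); the
# names below are purely illustrative.
events = get_near_events([{"artist": "Radiohead"},
                          {"artist": "Portishead"}])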
def get_data(method, params=None, cache_days=0.5):
    """fetch data from the YouTube API and return the parsed JSON response"""
    params = params if params else {}
    params["key"] = YT_KEY
    params = {k: unicode(v).encode('utf-8') for k, v in params.iteritems() if v}
    url = "{base_url}{method}?{params}".format(base_url=BASE_URL,
                                               method=method,
                                               params=urllib.urlencode(params))
    return utils.get_JSON_response(url=url,
                                   cache_days=cache_days,
                                   folder="YouTube")
def get_data(method, params=None, cache_days=0.5):
    """fetch data from the Last.fm API and return the parsed JSON response"""
    params = params if params else {}
    params["method"] = method
    params["api_key"] = LAST_FM_API_KEY
    params["format"] = "json"
    params = {k: unicode(v).encode('utf-8') for k, v in params.iteritems() if v}
    url = "{base_url}{params}".format(base_url=BASE_URL,
                                      params=urllib.urlencode(params))
    return utils.get_JSON_response(url=url,
                                   cache_days=cache_days,
                                   folder="LastFM")
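# Hedged usage sketch for the Last.fm wrapper above: "artist.getinfo" is a
# documented Last.fm API method, but the artist name is only an illustration;
# LAST_FM_API_KEY must hold a valid key.
artist_info = get_data(method="artist.getinfo",
                       params={"artist": "Nick Cave and the Bad Seeds"})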
def get_data(method, params=None, cache_days=0.5): """ fetch data from youtube API """ params = params if params else {} params["key"] = YT_KEY params = {k: str(v) for k, v in params.iteritems() if v} url = "{base_url}{method}?{params}".format(base_url=BASE_URL, method=method, params=urllib.urlencode(params)) return utils.get_JSON_response(url=url, cache_days=cache_days, folder="YouTube")
def get_near_events(artists):
    """fetch BandsInTown events near the user for up to 50 *artists*"""
    # not possible with api 2.0
    arts = []
    for art in artists[:50]:
        try:
            arts.append(urllib.quote(art['artist']))
        except Exception:
            arts.append(urllib.quote(art['artist'].encode("utf-8")))
    artist_str = 'artists[]=' + '&artists[]='.join(arts)
    url = BASE_URL + 'location=use_geoip&radius=50&per_page=100&%s' % artist_str
    results = utils.get_JSON_response(url, folder="BandsInTown")
    if results:
        return handle_events(results)
    return []
def get_movie_info(imdb_id):
    """fetch OMDb info for *imdb_id*, dropping "N/A" placeholder values"""
    url = 'i=%s' % imdb_id
    results = utils.get_JSON_response(BASE_URL + url, 20, "OMDB")
    if not results:
        return None
    return {k: v for (k, v) in results.iteritems() if v != "N/A"}