def get_backgrounds(self):
    """Continuously top up the background-image pool from Unsplash.

    Runs forever: every 30 seconds it requests enough random landscape
    photos to refill self.background_images up to
    self.background_max_requests, sizes each download to the screen
    resolution and appends the resulting PIL images to the pool.
    """
    while True:
        amount = self.background_max_requests - len(self.background_images)
        if amount == 0:
            # Pool is full; check again later.
            time.sleep(30)
            continue

        result = RequestFactory.make_request(
            "https://api.unsplash.com/photos/random/"
            + "?count=" + str(amount)
            + "&orientation=landscape"
            + "&collections=827743,3178572,225,573009"
            + "&client_id=825216e69ea20d24e5b3ddeeab316f6569dcecc4965e16a0725aee3eeb143872")
        if result is None:
            # Request failed; retry after a delay.
            time.sleep(30)
            continue

        json_data = json.loads(result.decode('utf-8'))
        urls = [x['urls']['raw']
                + "&w=" + str(self.parent.winfo_screenwidth())
                + "&h=" + str(self.parent.winfo_screenheight())
                + "&fit=scale"
                for x in json_data]
        for url in urls:
            # Use the response as a context manager so the HTTP
            # connection is closed even if reading/decoding fails
            # (the original leaked the socket returned by urlopen).
            with urlopen(url) as response:
                image_byt = response.read()
            image = Image.open(BytesIO(image_byt))
            self.background_images.append(image)
        time.sleep(30)
def get_weather_data(self):
    """Periodically refresh the weather display (every 30 minutes).

    Fetches current conditions for city group 2750947 (metric units)
    from OpenWeatherMap and updates the temperature text and weather
    icon on the background canvas. Runs forever in its own loop.
    """
    while True:
        api_key = SecureSettings.get_string("open_weather_map_key")
        url = "http://api.openweathermap.org/data/2.5/group?id=2750947&units=metric&appid=" + api_key
        result = RequestFactory.make_request(url)
        if not result:
            # Retry on failure instead of returning: the original
            # `return` permanently killed the updater loop after a
            # single transient network error.
            Logger().write(LogVerbosity.Info, "Failed to get weather data")
            time.sleep(60)
            continue

        data = json.loads(result.decode('utf8'))
        current_temp = data['list'][0]['main']['temp']
        # Always show the daytime icon variant ('n' suffix -> 'd').
        icon = data['list'][0]['weather'][0]['icon'].replace('n', 'd')

        self.background_canvas.itemconfigure(
            self.weather_temp, text=str(round(current_temp, 1)) + "°C")
        image = Image.open(self.base_image_path + "Weather/" + icon + ".png")
        # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the
        # same filter under its current name (alias since Pillow 2.7).
        resized = image.resize((140, 140), Image.LANCZOS)
        # Keep a reference on the canvas so the PhotoImage is not
        # garbage-collected while still displayed.
        self.background_canvas.weather_icon = ImageTk.PhotoImage(resized)
        self.background_canvas.itemconfigure(
            self.weather_icon_image, image=self.background_canvas.weather_icon)
        time.sleep(60 * 30)
def get_torrents(url):
    """Scrape a torrent search-result page into TorrentModel objects.

    Rows are identified by <td> cells carrying the 'name', 'seeds',
    'leeches' and 'size' CSS classes. Returns [] when the request
    fails.
    """
    search_result = RequestFactory.make_request(url, timeout=10)
    if search_result is None:
        return []

    def cell_classes(tag):
        # 'class' is absent on unstyled cells; the original indexed
        # attrs['class'] directly and raised KeyError for those.
        return tag.attrs.get('class', [])

    parsed = BeautifulSoup(search_result, "lxml")
    torrent_rows = []
    for row in parsed.find_all('tr'):
        data_cells = [child for child in row.contents
                      if isinstance(child, Tag) and child.name == "td"
                      and ('name' in cell_classes(child) or 'seeds' in cell_classes(child))]
        if data_cells:
            torrent_rows.append(row.contents)

    result = []
    for row in torrent_rows:
        childs = [x for x in row if isinstance(x, Tag)]
        name_cell = [x for x in childs if 'name' in cell_classes(x)][0]
        name = name_cell.text
        seeds = int([x for x in childs if 'seeds' in cell_classes(x)][0].text)
        leeches = int([x for x in childs if 'leeches' in cell_classes(x)][0].text)
        size = [x for x in childs if 'size' in cell_classes(x)][0].contents[0]
        # The magnet/detail link is the second child of the name cell.
        torrent = name_cell.contents[1].attrs['href']
        result.append(TorrentModel(name, seeds, leeches, size, torrent))
    return result
def get_by_id_internal(show_id):
    """Fetch a show by id and annotate it with favorite status and
    per-episode watch history from the local database."""
    Logger().write(LogVerbosity.Debug, "Get show by id " + show_id)
    response = RequestFactory.make_request(
        ShowController.shows_api_path + "show/" + show_id)
    data = json.loads(response.decode('utf-8'))

    seen_episodes = []
    data['favorite'] = False
    if not Settings.get_bool("slave"):
        # Only the master instance holds history/favorites locally.
        seen_episodes = Database().get_history_for_id(show_id)
        favorite_ids = [x.id for x in Database().get_favorites()]
        data['favorite'] = show_id in favorite_ids

    for episode in data['episodes']:
        matches = [x for x in seen_episodes
                   if x.season == episode['season'] and x.episode == episode['episode']]
        episode['seen'] = len(matches) != 0
        if matches:
            # Use the most recent history entry for resume position.
            latest = matches[-1]
            episode['played_for'] = latest.played_for
            episode['length'] = latest.length
    return data
def announce_torrent(self, torrent):
    """Announce *torrent* to this HTTP tracker and publish found peers.

    Returns True when peers were parsed and the PeersFound event was
    thrown, False on any failure. (The original implicitly returned
    None on success, so a caller's truthiness check could never
    distinguish success from failure.)
    """
    self.last_announce = current_time()
    announce_message = TrackerMessages.TrackerAnnounceMessage.for_http(
        torrent.info_hash, 2, torrent.total_size - torrent.left,
        torrent.left, torrent.uploaded, self.tracker_peer_request_amount)
    path = self.uri.path + announce_message.as_param_string()
    response = RequestFactory.make_request(path)
    if response is None:
        return False

    try:
        response_dict = Bencode.bdecode(response)
    except BTFailure:
        Logger().write(LogVerbosity.Info, 'Invalid tracker response: ' + str(response))
        return False

    if b"peers" not in response_dict:
        return False

    # Compact peer format: 6 bytes per peer (4-byte IP + 2-byte port).
    peers_data = response_dict[b"peers"]
    total_peers = len(peers_data) // 6
    peers = [uri_from_bytes(peers_data[i * 6:(i + 1) * 6])
             for i in range(total_peers)]
    EventManager.throw_event(EventType.PeersFound, [peers, PeerSource.HttpTracker])
    return True
def request_movies(url):
    """Download movie data from *url* and parse it.

    Returns the parsed movie list, or [] (after raising an error
    event and logging) when the request fails.
    """
    data = RequestFactory.make_request(url)
    if data is None:
        # Guard clause: surface the failure and fall back to an
        # empty result so callers can render an empty list.
        EventManager.throw_event(EventType.Error, ["get_error", "Could not get movie data"])
        Logger().write(LogVerbosity.Info, "Error fetching movies")
        return []
    return MovieController.parse_movie_data(data.decode('utf-8'))
def get_magnet_url(url):
    """Fetch a torrent detail page and return its first magnet link.

    Returns None when the page cannot be retrieved or contains no
    magnet link.
    """
    torrent_result = RequestFactory.make_request(
        TorrentController.base_url + url, timeout=10)
    if torrent_result is None:
        # make_request returns None on failure; the original passed it
        # straight into BeautifulSoup, which raises.
        return None
    parsed = BeautifulSoup(torrent_result, "lxml")
    # Raw string: \? is an invalid escape in a plain string literal.
    magnet_links = parsed.findAll('a', href=re.compile(r'^magnet:\?xt=urn:btih:'))
    if len(magnet_links) == 0:
        return None
    return magnet_links[0].attrs['href']
def get_subtitles(self, size, file_length, file_name, first_64k, last_64k):
    """Search OpenSubtitles by file hash/size and download subtitles.

    Downloads up to two subtitles from each of three buckets, in
    order: results whose release name matches the file name, results
    whose duration matches file_length (within 10 ms), and all
    remaining results. Returns the list of local file paths, or []
    when the search request fails.
    """
    result_raw = RequestFactory.make_request(
        "https://rest.opensubtitles.org/search/moviebytesize-" + str(size) +
        "/moviehash-" + str(self.get_hash(size, first_64k, last_64k)) +
        "/sublanguageid-eng", "GET", useragent="mediaplayerjk")
    if not result_raw:
        Logger().write(LogVerbosity.Info, "Failed to get subtitles")
        return []

    result = json.loads(result_raw.decode('utf8'))
    paths = []

    def download_first_two(subs):
        # The original repeated this download loop verbatim for each
        # bucket; at most two subtitles are taken per bucket.
        for sub in subs[:2]:
            paths.append(self.download_sub(sub))

    results_correct_name = [x for x in result if x['MovieReleaseName'] in file_name]
    Logger().write(
        LogVerbosity.Debug,
        "Subs with correct name (" + file_name + "): " + str(len(results_correct_name)))
    download_first_two(results_correct_name)

    results_correct_size = [x for x in result if abs(int(x['MovieTimeMS']) - file_length) < 10]
    Logger().write(
        LogVerbosity.Debug,
        "Subs with correct size (" + str(file_length) + "): " + str(len(results_correct_size)))
    download_first_two(results_correct_size)

    results_other = [x for x in result
                     if x not in results_correct_size and x not in results_correct_name]
    Logger().write(LogVerbosity.Debug, "Subs other: " + str(len(results_other)))
    download_first_two(results_other)

    return paths
def shelly():
    """HTTP endpoint: switch a Shelly smart plug on or off.

    Reads 'ip' and 'state' from the query string ('true' maps to
    "on", anything else to "off"), forwards the command to the
    device, logs any response and always answers "OK".
    """
    ip = request.args.get("ip")
    desired_state = "on" if request.args.get("state") == "true" else "off"
    Logger().write(LogVerbosity.Info, "Set shelly " + ip + " to " + desired_state)
    result = RequestFactory.make_request("http://" + ip + "?state=" + desired_state)
    if result is not None:
        Logger().write(LogVerbosity.Info, result)
    return "OK"
def get_movie_by_id():
    """HTTP endpoint: return a movie by id, annotated with local
    watch history, as UTF-8-encoded JSON."""
    movie_id = request.args.get('id')
    Logger().write(LogVerbosity.Debug, "Get movie by id " + movie_id)
    response = RequestFactory.make_request(
        MovieController.movies_api_path + "movie/" + movie_id)
    data = json.loads(response.decode('utf-8'))

    history = Database().get_history_for_id(movie_id)
    data['seen'] = len(history) > 0
    if history:
        # Most recent history entry supplies the resume position.
        last_watch = history[-1]
        data['played_for'] = last_watch.played_for
        data['length'] = last_watch.length
    return json.dumps(data).encode('utf-8')
def get_subtitles(size, file_length, filename, first_64k, last_64k):
    """Look up an English subtitle on SubDB by content hash.

    The SubDB hash is the md5 of the file's first and last 64 KiB
    concatenated (protocol requirement, not security-sensitive).
    Returns a one-element list with the saved subtitle path, or []
    when nothing is found. size/file_length/filename are unused but
    kept for interface parity with other subtitle sources.
    """
    file_hash = hashlib.md5(first_64k + last_64k).hexdigest()
    result = RequestFactory.make_request(
        "http://sandbox.thesubdb.com/?action=download&hash=" + file_hash + "&language=en",
        useragent="SubDB/1.0 (MediaPi/0.1; http://github.com/jkorf/mediapi)")
    if not result:
        Logger().write(LogVerbosity.Info, "SubDB: no subtitles found for " + file_hash)
        return []
    Logger().write(LogVerbosity.Info, "SubDB: Found a subtitle for hash " + file_hash)
    return [SubtitleSourceBase.save_file("SubDB", result)]
def download_sub(sub):
    """Download one OpenSubtitles result, decompress it and save it.

    Returns the local file path, or None when the download fails.
    """
    download_link = sub['SubDownloadLink']
    download_result = RequestFactory.make_request(download_link)
    if download_result is None:
        # make_request returns None on failure; the original fed it
        # into gzip.decompress and crashed with TypeError.
        Logger().write(LogVerbosity.Info, "Failed to download subtitle: " + download_link)
        return None
    # OpenSubtitles serves subtitle files gzip-compressed.
    sub_act_data = gzip.decompress(download_result)
    return SubtitleSourceBase.save_file(sub['IDSubtitleFile'], sub_act_data)