def fetch_stations(self):
    """Download the raw XML station list for ``self.genre``.

    Builds the shoutcast genre URL, fetches it with ``FancyURLopener``,
    caches the raw response body on ``self.stations`` and returns it.
    The handle is also stored on ``self.fd`` / ``self.urlhandler`` as a
    side effect (pre-existing contract, kept for compatibility).
    """
    self.shout_url = 'http://www.shoutcast.com/sbin/newxml.phtml?genre=' + self.genre
    self.urlhandler = FancyURLopener()
    self.fd = self.urlhandler.open(self.shout_url)
    try:
        # Read the whole response body; may raise on a broken connection.
        self.stations = self.fd.read()
    finally:
        # Always release the connection, even if read() fails
        # (the original leaked the handle on a failed read).
        self.fd.close()
    return self.stations
def fetch_genres(self):
    """Download the raw XML genre list from the shoutcast server.

    Fetches the fixed genre URL with ``FancyURLopener``, caches the raw
    response body on ``self.genre`` and returns it. The handle is also
    stored on ``self.fd`` / ``self.urlhandler`` as a side effect
    (pre-existing contract, kept for compatibility).
    """
    self.genre_url = 'http://www.shoutcast.com/sbin/newxml.phtml'
    self.urlhandler = FancyURLopener()
    self.fd = self.urlhandler.open(self.genre_url)
    try:
        # Read the whole response body; may raise on a broken connection.
        self.genre = self.fd.read()
    finally:
        # Always release the connection, even if read() fails
        # (the original leaked the handle on a failed read).
        self.fd.close()
    return self.genre
def __install_grinder(self, grinder_path):
    """
    Ensure a usable Grinder installation, downloading one when missing.

    Grinder version and download link may be set in config:
    "download-link":"http://domain/resource-{version}.zip"
    "version":"1.2.3"
    """
    # Install root is two directory levels above the jar path.
    dest = os.path.dirname(os.path.dirname(os.path.expanduser(grinder_path)))
    dest = os.path.abspath(dest if dest else os.path.expanduser("~/grinder-taurus"))
    grinder_full_path = os.path.join(dest, "lib", "grinder.jar")

    # If the existing install launches, we are done.
    try:
        self.__grinder(grinder_full_path)
        return grinder_full_path
    except CalledProcessError:
        self.log.info("Will try to install grinder into %s", dest)

    # Resolve version and download link, falling back to class defaults.
    version = self.settings.get("version", GrinderExecutor.VERSION)
    link = self.settings.get("download-link", GrinderExecutor.DOWNLOAD_LINK)
    link = link.format(version=version)
    zip_path = self.engine.create_artifact("grinder-dist", ".zip")
    opener = FancyURLopener()

    self.log.info("Downloading %s", link)
    try:
        opener.retrieve(link, zip_path, download_progress_hook)
    except BaseException as exc:
        self.log.error("Error while downloading %s", link)
        raise exc

    self.log.info("Unzipping %s", zip_path)
    unzip(zip_path, dest, 'grinder-' + version)
    os.remove(zip_path)
    self.log.info("Installed grinder successfully")
    return grinder_full_path
def __install_gatling(self, gatling_path):
    """
    Ensure a usable Gatling installation, downloading one when missing.

    Gatling version and download link may be set in config:
    "download-link":"http://domain/resource-{version}.zip"
    "version":"1.2.3"
    """
    # Install root is two directory levels above the launcher (../..).
    dest = os.path.abspath(
        os.path.dirname(os.path.dirname(os.path.expanduser(gatling_path))))

    # If the existing install launches, we are done.
    try:
        self.__gatling(gatling_path)
        return gatling_path
    except OSError:
        self.log.info("Will try to install Gatling into %s", dest)

    # Resolve version and download link, falling back to class defaults.
    downloader = FancyURLopener()
    zip_path = self.engine.create_artifact("gatling-dist", ".zip")
    version = self.settings.get("version", GatlingExecutor.VERSION)
    link = self.settings.get("download-link", GatlingExecutor.DOWNLOAD_LINK)
    link = link.format(version=version)

    self.log.info("Downloading %s", link)
    # TODO: check archive checksum/hash before unzip and run
    try:
        downloader.retrieve(link, zip_path, download_progress_hook)
    except BaseException as exc:
        self.log.error("Error while downloading %s", link)
        raise exc

    self.log.info("Unzipping %s", zip_path)
    unzip(zip_path, dest, 'gatling-charts-highcharts-bundle-' + version)
    os.remove(zip_path)
    # Make the launcher script executable.
    os.chmod(os.path.expanduser(gatling_path), 0o755)
    self.log.info("Installed Gatling successfully")
def get_file(url, destination_dir, fname):
    """Return the local path of ``fname`` inside ``destination_dir``,
    downloading it from ``url`` first if it is not already present.

    Creates ``destination_dir`` when missing. The original probed the
    file by opening it under a bare ``except:`` and never closed the
    handle; this version checks existence instead, which leaks nothing
    and no longer swallows unrelated exceptions (e.g. KeyboardInterrupt).
    """
    if not os.path.exists(destination_dir):
        os.makedirs(destination_dir)
    file_dest = os.path.join(destination_dir, fname)
    if not os.path.exists(file_dest):
        print('Downloading data from', url)
        FancyURLopener().retrieve(url, file_dest)
    return file_dest
def __install_grinder(self, grinder_path):
    """
    Install Grinder when it cannot be run from ``grinder_path``.

    Grinder version and download link may be set in config:
    "download-link":"http://domain/resource-{version}.zip"
    "version":"1.2.3"
    """
    expanded = os.path.expanduser(grinder_path)
    install_dir = os.path.dirname(os.path.dirname(expanded))
    if not install_dir:
        # No parent given: fall back to a per-user default location.
        install_dir = os.path.expanduser("~/grinder-taurus")
    install_dir = os.path.abspath(install_dir)
    jar_path = os.path.join(install_dir, "lib", "grinder.jar")

    try:
        self.__grinder(jar_path)
    except CalledProcessError:
        self.log.info("Will try to install grinder into %s", install_dir)
    else:
        # Existing installation works; nothing to do.
        return jar_path

    downloader = FancyURLopener()
    dist_zip = self.engine.create_artifact("grinder-dist", ".zip")
    version = self.settings.get("version", GrinderExecutor.VERSION)
    download_link = self.settings.get("download-link", GrinderExecutor.DOWNLOAD_LINK)
    download_link = download_link.format(version=version)

    self.log.info("Downloading %s", download_link)
    try:
        downloader.retrieve(download_link, dist_zip, download_progress_hook)
    except BaseException as exc:
        self.log.error("Error while downloading %s", download_link)
        raise exc

    self.log.info("Unzipping %s", dist_zip)
    unzip(dist_zip, install_dir, 'grinder-' + version)
    os.remove(dist_zip)
    self.log.info("Installed grinder successfully")
    return jar_path
class ShoutcastFeed:
    """Fetches and caches the shoutcast station list for one genre."""

    def __init__(self, genre, min_bitrate=128, cache_ttl=600,
                 cache_dir='/tmp/pyshout_cache'):
        """
        Parses the xml feed and spits out a list of dictionaries with the
        station info keyed by genre. Params are as follows:
        min_bitrate - 128 default, Minimum bitrate filter
        cache_ttl - 600 default, 0 disables, Seconds cache is considered valid
        cache_dir - /tmp/pyshout_cache default, Path to cache directory
        """
        self.min_bitrate = min_bitrate
        self.cache_ttl = cache_ttl
        self.genre = genre
        # One pickle file per genre under the cache directory.
        self.cache_file = cache_dir + '/' + self.genre + '.pickle'
        self.station_list = []

    def fetch_stations(self):
        """
        Grabs the xml list of stations from the shoutcast server
        """
        self.shout_url = 'http://www.shoutcast.com/sbin/newxml.phtml?genre=' + self.genre
        self.urlhandler = FancyURLopener()
        self.fd = self.urlhandler.open(self.shout_url)
        try:
            self.stations = self.fd.read()
        finally:
            # Always release the connection, even if read() fails.
            self.fd.close()
        return self.stations

    def parse_stations(self):
        """Return the station list, refreshing it when the cache is stale.

        Uses the pickle cache when younger than ``cache_ttl`` seconds;
        otherwise re-fetches and re-parses the feed, writing the cache back.
        Failures are reported but never raised (best-effort contract).
        """
        ct = None
        if self.cache_ttl:
            ct = cacheTime(self.cache_file)
        if ct:
            try:
                self.station_list = load_cache(self.cache_file)
            except Exception:
                # Narrowed from a bare except: still best-effort, but no
                # longer swallows SystemExit/KeyboardInterrupt.
                print("Failed to load cache.")
        if not ct or (time.time() - ct) > self.cache_ttl:
            try:
                parseXML = StationParser(self.min_bitrate)
                self.stations = self.fetch_stations()
                parseString(self.stations, parseXML)
                self.station_list = parseXML.station_list
                write_cache(self.cache_file, self.station_list)
            except Exception:
                # Keep whatever list we already have (possibly cached).
                print("Failed to get a new station list, sorry.")
        return self.station_list
def __install_gatling(self, gatling_path):
    """
    Install Gatling when it cannot be run from ``gatling_path``.

    Gatling version and download link may be set in config:
    "download-link":"http://domain/resource-{version}.zip"
    "version":"1.2.3"
    """
    expanded = os.path.expanduser(gatling_path)
    # Install root is two directory levels above the launcher (../..).
    install_dir = os.path.abspath(os.path.dirname(os.path.dirname(expanded)))

    try:
        self.__gatling(gatling_path)
    except OSError:
        self.log.info("Will try to install Gatling into %s", install_dir)
    else:
        # Existing installation works; nothing to do.
        return gatling_path

    # download gatling
    downloader = FancyURLopener()
    dist_zip = self.engine.create_artifact("gatling-dist", ".zip")
    version = self.settings.get("version", GatlingExecutor.VERSION)
    download_link = self.settings.get("download-link", GatlingExecutor.DOWNLOAD_LINK)
    download_link = download_link.format(version=version)

    self.log.info("Downloading %s", download_link)
    # TODO: check archive checksum/hash before unzip and run
    try:
        downloader.retrieve(download_link, dist_zip, download_progress_hook)
    except BaseException as exc:
        self.log.error("Error while downloading %s", download_link)
        raise exc

    self.log.info("Unzipping %s", dist_zip)
    unzip(dist_zip, install_dir, 'gatling-charts-highcharts-bundle-' + version)
    os.remove(dist_zip)
    # Make the launcher script executable.
    os.chmod(os.path.expanduser(gatling_path), 0o755)
    self.log.info("Installed Gatling successfully")
class GenreFeed:
    """Fetches and caches the list of genres from the shoutcast server."""

    def __init__(self, cache_ttl=3600, cache_dir='/tmp/pyshout_cache'):
        """
        cache_ttl - 3600 default, Seconds the genre cache is considered valid
        cache_dir - /tmp/pyshout_cache default, Path to cache directory
        """
        self.cache_ttl = cache_ttl
        self.cache_file = cache_dir + '/genres.cache'
        # Fallback list shown when both cache and server are unavailable.
        self.genre_list = [
            'Sorry, failed to load', '...try again later',
            'Rock', 'Pop', 'Alternative'
        ]

    def fetch_genres(self):
        """
        Grabs genres and returns tuple of genres
        """
        self.genre_url = 'http://www.shoutcast.com/sbin/newxml.phtml'
        self.urlhandler = FancyURLopener()
        self.fd = self.urlhandler.open(self.genre_url)
        try:
            self.genre = self.fd.read()
        finally:
            # Always release the connection, even if read() fails.
            self.fd.close()
        return self.genre

    def parse_genres(self):
        """Return the genre list, refreshing it when the cache is stale.

        Loads the cached list when present; when the cache is missing or
        older than ``cache_ttl`` seconds, re-fetches and re-parses the
        feed and writes the cache back. Failures are reported but never
        raised (best-effort contract).
        """
        ct = None
        if self.cache_ttl:
            ct = cacheTime(self.cache_file)
        try:
            self.genre_list = load_cache(self.cache_file)
        except Exception:
            # Narrowed from a bare except: treat an unreadable cache as
            # absent so a fresh fetch is forced below.
            ct = None
        if not ct or (time.time() - ct) > self.cache_ttl:
            if DEBUG == 1:
                print('Getting fresh feed')
            try:
                parseXML = GenreParse()
                self.genres = self.fetch_genres()
                parseString(self.genres, parseXML)
                self.genre_list = parseXML.genreList
                write_cache(self.cache_file, self.genre_list)
            except Exception:
                # Keep whatever list we already have (cached or fallback).
                print("Failed to get genres from server, sorry.")
        return self.genre_list