Example #1
def loginToWiki(wiki):
    if wiki in WIKILOGINS:
        return WIKILOGINS[wiki]
    desktop = XSCRIPTCONTEXT.getDesktop()
    model = desktop.getCurrentComponent()
    sheet = config("Wikipedia")
    if not model.Sheets.hasByName(sheet):
        # Valid insert positions run 0..getCount(), so getCount() appends at the end
        model.Sheets.insertNewByName(sheet, model.Sheets.getCount())
        sheet = model.Sheets.getByName(sheet)
        sheet.getCellRangeByName("A1").setString("URL")
        sheet.getCellRangeByName("A2").setString("Username")
        sheet.getCellRangeByName("A3").setString("Password")
    else:
        sheet = model.Sheets.getByName(sheet)
    urlColumn = getColumn("URL")
    userColumn = getColumn("Username")
    passwordColumn = getColumn("Password")
    index = 2
    levelCell = sheet.getCellRangeByName(urlColumn + str(index))
    while (levelCell.getType().value != "EMPTY"):
        if (levelCell.String == wiki):
            break
        index = index + 1
        levelCell = sheet.getCellRangeByName(urlColumn + str(index))
    username = sheet.getCellRangeByName(userColumn + str(index)).getString()
    password = sheet.getCellRangeByName(passwordColumn + str(index)).getString()
    session = CacheControl(requests.session())
    session.headers.update(USERAGENT)
    logintoken = session.get(url=wiki, params={
        "action": "query", "meta": "tokens", "type": "login",
        "maxlag": 5, "format": "json"}).json()["query"]["tokens"]["logintoken"]
    session.post(wiki, data={"action": "login", "lgname": username,
                             "lgpassword": password, "lgtoken": logintoken})
    CSRF = session.get(url=wiki, params={
        "action": "query", "meta": "tokens",
        "maxlag": 5, "format": "json"}).json()["query"]["tokens"]["csrftoken"]
    # Store the token in the module-level dict; rebinding WIKILOGINS here would
    # make it a local name and break the membership test at the top of the function
    WIKILOGINS[wiki] = CSRF
    return CSRF
Example #2
class FeatureRequesterImpl(FeatureRequester):
    def __init__(self, sdk_key, config):
        self._sdk_key = sdk_key
        self._session = CacheControl(requests.Session())
        self._config = config

    def get_all(self):
        hdrs = _headers(self._sdk_key)
        uri = self._config.get_latest_features_uri
        log.debug("Getting all flags using uri: " + uri)
        r = self._session.get(uri,
                              headers=hdrs,
                              timeout=(self._config.connect_timeout,
                                       self._config.read_timeout))
        r.raise_for_status()
        features = r.json()
        return features

    def get_one(self, key):
        hdrs = _headers(self._sdk_key)
        uri = self._config.get_latest_features_uri + '/' + key
        log.debug("Getting one feature flag using uri: " + uri)
        r = self._session.get(uri,
                              headers=hdrs,
                              timeout=(self._config.connect_timeout,
                                       self._config.read_timeout))
        r.raise_for_status()
        feature = r.json()
        return feature
Example #3
    def test_file_cache_recognizes_consumed_file_handle(self, url):
        s = CacheControl(Session(), FileCache("web_cache"))
        the_url = url + "cache_60"
        s.get(the_url)
        r = s.get(the_url)
        assert r.from_cache
        s.close()
Example #4
class CacheControlHelper(object):
    def __init__(self):
        self.sess = CacheControl(requests.session(),
                                 heuristic=CustomHeuristic(days=30),
                                 cache=FileCache('.web_cache'))
        self.exceptions = requests.exceptions

    def get(self,
            url,
            params=None,
            timeout=120,
            cookies=None,
            headers={'Accept': 'application/json'}):
        if cookies:
            return self.sess.get(url,
                                 params=params,
                                 timeout=timeout,
                                 cookies=cookies,
                                 headers=headers)
        else:
            return self.sess.get(url,
                                 params=params,
                                 timeout=timeout,
                                 headers=headers)

    def post(self,
             url,
             data,
             timeout=120,
             headers={'Accept': 'application/json'}):
        return self.sess.post(url, data=data, timeout=timeout, headers=headers)
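The CustomHeuristic used above is not defined in this snippet; a plausible sketch, modeled on the BaseHeuristic examples in cachecontrol's documentation, would force every response to stay fresh for a fixed number of days:

import calendar
from datetime import datetime, timedelta
from email.utils import formatdate

from cachecontrol.heuristics import BaseHeuristic

class CustomHeuristic(BaseHeuristic):
    """Hypothetical heuristic: treat every response as fresh for `days` days."""
    def __init__(self, days=1):
        self.days = days

    def update_headers(self, response):
        # Overwrite Expires and mark the response publicly cacheable
        expires = datetime.utcnow() + timedelta(days=self.days)
        return {
            'expires': formatdate(calendar.timegm(expires.timetuple())),
            'cache-control': 'public',
        }

    def warning(self, response):
        return '110 - "Response is Stale"'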
Example #5
def getTracks(area, bboxInformation):
    apis = getApis()
    session = requests.session()
    session.headers.update({"User-Agent": USER_AGENT})
    cached_session = CacheControl(session)
    for api in apis:
        if "data" in api:
            data = api["data"]
            if "bbTopLeft" in data:
                data["bbTopLeft"] = "{lat},{lon}".format(
                    lat=bboxInformation[3], lon=bboxInformation[0])
            if "bbBottomRight" in data:
                data["bbBottomRight"] = "{lat},{lon}".format(
                    lat=bboxInformation[1], lon=bboxInformation[2])
            response = cached_session.post(api["api"] + api["tracks"],
                                           data=data)
            tJson = response.json()
        else:
            turl = api["api"] + api["tracks"]
            params = api["params"]
            params["bbox"] = params["bbox"].format(
                minx=bboxInformation[0],
                miny=bboxInformation[1],
                maxx=bboxInformation[2],
                maxy=bboxInformation[3],
            )
            response = cached_session.get(turl, params=params)
            print(response.url)
            while response.status_code != requests.codes.ok:
                time.sleep(1)
                response = cached_session.get(turl, params=params)
            try:
                tJson = response.json()
            except json.decoder.JSONDecodeError as e:
                print(response.url)
                print(response.text)
                raise e
            while "next" in response.links:
                next_url = response.links["next"]["url"]
                response = cached_session.get(next_url)
                while response.status_code != requests.codes.ok:
                    time.sleep(1)
                    response = cached_session.get(next_url)
                try:
                    tJson["features"] = (tJson["features"] +
                                         response.json()["features"])
                except json.decoder.JSONDecodeError as e:
                    print(response.url)
                    print(response.text)
                    raise e
        if response.status_code != requests.codes.ok:
            raise ValueError("{} gave us a status code of {}".format(
                response.url, response.status_code))
        if api["name"] == "openstreetcam":
            tJson = convertJson(tJson)
        save(area, api["name"], tJson)
Example #6
def full():

    sess = requests.session()
    cached_sess = CacheControl(sess)

    r_articles = cached_sess.get('http://localhost/articles/retrieve_articles/10/', auth=('email', 'password'))
    
    items = []
    if r_articles.status_code == 200:
        articles = r_articles.json()
        for article in articles:
            r_comments = cached_sess.get('http://localhost/comments/count/' + str(article['article_id']), auth=('email', 'password'))
            
            if r_comments.status_code == 200:
                commentCount = r_comments.json()
            else:
                print(str(r_comments.status_code), file=sys.stderr)
                return "comments error"

            r_tags = cached_sess.get('http://localhost/tags/all/' + str(article['article_id']), auth=('email', 'password'))
            
            if r_tags.status_code == 200:
                allTags = r_tags.json()

                tags_arr = []
                for tag in allTags:
                    tags_arr.append(
                        tag['tag']
                    )
            else:
                print(str(r_tags.status_code), file=sys.stderr)
                return "tags error"

            items.append(
                Item(
                    title = article['title'],
                    author = article['author'],
                    description = article['content'],
                    categories = tags_arr,
                    comments = commentCount['numOfComments']
                ))
        
        feed = Feed(
            title = "Full RSS Feed",
            link = "http://localhost/rss/full",
            description = "This feed shows a full content for 10 articles",
            language = "en-US",
            lastBuildDate = datetime.datetime.now(),
            items = items
        )
        return feed.rss()

    else:
        print(str(r_articles.status_code), file=sys.stderr)
        return "articles error"
Example #7
class AzureVerifier:
    api_version = 'v2.0'
    authority = 'https://login.microsoftonline.com'
    tenant = 'common'
    discovery_document = '.well-known/openid-configuration'

    audiences = None
    issuer = None

    def __init__(self, tenant='common', issuer=None, audiences=None):
        self.tenant = tenant or self.tenant
        self.issuer = issuer
        self.audiences = audiences
        self.session = CacheControl(requests.Session())

    @property
    def documents_uri(self):
        return f'{self.authority}/{self.tenant}/{self.api_version}/{self.discovery_document}'

    @property
    def jwks_uri(self):
        return self.session.get(self.documents_uri).json()['jwks_uri']

    @property
    def jwks(self):
        return self.session.get(self.jwks_uri).json()

    def get_jwk(self, kid):
        for jwk in self.jwks.get('keys'):
            if jwk.get('kid') == kid:
                return jwk
        raise InvalidAuthorizationToken('kid not recognized')

    def get_kid(self, token):
        headers = jwt.get_unverified_header(token)
        if not headers:
            raise InvalidAuthorizationToken('missing headers')
        try:
            return headers['kid']
        except KeyError:
            raise InvalidAuthorizationToken('missing kid')

    def get_public_key(self, token):
        return rsa_pem_from_jwk(self.get_jwk(self.get_kid(token)))

    def verify(self, token):
        public_key = self.get_public_key(token)

        return jwt.decode(token,
                          public_key,
                          verify=True,
                          algorithms=['RS256'],
                          audience=self.audiences,
                          issuer=self.issuer)
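A minimal usage sketch; the tenant, audience, and token below are placeholders:

verifier = AzureVerifier(tenant='common', audiences=['api://my-client-id'])
raw_token = '<JWT taken from the Authorization header>'
try:
    claims = verifier.verify(raw_token)
    print(claims.get('sub'))
except InvalidAuthorizationToken as err:
    print('token rejected:', err)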
Example #8
class TestStream(object):
    def setup(self):
        self.sess = CacheControl(requests.Session())

    def test_stream_is_cached(self, url):
        resp_1 = self.sess.get(url + 'stream')
        content_1 = resp_1.content

        resp_2 = self.sess.get(url + 'stream')
        content_2 = resp_2.content

        assert not resp_1.from_cache
        assert resp_2.from_cache
        assert content_1 == content_2
Example #9
class SQLiteCacheTest(unittest.TestCase):
    def setUp(self):
        self.url = "https://httpbin.org/cache/60"
        self.sess = CacheControl(requests.Session(),
                                 cache=SQLiteCache(":memory:"))

    def tearDown(self):
        self.sess.close()

    def test_simple(self):
        response = self.sess.get(self.url)
        assert not response.from_cache
        response = self.sess.get(self.url)
        assert response.from_cache
Example #10
    def test_not_modified_releases_connection(self, server):
        sess = CacheControl(requests.Session())
        etag_url = urljoin(server.application_url, "/etag")
        sess.get(etag_url)

        resp = Mock(status=304, headers={})

        # This is how the urllib3 response is created in
        # requests.adapters
        response_mod = "requests.adapters.HTTPResponse.from_httplib"

        with patch(response_mod, Mock(return_value=resp)):
            sess.get(etag_url)
            assert resp.read.called
            assert resp.release_conn.called
Example #11
class JSONLocator(Locator):
    def __init__(self, url=PYPI_JSON_URL):
        self.url = url
        self.session = CacheControl(requests.session())

    def versions(self, distribution):
        url = "{}/{}/json".format(self.url, distribution)
        response = self.session.get(url)
        releases = response.json()['releases']
        # Only report versions that have at least one release file
        return [v for v, d in releases.items() if len(d) > 0]

    def get(self, distribution, version):
        url = "{}/{}/json".format(self.url, distribution)
        response = self.session.get(url)
        # Reformat the data...
        return response.json()['releases'][version]
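Assuming PYPI_JSON_URL points at a PyPI-style JSON API root (e.g. https://pypi.org/pypi), usage might look like:

locator = JSONLocator()
print(locator.versions('requests'))       # versions that have at least one release file
print(locator.get('requests', '2.31.0'))  # file metadata for that release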
Example #12
    def test_not_modified_releases_connection(self, server, url):
        sess = CacheControl(requests.Session())
        etag_url = urljoin(url, "/etag")
        sess.get(etag_url)

        resp = Mock(status=304, headers={})

        # This is how the urllib3 response is created in
        # requests.adapters
        response_mod = "requests.adapters.HTTPResponse.from_httplib"

        with patch(response_mod, Mock(return_value=resp)):
            sess.get(etag_url)
            assert resp.read.called
            assert resp.release_conn.called
Example #13
class TestStream(object):

    def setup(self):
        self.sess = CacheControl(requests.Session())

    def test_stream_is_cached(self, url):
        resp_1 = self.sess.get(url + 'stream')
        content_1 = resp_1.content

        resp_2 = self.sess.get(url + 'stream')
        content_2 = resp_2.content

        assert not resp_1.from_cache
        assert resp_2.from_cache
        assert content_1 == content_2
Example #14
def all_sites(sitemap_url='http://library.link/harvest/sitemap.xml'):
    '''
    >>> from librarylink.util import all_sites
    >>> [ s.host for s in all_sites() if 'denverlibrary' in s.host ]
    ['link.denverlibrary.org']
    '''
    #FIXME: Avoid accumulating all the nodes, which will require improvements to xml.treesequence
    @coroutine
    def sink(accumulator):
        while True:
            e = yield
            loc = next(select_name(e, 'loc'))
            lastmod = next(select_name(e, 'lastmod'))
            s = liblink_site()
            s.sitemap = loc.xml_value
            s.url, _, tail = s.sitemap.partition('harvest/sitemap.xml')
            s.base_url = s.url #Legacy property name
            #Early warning for funky URLs breaking stuff downstream
            assert not tail
            protocol, s.host, path, query, fragment = iri.split_uri_ref(s.sitemap)
            s.lastmod = lastmod.xml_value
            accumulator.append(s)

    nodes = []
    ts = xml.treesequence(('sitemapindex', 'sitemap'), sink(nodes))
    if hasattr(all_sites, 'cachedir'):
        sess = CacheControl(requests.Session(), cache=FileCache(all_sites.cachedir))
    else:
        sess = CacheControl(requests.Session())
    result = sess.get(sitemap_url)
    ts.parse(result.text)
    yield from nodes
Example #15
def overpass_status(api_status_url="https://overpass-api.de/api/status"):
    """Get the overpass status -- this returns an int with the time to wait"""
    session = requests.session()
    session.headers.update({"User-Agent": USER_AGENT})
    cached_session = CacheControl(session)
    response = cached_session.get(api_status_url)
    if response.status_code != requests.codes.ok:
        raise ValueError("Bad Request: {}".format(api_status_url))
    parsed_response = {"wait_time": []}
    for i in response.text.splitlines():
        if "Connected as" in i:
            parsed_response["connected_as"] = i.split(":")[1].strip()
        elif "Current time" in i:
            parsed_response["current_time"] = i.split(":")[1].strip()
        elif "Rate limit" in i:
            parsed_response["rate_limit"] = int(i.split(":")[1].strip())
        elif "slots available now" in i:
            parsed_response["slots_available"] = int(i.split(" ")[0].strip())
        elif "Slot available after" in i:
            parsed_response["wait_time"].append(int(i.split(" ")[5]))
    if "slots_available" not in parsed_response:
        parsed_response["slots_available"] = 0
    wait_time = 0
    if (parsed_response["rate_limit"] - parsed_response["slots_available"] >= 2
            and len(parsed_response["wait_time"]) > 0):
        return max(parsed_response["wait_time"])
    return wait_time
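A sketch of the intended call pattern: wait out the reported delay before issuing the next Overpass query.

import time

wait = overpass_status()
if wait > 0:
    time.sleep(wait)
# safe to query the Overpass API now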
Example #16
def download(workdir, url):
    """Download a file, using .cache inside workdir as an HTTP cache."""
    logging.debug(u"initializing requests and cache-control")
    session = CacheControl(requests.Session(),
                           cache=FileCache(os.path.join(workdir, '.cache')))
    session.mount('file://', LocalFileAdapter())
    req = session.get(url, stream=True)
    try:
        downloaded_file = tempfile.TemporaryFile()
        size = 0
        start = datetime.datetime.now()
        for chunk in req.iter_content(chunk_size=1024000):
            if chunk:
                sys.stdout.write('.')
                sys.stdout.flush()
                downloaded_file.write(chunk)
                size += len(chunk)
        # print newline
        print()
        downloaded_file.flush()
        logging.info(u"downloaded {} - {} o. in {} s.", url, size,
                     (datetime.datetime.now() - start).total_seconds())
        logging.debug(u"reset file pointer - seek(0)")
        downloaded_file.seek(0)
        return downloaded_file
    except Exception as exc:
        logging.debug(u"error on download, closing and deleting file")
        downloaded_file.close()
        raise exc
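A usage sketch (the workdir and URL are illustrative); the returned temporary file has been rewound, so it can be read immediately:

fh = download('/tmp/work', 'https://example.com/archive.zip')
payload = fh.read()
fh.close()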
Example #17
    def fetch(self):
        feed = None
        if InformantConfig().get_argv_use_cache():
            cachefile = InformantConfig().get_cachefile()
            os.umask(
                0o0002
            )  # unrestrict umask so we can cache with proper permissions
            try:
                session = CacheControl(requests.Session(),
                                       cache=FileCache(cachefile,
                                                       filemode=0o0664,
                                                       dirmode=0o0775))
                feed = feedparser.parse(session.get(self.url).content)
            except Exception as e:
                ui.err_print('Unable to read cache information: {}'.format(e))
                feed = feedparser.parse(self.url)
        else:
            feed = feedparser.parse(self.url)

        if feed.bozo:
            ui.err_print('Encountered feed error: {}'.format(
                feed.bozo_exception))
            sys.exit(255)
        else:
            return feed
Example #18
class MAL(BaseLibrary, BaseMetadata):
    def __init__(self):
        super().__init__()
        self.name = "MAL"
        self.logo_url = 'https://upload.wikimedia.org/wikipedia/commons/7/7a/MyAnimeList_Logo.png'
        self.website_url = 'https://myanimelist.net/'
        client_id = "add1ed488bd218c2e10146345377a0b8"
        url_auth = "https://myanimelist.net/v1/oauth2/authorize"
        url_token = "https://myanimelist.net/v1/oauth2/token"
        self.authenticator = OAuth(self.name, client_id, url_auth, url_token)
        self.requests_session = CacheControl(requests.Session(),
                                             cache=FileCache('.Cache/MAL'),
                                             heuristic=MALHeuristic())
        # self.requests_session = requests.Session()
        self.rate_limiter = AsyncRateLimiter(max_calls=100,
                                             period=1,
                                             callback=limited)

    # Currently MAL is not rate limiting, but if it starts, I'll leave this here.
    def load(self, url):
        header = {
            "Authorization":
            str(self.authenticator.token_type + " " + self.authenticator.token)
        }
        with self.rate_limiter:
            result = self.requests_session.get(url, headers=header)
        try:
            if result.from_cache:
                # Deletes the last call if it was cached; only real API calls
                # need to be slowed down
                self.rate_limiter.calls.pop()
        except (AttributeError, IndexError):
            pass
        if result.ok:
            return json.loads(result.content)
        else:
            print(result.content)
            raise RuntimeError('Failed to grab data')

    def Lists(self):
        print("MAL: Obtaining User list")
        all_fields = "?fields=list_status&limit=1000"
        url = URL_MAIN + URL_ANIME_LIST.format(user_name="@me") + all_fields
        # Todo: implement paging
        anime_list = self.load(url)['data']
        AnimeList = MALFormatter.AnimeList(anime_list)
        return AnimeList

    def PopulateAnime(self, database: Database, anime_hash: str):
        oldAnimeData = AnimeStruct.Anime.from_db(anime_hash, database)
        if oldAnimeData.id.getID("MAL"):
            malID = oldAnimeData.id.getID("MAL")
            print(f"MAL: Obtaining Anime Metadata: {malID}")
            url = URL_MAIN + URL_DETAILS.format(
                id=str(malID)) + ANIME_ALL_FIELDS
            anime_metadata = self.load(url)
            properAnime = MALFormatter.AnimeMetadata(anime_metadata,
                                                     oldAnimeData)
            # remove edges to stop the anime from keeping some old info like type
            database.remove_successor_edges(oldAnimeData.hash)
            properAnime.to_db(database)
Example #19
    def open(self):
        global SESSION
        if SESSION is None:
            SESSION = CacheControl(Session(),
                                   cache=FileCache(SESSION_CACHE_PATH))

        try:
            self._response = SESSION.get(self.uri, headers=self.headers)
        except InvalidSchema as e:
            raise DocumentNotFoundException(
                u'document not found: "{0}"'.format(self.uri), cause=e)
        except ConnectionError as e:
            raise LoaderException(u'request connection error: "{0}"'.format(
                self.uri),
                                  cause=e)
        except Exception as e:
            raise LoaderException(u'request error: "{0}"'.format(self.uri),
                                  cause=e)

        status = self._response.status_code
        if status == 404:
            self._response = None
            raise DocumentNotFoundException(
                u'document not found: "{0}"'.format(self.uri))
        elif status != 200:
            self._response = None
            raise LoaderException(u'request error {0:d}: "{1}"'.format(
                status, self.uri))
Example #20
class LDClient(object):

    def __init__(self, apiKey, config=Config.default()):
        self._apiKey = apiKey
        self._config = config
        self._session = CacheControl(requests.Session())

    def get_flag(self, key, user, default=False):
        try:
            return self._get_flag(key, user, default)
        except Exception:
            logging.exception('Unhandled exception in get_flag. Returning default value for flag.')
            return default

    def _get_flag(self, key, user, default):
        hdrs = {'Authorization': 'api_key ' + self._apiKey,
                'User-Agent': 'PythonClient/' + __version__}
        uri = self._config._base_uri + '/api/eval/features/' + key
        r = self._session.get(uri, headers=hdrs,
                              timeout=(self._config._connect, self._config._read))
        data = r.json()
        val = _evaluate(data, user)
        if val is None:
            return default
        else:
            return val
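A hypothetical usage sketch; the API key and flag key are placeholders, and the user dict shape (a `key` field) is assumed:

client = LDClient('YOUR_API_KEY')
user = {'key': 'user-123'}
if client.get_flag('new-dashboard', user, default=False):
    print('feature enabled')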
Example #21
class SocrataApi:
    """
    Class for starting a session for requests via Socrata APIs.
    Initialize with a base_url
    """
    def __init__(self, base_url: str):
        self.session = CacheControl(requests.Session())
        self.base_url = base_url
        self.resource_url = urljoin(self.base_url, '/resource/')
        self.metadata_url = urljoin(self.base_url, '/api/views/metadata/v1/')

    def request(self, url: str, **kwargs: Any) -> Dict:
        try:
            response = self.session.get(url, **kwargs)
            response.raise_for_status()
            return response.json()
        except requests.exceptions.HTTPError as http_err:
            try:
                server_message = response.json()['message'] # see if the API returned message data
            except Exception:
                # if no JSON data, re-raise the original error
                raise http_err
            raise BadRequest(server_message, response=response)

    def resource(self, resource_id: str, **kwargs: Any) -> Dict:
        return self.request(f'{self.resource_url}{resource_id}', **kwargs)

    def metadata(self, resource_id: str, **kwargs: Any) -> Dict:
        return self.request(f'{self.metadata_url}{resource_id}.json', **kwargs)
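A usage sketch with a placeholder domain and resource id; keyword arguments are passed straight through to session.get:

api = SocrataApi('https://data.example.gov')
rows = api.resource('abcd-1234', params={'$limit': 10})
meta = api.metadata('abcd-1234')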
Example #22
def summary():

    sess = requests.session()
    cached_sess = CacheControl(sess)

    response = cached_sess.get('http://localhost/articles/retrieve_articles/10/', auth=('email', 'password'))

    items = []
    if response.status_code == 200:
        # Only parse the body once the request has succeeded
        articles = response.json()
        for article in articles:
            items.append(
                Item(
                    title = article['title'],
                    link = "http://localhost/articles/" + str(article['article_id']),
                    author = article['author'],
                    pubDate = datetime.datetime.strptime(str(article['date_published']), "%a, %d %b %Y %H:%M:%S GMT")
                ))
                 
        feed = Feed(
            title = "Summary RSS Feed",
            link = "http://localhost/rss/summary",
            description = "This feed shows a summary of the 10 most recent articles",
            language = "en-US",
            lastBuildDate = datetime.datetime.now(),
            items = items
        )
        return feed.rss()
    else:
        print(str(response.status_code), file=sys.stderr)
        return "articles error"
Example #23
def center_iterator(client=None) -> Iterator[Dict]:
    if not PLATFORM_ENABLED:
        logger.warning(
            f"{PLATFORM.capitalize()} scraping is disabled in the configuration file."
        )
        return

    session = CacheControl(requests.Session(), cache=FileCache("./cache"))

    if client:
        session = client
    try:
        url = f'{get_config().get("base_urls").get("github_public_path")}{get_conf_outputs().get("centers_json_path").format(PLATFORM)}'
        response = session.get(url)
        # If we are not being called from the unit tests
        if not client:
            if response.from_cache:
                logger.info(
                    f"Center list for {PLATFORM} came from the cache")
            else:
                logger.info(
                    f"Center list for {PLATFORM} was a real request")

        data = response.json()
        logger.info(
            f"Found {len(data)} {PLATFORM.capitalize()} centers (external scraper)."
        )
        for center in data:
            yield center
    except Exception as e:
        logger.warning(f"Unable to scrape {PLATFORM} centers: {e}")
Example #24
def main():
    """
    Execution begins here.
    """

    # Use our standard logger template
    logging.basicConfig(
        format="%(asctime)s %(levelname)-8s %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.DEBUG,
    )
    logger = logging.getLogger()

    # Specify list of test files to download.
    # Go to http://njrusmc.net/cache/cache.html to see all test files
    base_url = "http://njrusmc.net/cache"
    test_list = [
        "zero128k_public60.test",  # Cache-Control: public, max-age=60
        "zero128k_nostore.test",  # Cache-Control: no-store
    ]

    # For each file, run two GET requests, and use the logger to print out
    # the relevant information as requests are processed
    for test_file in test_list:

        # Assemble the complete URL to feed into the HTTP GET request
        url = f"{base_url}/{test_file}"

        # Create the cached session object, which automatically interprets
        # caching-related headers (requests doesn't do it natively)
        cached_sess = CacheControl(requests.session())

        # Print information from first run, include key headers
        logger.info("First GET to %s", url)
        resp = cached_sess.get(url)
        resp.raise_for_status()
        print_response(resp, dump_body=False)
        print(f"\n\n{'*' * 80}\n\n")

        # Slight delay just to show the cache timer countdown
        # Print information from second run, but focus is on background debugs
        time.sleep(2)
        logger.info("Second GET to %s", url)
        resp = cached_sess.get(url)
        resp.raise_for_status()
        print_response(resp, dump_body=False)
        print(f"\n\n{'*' * 80}\n\n")
Example #25
def getUserID(user):
    api_url = "https://www.openstreetmap.org/api/0.6/changesets"
    data = {"display_name": user}
    session = CacheControl(requests.session())
    result = session.get(api_url, params=data).text
    root = ElementTree.fromstring(result)
    changeset = root.find("changeset")
    return changeset.attrib['uid']
Example #26
class TestHeuristicWith3xxResponse(object):
    def setup(self):
        class DummyHeuristic(BaseHeuristic):
            def update_headers(self, resp):
                return {"x-dummy-header": "foobar"}

        self.sess = CacheControl(Session(), heuristic=DummyHeuristic())

    def test_heuristic_applies_to_301(self, url):
        the_url = url + "permanent_redirect"
        resp = self.sess.get(the_url)
        assert "x-dummy-header" in resp.headers

    def test_heuristic_applies_to_304(self, url):
        the_url = url + "conditional_get"
        resp = self.sess.get(the_url)
        assert "x-dummy-header" in resp.headers
Example #27
    def get_reader(self):
        sess = CacheControl(requests.Session(), cache=FileCache(gettempdir()))
        req = sess.get(self.file)

        # if the response is not 200, an exception will be raised
        req.raise_for_status()

        return io.BufferedReader(io.BytesIO(req.content))
Example #28
def Request(url,
            method="GET",
            headers=DEFAULT_HEADERS,
            additional_headers=None,
            data=None,
            session=None,
            allow_redirects=True,
            timeout=10,
            load_cookies=True,
            mobile=False):
    # Copy the shared default headers so updates don't mutate DEFAULT_HEADERS
    headers = dict(headers)
    if additional_headers:
        headers.update(additional_headers)
    try:
        session = CacheControl(session)
    except Exception as e:
        pass
        # Error("Init web cache failed!!!", e)
    if mobile:
        headers["User-Agents"] = MOBILE_IOS_AGENTS
    xbmc.log("Requests headers: {0}".format(json.dumps(headers)), 1)
    if session:
        session.headers.update(headers)
        domain = re.search("https*\://(.+?)($|/)", url).group(1)
        if load_cookies:
            LoadCookies(session, cookies_name=domain)
        if data:
            response = session.post(url,
                                    data=data,
                                    allow_redirects=allow_redirects,
                                    timeout=timeout,
                                    verify=False)
        else:
            if method == "HEAD":
                response = session.head(url,
                                        allow_redirects=allow_redirects,
                                        timeout=timeout,
                                        verify=False)
            else:
                response = session.get(url,
                                       allow_redirects=allow_redirects,
                                       timeout=timeout,
                                       verify=False)
        response.encoding = "utf8"
        SaveCookies(session, cookies_name=domain)
        return response
    else:
        if method == "HEAD":
            return requests.head(url,
                                 headers=headers,
                                 allow_redirects=allow_redirects,
                                 timeout=timeout,
                                 verify=False)
        else:
            return requests.get(url,
                                headers=headers,
                                allow_redirects=allow_redirects,
                                timeout=timeout,
                                verify=False)
Example #29
    def get_reader(self):
        sess = CacheControl(requests.Session(),
                            cache=FileCache(gettempdir()))
        req = sess.get(self.file)

        # if the response is not 200, an exception will be raised
        req.raise_for_status()

        return io.BufferedReader(io.BytesIO(req.content))
Example #30
def getURL(url,
           post_data=None,
           params=None,
           headers={},
           timeout=30,
           session=None,
           json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """

    # request session
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session,
                           cache=caches.FileCache(
                               os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({
        'User-Agent': USER_AGENT,
        'Accept-Encoding': 'gzip,deflate'
    })
    session.headers.update(headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            logger.log(
                u"Requested url " + url + " returned status code is " +
                str(resp.status_code) + ': ' +
                clients.http_error_code[resp.status_code], logger.DEBUG)
            return

    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url,
                   logger.WARNING)
        return
Example #31
def comments():
    sess = requests.session()
    cached_sess = CacheControl(sess)

    r_articles = cached_sess.get('http://localhost/articles/retrieve_articles/10/' , auth=('email', 'password'))
       
    if r_articles.status_code == 200:
        articles = r_articles.json()

        items = [] 
        for article in articles:
            r_comments = cached_sess.get('http://localhost/comments/recent/' + str(article['article_id']) + '/10', auth=('email', 'password'))
            
            if r_comments.status_code == 200:
                comments = r_comments.json()
                all_comments = []
                for comment in comments:
                    all_comments.append(
                        comment['comment']
                    )
            elif r_comments.status_code == 404:
                # No comments for this article yet
                all_comments = []
            else:
                print(str(r_comments.status_code), file=sys.stderr)
                return "comments error"

            items.append(
                Item(
                    title = article['title'],
                    pubDate = datetime.datetime.strptime(str(article['date_published']), "%a, %d %b %Y %H:%M:%S GMT"),
                    description =  all_comments
                ))
        feed = Feed(
            title = "Comments feed for articles",
            link = "http://localhost/rss/comments",
            description = "This feed shows comments for each article.",
            language = "en-US",
            lastBuildDate = datetime.datetime.now(),
            items = items
        )
        return feed.rss()
    else:
        print(str(r_articles.status_code), file=sys.stderr)
        return "articles error"
Example #32
def main():
    """
    Execution begins here.
    """

    # Use our standard logger template
    logging.basicConfig(
        format="%(asctime)s %(levelname)-8s %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.DEBUG,
    )
    logger = logging.getLogger()

    # Specify list of URLs to perform an HTTP GET against
    # Author's note: These files don't have "Cache-Control" anymore as I removed
    # them after the demo. Please replace these URLs with your own!
    url_list = [
        "http://njrusmc.net/jobaid/mpls_pcap.zip",  # Cache-Control: public (300s)
        "http://njrusmc.net/jobaid/ipsec_pcap.zip",  # Cache-Control: no-store
    ]

    # For each URL, run two GET requests, and use the logger to print out
    # the relevant information as requests are processed
    for url in url_list:

        # Create the cached session object, which automatically interprets
        # caching-related headers (requests doesn't do it natively)
        cached_sess = CacheControl(requests.session())

        # Print information from first run, include key headers
        logger.info("First GET to %s", url)
        resp = cached_sess.get(url)
        resp.raise_for_status()
        print_response(resp, dump_body=False)

        # Slight delay just to show the cache timer countdown
        # Print information from second run, but focus is on background debugs
        time.sleep(2)
        logger.info("Second GET to %s", url)
        resp = cached_sess.get(url)
        resp.raise_for_status()
        print_response(resp, dump_body=False)
Example #33
def main():
    """
    Execution begins here.
    """

    # Create a logger object to let us see what is happening behind the
    # scenes with the HTTP URLs
    logging.basicConfig()
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    # Specify list of URLs to perform an HTTP GET against
    # Author's note: These files don't have "Cache-Control" anymore as I removed
    # them after the demo. Please replace these URLs with your own!
    url_list = [
        "http://njrusmc.net/jobaid/wlan_pcap.zip",  # Cache-Control: public (300s)
        "http://njrusmc.net/jobaid/lmnop_answers.pdf",  # Cache-Control: no-store
    ]

    # For each URL, run two GET requests, and use the logger to print out
    # the relevant information as requests are processed
    for url in url_list:

        # Create the cached session object, which automatically interprets
        # caching-related headers (requests doesn't do it natively)
        cached_sess = CacheControl(requests.session())

        # Print information from first run, include key headers
        logger.info(
            "---------------------------------------------------------")
        logger.info("First GET to URL: %s", url)
        resp = cached_sess.get(url)
        logger.info("Response %s / %s", resp.status_code, resp.reason)
        logger.info("Cache-Control: %s", resp.headers.get("Cache-Control"))
        logger.info("ETag: %s", resp.headers.get("ETag"))

        # Slight delay just to show the cache timer countdown
        # Print information from second run, but focus is on background debugs
        time.sleep(1.5)
        logger.info("Second GET to URL: %s", url)
        resp = cached_sess.get(url)
        logger.info("Response %s / %s", resp.status_code, resp.reason)
Example #34
class TestHeuristicWith3xxResponse(object):

    def setup(self):

        class DummyHeuristic(BaseHeuristic):

            def update_headers(self, resp):
                return {"x-dummy-header": "foobar"}

        self.sess = CacheControl(Session(), heuristic=DummyHeuristic())

    def test_heuristic_applies_to_301(self, url):
        the_url = url + "permanent_redirect"
        resp = self.sess.get(the_url)
        assert "x-dummy-header" in resp.headers

    def test_heuristic_applies_to_304(self, url):
        the_url = url + "conditional_get"
        resp = self.sess.get(the_url)
        assert "x-dummy-header" in resp.headers
Example #35
class reQuiver(object):

    def __init__(self):
        self._raw_endpoint = "http://quiver.archerdx.com/results?query="
        self._sesh = CacheControl(requests.Session())

    def query(self, query):

        if len(query) == 0:
            raise EmptyQueryStringException()

        q_string = self._raw_endpoint + str(query)
        response = self._sesh.get(q_string)

        if response.status_code != 200:
            raise NetworkErrorException(response.status_code)

        soup = BeautifulSoup(response.content, "html.parser")

        # parse the panels
        panels = soup.find(panel_table_filter)
        panels_list = []

        if panels is not None:
            for row in panels.find_all("tr"):
                cells = row.find_all("td")
                
                if len(cells) == 2:
                    link = cells[0].a['href']
                    genes = [clean_string(gene) for gene in cells[1].string.split()]
                    panels_list.append(QuiverFushionPlexPanel(link, genes))

        # parse the fusions
        fusions = soup.find_all(fusion_table_filter)
        fusions_list = []

        if fusions is not None:
            for fusion in fusions:
                table = fusion.find('table')
                for row in table.find_all('tr'):
                    cells = row.find_all('td')
                    # Fusion rows carry five cells: link, annotation, disease,
                    # PubMed link, and evidence count
                    if len(cells) == 5:

                        # get the link
                        link = cells[0].a['href']
                        original_annotation = clean_string(cells[1].string)
                        disease = cells[2].string.strip()
                        pubmed_link = cells[3].a['href']
                        evidence_count = int(cells[4].string)

                        fusions_list.append(QuiverGeneFushion(link, original_annotation, disease,
                                            pubmed_link, evidence_count))

        return QuiverResultSet(panels_list, fusions_list, query)
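A usage sketch; the query string is illustrative:

quiver = reQuiver()
results = quiver.query('EML4-ALK')  # a QuiverResultSet of matching panels and fusions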
Example #36
def amalgama_lyrics(artist, song):
    url = amalgama.get_url(artist, song)
    try:
        cached_sess = CacheControl(sess, cache=FileCache('.amalgama'))
        response = cached_sess.get(url)
        response.raise_for_status()
    except requests.exceptions.HTTPError:
        print(f'{artist}-{song} not found in amalgama {url}')
        return None
    text = amalgama.get_html(response.text)
    return text
Example #37
def get_cached_session():
    sess = CacheControl(requests.Session(),
                        cache=FileCache(CACHE_DIR),
                        heuristic=LastModifiedNoDate(require_date=False))

    original_get = sess.get
    def wrapped_get(*args, **kwargs):
        try:
            return original_get(*args, **kwargs)
        except (OSError, IOError):
            # Fall back to an uncached request if the cache directory is unusable
            return requests.get(*args, **kwargs)
    sess.get = wrapped_get
    return sess
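A usage sketch: responses are persisted under CACHE_DIR, and cache I/O errors transparently fall back to a plain requests.get:

sess = get_cached_session()
resp = sess.get('https://example.com/data.json')
print(getattr(resp, 'from_cache', False))  # False when served by the fallback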
Example #38
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """

    # request session
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, "sessions")))

    # request session headers
    req_headers = {"User-Agent": USER_AGENT, "Accept-Encoding": "gzip,deflate"}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {"http": sickbeard.PROXY_SETTING, "https": sickbeard.PROXY_SETTING}

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            logger.log(
                u"Requested url "
                + url
                + " returned status code is "
                + str(resp.status_code)
                + ": "
                + clients.http_error_code[resp.status_code],
                logger.DEBUG,
            )
            return

    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return
Example #39
def fetch_file(url, encoding=None):
    s = requests.session()
    s = CacheControl(s, cache=FileCache(os.path.expanduser('~/.tst/cache')))

    try:
        response = s.get(url, headers={})
    except requests.ConnectionError:
        _assert(False, "Connection failed... check your internet connection")

    _assert(response.ok, "%s\nFile request failed: %s (%d)" % (url, response.reason, response.status_code))
    if encoding:
        response.encoding = encoding

    return response.text
Example #40
class TwistedHttpFeatureRequester(FeatureRequester):

    def __init__(self, api_key, config):
        self._api_key = api_key
        self._session = CacheControl(txrequests.Session())
        self._config = config

    def get(self, key, callback):
        d = self.toggle(key)
        d.addBoth(callback)
        return d

    def toggle(self, key):
        @defer.inlineCallbacks
        def run(should_retry):
            # noinspection PyBroadException
            try:
                val = yield self._toggle(key)
                defer.returnValue(val)
            except ProtocolError as e:
                inner = e.args[1]
                if inner.errno == errno.ECONNRESET and should_retry:
                    log.warning(
                        'ProtocolError exception caught while getting flag. Retrying.')
                    d = yield run(False)
                    defer.returnValue(d)
                else:
                    log.exception(
                        'Unhandled exception. Returning default value for flag.')
                    defer.returnValue(None)
            except Exception:
                log.exception(
                    'Unhandled exception. Returning default value for flag.')
                defer.returnValue(None)

        return run(True)

    @defer.inlineCallbacks
    def _toggle(self, key):
        hdrs = _headers(self._api_key)
        uri = self._config.base_uri + '/api/eval/features/' + key
        r = yield self._session.get(uri, headers=hdrs, timeout=(self._config.connect, self._config.read))
        r.raise_for_status()
        feature = r.json()
        defer.returnValue(feature)
Example #41
class TestHeuristicWithoutWarning(object):

    def setup(self):

        class NoopHeuristic(BaseHeuristic):
            warning = Mock()

            def update_headers(self, resp):
                return {}

        self.heuristic = NoopHeuristic()
        self.sess = CacheControl(Session(), heuristic=self.heuristic)

    def test_no_header_change_means_no_warning_header(self, url):
        the_url = url + "optional_cacheable_request"
        resp = self.sess.get(the_url)

        assert not self.heuristic.warning.called
Example #42
def download_file(url, filename, session=None):
    # create session
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, "sessions")))

    # request session headers
    session.headers.update({"User-Agent": USER_AGENT, "Accept-Encoding": "gzip,deflate"})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {"http": sickbeard.PROXY_SETTING, "https": sickbeard.PROXY_SETTING}

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(
                u"Requested url "
                + url
                + " returned status code is "
                + str(resp.status_code)
                + ": "
                + clients.http_error_code[resp.status_code],
                logger.DEBUG,
            )
            return False

        with open(filename, "wb") as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False
Example #43
def _get_filehandle(filepath_or, *args, **kwargs):
    """Open file if `filepath_or` looks like a string/unicode/bytes, else
    pass through.
    """
    if _is_string_or_bytes(filepath_or):
        if requests.compat.urlparse(filepath_or).scheme in {'http', 'https'}:
            sess = CacheControl(requests.Session(),
                                cache=FileCache(gettempdir()))
            req = sess.get(filepath_or, **kwargs)

            # if the response is not 200, an exception will be raised
            req.raise_for_status()

            fh, own_fh = BytesIO(req.content), True
        else:
            fh, own_fh = open(filepath_or, *args, **kwargs), True
    else:
        fh, own_fh = filepath_or, False
    return fh, own_fh
Example #44
def getURL(url, post_data=None, params=None, headers={}, timeout=30, session=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """

    # request session
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
    session.headers.update(headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)

        if not resp.ok:
            logger.log(u"Requested url " + url + " returned status code is " + str(
                resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
            return

    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return
Example #45
def Request(
	url,
	method="GET",
	headers=DEFAULT_HEADERS,
	additional_headers=None,
	data=None,
	session=None,
	allow_redirects=True,
	timeout=10,
	load_cookies=True,
	mobile=False
):
	# Copy the shared default headers so updates don't mutate DEFAULT_HEADERS
	headers = dict(headers)
	if additional_headers:
		headers.update(additional_headers)
	try:
		session = CacheControl(session)
	except Exception as e:
		pass
		# Error("Init web cache failed!!!", e)
	if mobile:
		headers["User-Agents"] = MOBILE_IOS_AGENTS
	xbmc.log("Requests headers: {0}".format(json.dumps(headers)), 1)
	if session:
		session.headers.update(headers)
		domain = re.search("https*\://(.+?)($|/)", url).group(1)
		if load_cookies:
			LoadCookies(session, cookies_name=domain)
		if data:
			response = session.post(url, data=data, allow_redirects=allow_redirects, timeout=timeout, verify=False)
		else:
			if method == "HEAD":
				response = session.head(url, allow_redirects=allow_redirects, timeout=timeout, verify=False)
			else:
				response = session.get(url, allow_redirects=allow_redirects, timeout=timeout, verify=False)
		response.encoding = "utf8"
		SaveCookies(session, cookies_name=domain)
		return response
	else:
		if method == "HEAD":
			return requests.head(url, headers=headers, allow_redirects=allow_redirects, timeout=timeout, verify=False)
		else:
			return requests.get(url, headers=headers, allow_redirects=allow_redirects, timeout=timeout, verify=False)
Example #46
def main():
    current = pkg_resources.get_distribution('tst').version
    if not sys.stdout.isatty():
        print(current)
        return

    cprint(WHITE, current, file=sys.stdout)
    try:
        s = requests.session()
        s = CacheControl(s, cache=FileCache(os.path.expanduser('~/.tst/cache')))
        response = s.get('https://pypi.org/pypi/tst/json')
        data = response.json()
    except requests.ConnectionError:
        # Without a response there is no version data to compare against
        return

    latest_version = data['info']['version']
    if current != latest_version:
        cprint(YELLOW, 'Latest version available: %s' % latest_version, file=sys.stdout)
        cprint(RESET, '---\nUse `pip install --upgrade tst`')
        cprint(RESET, ' or `pip install --upgrade --user tst`')
Example #47
class RequestsFeatureRequester(FeatureRequester):

    def __init__(self, api_key, config):
        self._api_key = api_key
        self._session = CacheControl(requests.Session())
        self._config = config

    def get(self, key, callback):
        # return callback(do_toggle(key))

        def do_toggle(should_retry):
            # noinspection PyBroadException,PyUnresolvedReferences
            try:
                val = self._toggle(key)
                return val
            except ProtocolError as e:
                inner = e.args[1]
                if inner.errno == errno.ECONNRESET and should_retry:
                    log.warning(
                        'ProtocolError exception caught while getting flag. Retrying.')
                    return do_toggle(False)
                else:
                    log.exception(
                        'Unhandled exception. Returning default value for flag.')
                    return None
            except Exception:
                log.exception(
                    'Unhandled exception. Returning default value for flag.')
                return None

        return callback(do_toggle(True))

    def _toggle(self, key):
        hdrs = _headers(self._api_key)
        uri = self._config.base_uri + '/api/eval/features/' + key
        r = self._session.get(uri, headers=hdrs, timeout=(
            self._config.connect, self._config.read))
        r.raise_for_status()
        feature = r.json()
        return feature
Example #48
def downloadHttpFile(httpurl):
    # type: (Text) -> Text
    if "XDG_CACHE_HOME" in os.environ:
        directory = os.environ["XDG_CACHE_HOME"]
    elif "HOME" in os.environ:
        directory = os.environ["HOME"]
    else:
        directory = os.path.expanduser('~')

    cache_session = CacheControl(
        requests.Session(),
        cache=FileCache(
            os.path.join(directory, ".cache", "cwltool")))

    r = cache_session.get(httpurl, stream=True)
    with NamedTemporaryFile(mode='wb', delete=False) as f:
        for chunk in r.iter_content(chunk_size=16384):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
    r.close()
    return f.name
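A usage sketch (the URL is a placeholder); the caller is responsible for removing the temporary file:

import os

path = downloadHttpFile('https://example.com/workflow.cwl')
with open(path) as fh:
    print(fh.read()[:80])
os.remove(path)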
Example #49
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """

    # request session
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))

    # request session headers
    req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

        resp = session.get(url, data=post_data, timeout=timeout)
    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return
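
An illustrative call (the excerpt above is truncated before the success path; sickbeard.CACHE_DIR and USER_AGENT are assumed to be configured by the surrounding application):

page = getURL('https://example.com//rss//feed', timeout=15)  # doubled slashes in the path are collapsed before the request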
Exemplo n.º 50
0
def download_file(url, filename, session=None):
    # create session
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {
            "http": sickbeard.PROXY_SETTING,
            "https": sickbeard.PROXY_SETTING,
        }

    try:
        resp = session.get(url)
        if not resp.ok:
            return False

        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False

    return True
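
Usage sketch with an illustrative URL and target path:

ok = download_file('https://example.com/banner.jpg', '/tmp/banner.jpg')
print('downloaded' if ok else 'download failed')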
Exemplo n.º 51
0
    def urls(self):
        s = requests.session()
        s = CacheControl(s, cache=FileCache(os.path.expanduser('~/.tst/cache')))

        headers = {}
        tokens = JsonFile(os.path.expanduser('~/.tst/tokens.json'))
        token = tokens.get(self.name)
        if token:
            headers['Authorization'] = 'Bearer %s' % token

        try:
            response = s.get(self.url, headers=headers, allow_redirects=True)
        except requests.ConnectionError:
            _assert(False, "Connection failed... check your internet connection")

        if not response.ok:
            return None

        response.encoding = 'utf-8'
        try:
            resource = response.json()
            resource['_response'] = response

        except ValueError:
            return None

        return resource
Exemplo n.º 52
0
    def get(self, key):
        s = requests.session()
        s = CacheControl(s, cache=FileCache(os.path.expanduser('~/.tst/cache')))

        url = "%s/%s" % (self.url, key)
        headers = {}
        tokens = JsonFile(os.path.expanduser('~/.tst/tokens.json'))
        token = tokens.get(self.name)
        if token:
            headers['Authorization'] = 'Bearer %s' % token

        try:
            response = s.get(url, headers=headers, allow_redirects=True)
        except requests.ConnectionError:
            _assert(False, "Connection failed... check your internet connection")

        if not response.ok:
            self.last_error = response.status_code
            self.last_response = response
            return None

        response.encoding = 'utf-8'
        try:
            resource = response.json()
            resource['_response'] = response
            validate_tst_object(resource)

        except ValueError:
            #_assert(False, "Resource is not valid json")
            return None

        except AssertionError as e:
            print(resource)
            _assert(False, "Not a TST Object: %s" % e.message)

        return resource
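
For reference, a sketch of how the ~/.tst/tokens.json file read above is assumed to look: a simple mapping from site name to bearer token (the names here are illustrative):

import json, os

with open(os.path.expanduser('~/.tst/tokens.json'), 'w') as f:
    json.dump({'mysite': 'my-bearer-token'}, f)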
Exemplo n.º 53
0
class Scrapper:
    scrape_delay = 1

    _requests = requests.session()
    _last_scrape = 0

    def __init__(self):
        try:
            from cachecontrol import CacheControl
            from cachecontrol.caches import FileCache
            import tempfile
            self._requests = CacheControl(self._requests, cache=FileCache(tempfile.gettempdir()+'/cagematch-cache', forever=True))
        except ImportError:
            logging.warning('CacheControl not available')

        self._requests.headers.update({'User-Agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'})

    def get(self, url, **kwargs):
        if self._last_scrape:
            sleeptime = max(0, self.scrape_delay - (time.time() - self._last_scrape))
            time.sleep(sleeptime)

        self._last_scrape = time.time()
        return self._requests.get(url, **kwargs)
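
Usage sketch: consecutive requests are spaced at least scrape_delay seconds apart, and responses are cached on disk when cachecontrol is installed (the URL is illustrative):

scrapper = Scrapper()
first = scrapper.get('https://example.com/page')
second = scrapper.get('https://example.com/page')  # throttled; may be served from cache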
Exemplo n.º 54
0
class TestPermanentRedirects(object):

    def setup(self):
        self.sess = CacheControl(requests.Session())

    def test_redirect_response_is_cached(self, url):
        self.sess.get(url + 'permanent_redirect', allow_redirects=False)

        resp = self.sess.get(url + 'permanent_redirect',
                             allow_redirects=False)
        assert resp.from_cache

    def test_bust_cache_on_redirect(self, url):
        self.sess.get(url + 'permanent_redirect', allow_redirects=False)

        resp = self.sess.get(url + 'permanent_redirect',
                             headers={'cache-control': 'no-cache'},
                             allow_redirects=False)
        assert not resp.from_cache
Exemplo n.º 55
0
    def test_max_bytes(self, tmpdir, sess):
        """
        Test that caches the first url but not the second because
        the maximum bytes have been reached for the cache.
        """
        # use a cache with max_bytes set
        max_bytes = 1400
        self.cache = FileCache(str(tmpdir), max_bytes=max_bytes)
        sess = CacheControl(requests.Session(), cache=self.cache)

        url1 = self.url + ''.join(sample(string.ascii_lowercase, randint(2, 4)))
        url2 = self.url + ''.join(sample(string.ascii_lowercase, randint(2, 4)))
        assert url1 != url2

        # fill up the cache with url1
        response = sess.get(url1)
        assert not response.from_cache

        # make sure it got into the cache
        response = sess.get(url1)
        assert response.from_cache

        # do url2 now
        response = sess.get(url2)
        assert not response.from_cache

        # make sure url2 was NOT cached
        response = sess.get(url2)
        assert not response.from_cache

        # clear the cache
        response = sess.delete(url1)
        assert not response.from_cache

        # re-add to cache since bytes should be back to 0
        response = sess.get(url1)
        assert not response.from_cache

        # verify from cache again
        response = sess.get(url1)
        assert response.from_cache
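
Note: max_bytes is not an argument accepted by FileCache in mainline cachecontrol releases; this test appears to target a patched or forked FileCache that evicts entries once the cache exceeds the given size.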
Exemplo n.º 56
0
class TestMultipleChoicesRedirects(object):

    def setup(self):
        self.sess = CacheControl(requests.Session())

    def test_multiple_choices_is_cacheable(self, url):
        self.sess.get(url + "multiple_choices_redirect", allow_redirects=False)

        resp = self.sess.get(url + "multiple_choices_redirect", allow_redirects=False)

        assert resp.from_cache

    def test_bust_cache_on_redirect(self, url):
        self.sess.get(url + "multiple_choices_redirect", allow_redirects=False)

        resp = self.sess.get(
            url + "multiple_choices_redirect",
            headers={"cache-control": "no-cache"},
            allow_redirects=False,
        )

        assert not resp.from_cache
Exemplo n.º 57
0
    def test_file_cache_recognizes_consumed_file_handle(self):
        s = CacheControl(Session(), FileCache('web_cache'))
        s.get('http://httpbin.org/cache/60')
        r = s.get('http://httpbin.org/cache/60')
        assert r.from_cache
Exemplo n.º 58
0
class SpotifyPlugin(object):
    def __init__(self):
        self.client = None
        self.server = None
        self.play_lock      = Semaphore(1)
        self.start_lock     = Semaphore(1)
        self.start_marker   = Event()
        self.last_track_uri = None
        self.last_track_object = None

        Dict.Reset()
        Dict['play_count']             = 0
        Dict['last_restart']           = 0
        Dict['schedule_restart_each']  = 5*60    # restart each  X minutes
        Dict['play_restart_each']      = 2       # restart each  X plays
        Dict['check_restart_each']     = 5       # check if I should restart each X seconds

        Dict['radio_salt']             = False   # Saves last radio salt so multiple queries return the same radio track list

        self.start()

        self.session = requests.session()
        self.session_cached = CacheControl(self.session)
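        # With no cache backend specified, CacheControl defaults to an
        # in-memory DictCache, so cached responses only live for the
        # lifetime of the plugin process.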

        Thread.CreateTimer(Dict['check_restart_each'], self.check_automatic_restart, globalize=True)

    @property
    def username(self):
        return Prefs["username"]

    @property
    def password(self):
        return Prefs["password"]

    def check_automatic_restart(self):

        can_restart = False

        try:

            diff = time.time() - Dict['last_restart']
            scheduled_restart  = diff >= Dict['schedule_restart_each']
            play_count_restart = Dict['play_count'] >= Dict['play_restart_each']
            must_restart = play_count_restart or scheduled_restart

            if must_restart:
                can_restart = self.play_lock.acquire(blocking=False)
                if can_restart:
                    Log.Debug('Automatic restart started')
                    self.start()
                    Log.Debug('Automatic restart finished')

        finally:

            if can_restart:
                self.play_lock.release()

            Thread.CreateTimer(Dict['check_restart_each'], self.check_automatic_restart, globalize=True)

    @check_restart
    def preferences_updated(self):
        """ Called when the user updates the plugin preferences"""
        self.start() # Trigger a client restart

    def start(self):
        """ Start the Spotify client and HTTP server """
        if not self.username or not self.password:
            Log("Username or password not set: not logging in")
            return False

        can_start = self.start_lock.acquire(blocking=False)
        try:
            # If a start is already in progress, just wait until it finishes,
            # but don't trigger another one
            if not can_start:
                Log.Debug("Start already in progress, waiting for it to finish")
                self.start_lock.acquire()
            else:
                Log.Debug("Start triggered, entering private section")
                self.start_marker.clear()

                if self.client:
                    self.client.restart(self.username, self.password)
                else:
                    self.client = SpotifyClient(self.username, self.password)

                self.last_track_uri = None
                self.last_track_object = None
                Dict['play_count']   = 0
                Dict['last_restart'] = time.time()
                self.start_marker.set()
                Log.Debug("Start finished, leaving private section")
        finally:
            self.start_lock.release()

        return self.client and self.client.is_logged_in()

    @check_restart
    def play(self, uri):
        Log('play(%s)' % repr(uri))

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        track_url = None
        if not self.client.is_track_uri_valid(uri):
            Log("Play track callback invoked with invalid URI (%s). This is very bad :-(" % uri)
            track_url = "http://www.xamuel.com/blank-mp3-files/2sec.mp3"
        else:
            self.play_lock.acquire(blocking=True)
            try:
                track_url = self.get_track_url(uri)

                # If first request failed, trigger re-connection to spotify
                retry_num = 0
                while not track_url and retry_num < 2:
                    Log.Info('get_track_url (%s) failed, re-connecting to spotify...' % uri)
                    time.sleep(retry_num*0.5) # Wait some time based on number of failures
                    if self.start():
                        track_url = self.get_track_url(uri)
                    retry_num = retry_num + 1

                if not track_url:
                    # Send a short, empty mp3 so the player does not fail and
                    # playback can continue with the next song
                    Log.Error("Play track (%s) couldn't be obtained. This is very bad :-(" % uri)
                    track_url = 'http://www.xamuel.com/blank-mp3-files/2sec.mp3'
                elif retry_num == 0:  # no restart was needed, so count this play
                    Dict['play_count'] = Dict['play_count'] + 1
            finally:
                self.play_lock.release()

        return Redirect(track_url)

    def get_track_url(self, track_uri):
        if not self.client.is_track_uri_valid(track_uri):
            return None

        track_url = None

        track = self.client.get(track_uri)
        if track:
            track_url = track.getFileURL(urlOnly=True, retries=1)

        return track_url

    #
    # TRACK DETAIL
    #
    @check_restart
    def metadata(self, uri):
        Log('metadata(%s)' % repr(uri))

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        oc = ObjectContainer()
        track_object = None

        if not self.client.is_track_uri_valid(uri):
            Log("Metadata callback invoked with invalid URI (%s)" % uri)
            track_object = self.create_track_object_empty(uri)
        else:
            if self.last_track_uri == uri:
                track_object = self.last_track_object
            else:
                track_metadata = self.get_track_metadata(uri)

                if track_metadata:
                    track_object = self.create_track_object_from_metatada(track_metadata)
                    self.last_track_uri = uri
                    self.last_track_object = track_object
                else:
                    track_object = self.create_track_object_empty(uri)

        oc.add(track_object)
        return oc

    def get_track_metadata(self, track_uri):
        if not self.client.is_track_uri_valid(track_uri):
            return None

        track = self.client.get(track_uri)
        if not track:
            return None

        #track_uri       = track.getURI().decode("utf-8")
        title           = track.getName().decode("utf-8")
        image_url       = self.select_image(track.getAlbumCovers())
        track_duration  = int(track.getDuration())
        track_number    = int(track.getNumber())
        track_album     = track.getAlbum(nameOnly=True).decode("utf-8")
        track_artists   = track.getArtists(nameOnly=True).decode("utf-8")
        metadata        = TrackMetadata(title, image_url, track_uri, track_duration, track_number, track_album, track_artists)

        return metadata

    @staticmethod
    def select_image(images):
        if images is None:
            return None

        # Prefer the largest available size, falling back to smaller ones
        for size in (640, 320, 300, 160, 60):
            if images.get(size):
                return images[size]

        Log.Info('Unable to select image, available sizes: %s' % images.keys())
        return None

    def get_uri_image(self, uri):
        images = None
        obj = self.client.get(uri)
        if isinstance(obj, SpotifyArtist):
            images = obj.getPortraits()
        elif isinstance(obj, SpotifyAlbum):
            images = obj.getCovers()
        elif isinstance(obj, SpotifyTrack):
            images = obj.getAlbum().getCovers()
        elif isinstance(obj, SpotifyPlaylist):
            images = obj.getImages()

        return self.select_image(images)

    @authenticated
    @check_restart
    def image(self, uri):
        if not uri:
            # TODO media specific placeholders
            return Redirect(R('placeholder-artist.png'))

        Log.Debug('Getting image for: %s' % uri)

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        if uri.startswith('spotify:'):
            # Fetch object for spotify URI and select image
            image_url = self.get_uri_image(uri)

            if not image_url:
                # TODO media specific placeholders
                return Redirect(R('placeholder-artist.png'))
        else:
            # pre-selected image provided
            Log.Debug('Using pre-selected image URL: "%s"' % uri)
            image_url = uri
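
        # Fetch through the cached session so repeated requests for the same
        # cover art can be served from CacheControl's cache.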

        return self.session_cached.get(image_url).content

    #
    # SECOND_LEVEL_MENU
    #

    @authenticated
    @check_restart
    def explore(self):
        """ Explore shared music """
        Log("explore")

        return ObjectContainer(
            objects=[
                DirectoryObject(
                    key=route_path('explore/featured_playlists'),
                    title=L("MENU_FEATURED_PLAYLISTS"),
                    thumb=R("icon-explore-featuredplaylists.png")
                ),
                DirectoryObject(
                    key=route_path('explore/top_playlists'),
                    title=L("MENU_TOP_PLAYLISTS"),
                    thumb=R("icon-explore-topplaylists.png")
                ),
                DirectoryObject(
                    key=route_path('explore/new_releases'),
                    title=L("MENU_NEW_RELEASES"),
                    thumb=R("icon-explore-newreleases.png")
                ),
                DirectoryObject(
                    key=route_path('explore/genres'),
                    title=L("MENU_GENRES"),
                    thumb=R("icon-explore-genres.png")
                )
            ],
        )

    @authenticated
    @check_restart
    def discover(self):
        Log("discover")

        oc = ObjectContainer(
            title2=L("MENU_DISCOVER"),
            view_group=ViewMode.Stories
        )

        stories = self.client.discover()
        for story in stories:
            self.add_story_to_directory(story, oc)
        return oc

    @authenticated
    @check_restart
    def radio(self):
        """ Show radio options """
        Log("radio")

        return ObjectContainer(
            objects=[
                DirectoryObject(
                    key=route_path('radio/stations'),
                    title=L("MENU_RADIO_STATIONS"),
                    thumb=R("icon-radio-stations.png")
                ),
                DirectoryObject(
                    key=route_path('radio/genres'),
                    title=L("MENU_RADIO_GENRES"),
                    thumb=R("icon-radio-genres.png")
                )
            ],
        )

    @authenticated
    @check_restart
    def your_music(self):
        """ Explore your music """
        Log("your_music")

        return ObjectContainer(
            objects=[
                DirectoryObject(
                    key=route_path('your_music/playlists'),
                    title=L("MENU_PLAYLISTS"),
                    thumb=R("icon-playlists.png")
                ),
                DirectoryObject(
                    key=route_path('your_music/starred'),
                    title=L("MENU_STARRED"),
                    thumb=R("icon-starred.png")
                ),
                DirectoryObject(
                    key=route_path('your_music/albums'),
                    title=L("MENU_ALBUMS"),
                    thumb=R("icon-albums.png")
                ),
                DirectoryObject(
                    key=route_path('your_music/artists'),
                    title=L("MENU_ARTISTS"),
                    thumb=R("icon-artists.png")
                ),
            ],
        )

    #
    # EXPLORE
    #

    @authenticated
    @check_restart
    def featured_playlists(self):
        Log("featured playlists")

        oc = ObjectContainer(
            title2=L("MENU_FEATURED_PLAYLISTS"),
            content=ContainerContent.Playlists,
            view_group=ViewMode.Playlists
        )

        playlists = self.client.get_featured_playlists()

        for playlist in playlists:
            self.add_playlist_to_directory(playlist, oc)

        return oc

    @authenticated
    @check_restart
    def top_playlists(self):
        Log("top playlists")

        oc = ObjectContainer(
            title2=L("MENU_TOP_PLAYLISTS"),
            content=ContainerContent.Playlists,
            view_group=ViewMode.Playlists
        )

        playlists = self.client.get_top_playlists()

        for playlist in playlists:
            self.add_playlist_to_directory(playlist, oc)

        return oc

    @authenticated
    @check_restart
    def new_releases(self):
        Log("new releases")

        oc = ObjectContainer(
            title2=L("MENU_NEW_RELEASES"),
            content=ContainerContent.Albums,
            view_group=ViewMode.Albums
        )

        albums = self.client.get_new_releases()

        for album in albums:
            self.add_album_to_directory(album, oc)

        return oc

    @authenticated
    @check_restart
    def genres(self):
        Log("genres")

        oc = ObjectContainer(
            title2=L("MENU_GENRES"),
            content=ContainerContent.Playlists,
            view_group=ViewMode.Playlists
        )

        genres = self.client.get_genres()

        for genre in genres:
            self.add_genre_to_directory(genre, oc)

        return oc

    @authenticated
    @check_restart
    def genre_playlists(self, genre_name):
        Log("genre playlists")

        oc = ObjectContainer(
            title2=genre_name,
            content=ContainerContent.Playlists,
            view_group=ViewMode.Playlists
        )

        playlists = self.client.get_playlists_by_genre(genre_name)

        for playlist in playlists:
            self.add_playlist_to_directory(playlist, oc)

        return oc

    #
    # RADIO
    #

    @authenticated
    @check_restart
    def radio_stations(self):
        Log('radio stations')

        Dict['radio_salt'] = False
        oc = ObjectContainer(title2=L("MENU_RADIO_STATIONS"))
        stations = self.client.get_radio_stations()
        for station in stations:
            oc.add(PopupDirectoryObject(
                        key=route_path('radio/stations/' + station.getURI()),
                        title=station.getTitle(),
                        thumb=function_path('image.png', uri=self.select_image(station.getImages()))
                        ))
        return oc

    @authenticated
    @check_restart
    def radio_genres(self):
        Log('radio genres')

        Dict['radio_salt'] = False
        oc = ObjectContainer(title2=L("MENU_RADIO_GENRES"))
        genres = self.client.get_radio_genres()
        for genre in genres:
            oc.add(PopupDirectoryObject(
                        key=route_path('radio/genres/' + genre.getURI()),
                        title=genre.getTitle(),
                        thumb=function_path('image.png', uri=self.select_image(genre.getImages()))
                        ))
        return oc

    @authenticated
    @check_restart
    def radio_track_num(self, uri):
        Log('radio track num')

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        return ObjectContainer(
            title2=L("MENU_RADIO_TRACK_NUM"),
            objects=[
                DirectoryObject(
                    key=route_path('radio/play/' + uri + '/10'),
                    title=localized_format("MENU_TRACK_NUM", "10"),
                    thumb=R("icon-radio-item.png")
                ),
                DirectoryObject(
                    key=route_path('radio/play/' + uri + '/20'),
                    title=localized_format("MENU_TRACK_NUM", "20"),
                    thumb=R("icon-radio-item.png")
                ),
                DirectoryObject(
                    key=route_path('radio/play/' + uri + '/50'),
                    title=localized_format("MENU_TRACK_NUM", "50"),
                    thumb=R("icon-radio-item.png")
                ),
                DirectoryObject(
                    key=route_path('radio/play/' + uri + '/80'),
                    title=localized_format("MENU_TRACK_NUM", "80"),
                    thumb=R("icon-radio-item.png")
                ),
                DirectoryObject(
                    key=route_path('radio/play/' + uri + '/100'),
                    title=localized_format("MENU_TRACK_NUM", "100"),
                    thumb=R("icon-radio-item.png")
                )
            ],
        )

    @authenticated
    @check_restart
    def radio_tracks(self, uri, num_tracks):
        Log('radio tracks')

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        radio = self.client.get_radio(uri)

        if not Dict['radio_salt']:
            Dict['radio_salt'] = radio.generateSalt()

        salt = Dict['radio_salt']
        tracks = radio.getTracks(salt=salt, num_tracks=int(num_tracks))

        oc = ObjectContainer(
            title2     = radio.getTitle().decode("utf-8"),
            content    = ContainerContent.Tracks,
            view_group = ViewMode.Tracks
        )

        for track in tracks:
            self.add_track_to_directory(track, oc)

        return oc

    #
    # YOUR_MUSIC
    #

    @authenticated
    @check_restart
    def playlists(self):
        Log("playlists")

        oc = ObjectContainer(
            title2=L("MENU_PLAYLISTS"),
            content=ContainerContent.Playlists,
            view_group=ViewMode.Playlists
        )

        playlists = self.client.get_playlists()

        for playlist in playlists:
            self.add_playlist_to_directory(playlist, oc)

        return oc

    @authenticated
    @check_restart
    def starred(self):
        Log("starred")

        oc = ObjectContainer(
            title2=L("MENU_STARRED"),
            content=ContainerContent.Tracks,
            view_group=ViewMode.Tracks
        )

        starred = self.client.get_starred()

        for x, track in enumerate(starred.getTracks()):
            self.add_track_to_directory(track, oc, index=x)

        return oc

    @authenticated
    @check_restart
    def albums(self):
        Log("albums")

        oc = ObjectContainer(
            title2=L("MENU_ALBUMS"),
            content=ContainerContent.Albums,
            view_group=ViewMode.Albums
        )

        albums = self.client.get_my_albums()

        for album in albums:
            self.add_album_to_directory(album, oc)

        return oc

    @authenticated
    @check_restart
    def artists(self):
        Log("artists")

        oc = ObjectContainer(
            title2=L("MENU_ARTISTS"),
            content=ContainerContent.Artists,
            view_group=ViewMode.Artists
        )

        artists = self.client.get_my_artists()

        for artist in artists:
            self.add_artist_to_directory(artist, oc)

        return oc

    #
    # ARTIST DETAIL
    #

    @authenticated
    @check_restart
    def artist(self, uri):
        Log("artist")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        artist = self.client.get(uri)
        return ObjectContainer(
            title2=artist.getName().decode("utf-8"),

            objects=[
                DirectoryObject(
                    key  = route_path('artist/%s/top_tracks' % uri),
                    title=L("MENU_TOP_TRACKS"),
                    thumb=R("icon-artist-toptracks.png")
                ),
                DirectoryObject(
                    key  = route_path('artist/%s/albums' % uri),
                    title =L("MENU_ALBUMS"),
                    thumb =R("icon-albums.png")
                ),
                DirectoryObject(
                    key  = route_path('artist/%s/related' % uri),
                    title =L("MENU_RELATED"),
                    thumb =R("icon-artist-related.png")
                ),
                DirectoryObject(
                    key=route_path('radio/stations/' + uri),
                    title =L("MENU_RADIO"),
                    thumb =R("icon-radio-custom.png")
                )
            ],
        )

    @authenticated
    @check_restart
    def artist_albums(self, uri):
        Log("artist_albums")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        artist = self.client.get(uri)

        oc = ObjectContainer(
            title2=artist.getName().decode("utf-8"),
            content=ContainerContent.Albums
        )

        for album in artist.getAlbums():
            self.add_album_to_directory(album, oc)

        return oc

    @authenticated
    @check_restart
    def artist_top_tracks(self, uri):
        Log("artist_top_tracks")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        artist     = self.client.get(uri)
        top_tracks = artist.getTracks()

        if top_tracks:
            oc = ObjectContainer(
                title2=artist.getName().decode("utf-8"),
                content=ContainerContent.Tracks,
                view_group=ViewMode.Tracks
            )
            for track in top_tracks:
                self.add_track_to_directory(track, oc)
        else:
            oc = MessageContainer(
                header=L("MSG_TITLE_NO_RESULTS"),
                message=localized_format("MSG_FMT_NO_RESULTS", artist.getName().decode("utf-8"))
            )
        return oc

    @authenticated
    @check_restart
    def artist_related(self, uri):
        Log("artist_related")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        artist = self.client.get(uri)

        oc = ObjectContainer(
            title2=localized_format("MSG_RELATED_TO", artist.getName().decode("utf-8")),
            content=ContainerContent.Artists
        )

        for artist in artist.getRelatedArtists():
            self.add_artist_to_directory(artist, oc)

        return oc

    #
    # ALBUM DETAIL
    #

    @authenticated
    @check_restart
    def album(self, uri):
        Log("album")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        album = self.client.get(uri)

        oc = ObjectContainer(
            title2=album.getName().decode("utf-8"),
            content=ContainerContent.Artists
        )

        oc.add(DirectoryObject(
                    key  = route_path('album/%s/tracks' % uri),
                    title=L("MENU_ALBUM_TRACKS"),
                    thumb=R("icon-album-tracks.png")))

        artists = album.getArtists()
        for artist in artists:
            self.add_artist_to_directory(artist, oc)

        oc.add(DirectoryObject(
                    key=route_path('radio/stations/' + uri),
                    title =L("MENU_RADIO"),
                    thumb =R("icon-radio-custom.png")))

        return oc

    @authenticated
    @check_restart
    def album_tracks(self, uri):
        Log("album_tracks")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")

        album = self.client.get(uri)

        oc = ObjectContainer(
            title2=album.getName().decode("utf-8"),
            content=ContainerContent.Tracks,
            view_group=ViewMode.Tracks
        )

        for track in album.getTracks():
            self.add_track_to_directory(track, oc)

        return oc

    #
    # PLAYLIST DETAIL
    #

    @authenticated
    @check_restart
    def playlist(self, uri):
        Log("playlist")

        uri = urllib.quote(uri.encode("utf8")).replace("%3A", ":").decode("utf8")
        
        pl = self.client.get(uri)
        if pl is None:
            # Unable to find playlist
            return MessageContainer(
                header=L("MSG_TITLE_UNKNOWN_PLAYLIST"),
                message='URI: %s' % uri
            )

        Log("Get playlist: %s", pl.getName().decode("utf-8"))
        Log.Debug('playlist truncated: %s', pl.obj.contents.truncated)

        oc = ObjectContainer(
            title2=pl.getName().decode("utf-8"),
            content=ContainerContent.Tracks,
            view_group=ViewMode.Tracks,
            mixed_parents=True
        )

        for x, track in enumerate(pl.getTracks()):
            self.add_track_to_directory(track, oc, index=x)

        return oc

    #
    # MAIN MENU
    #
    def main_menu(self):
        Log("main_menu")

        return ObjectContainer(
            objects=[
                InputDirectoryObject(
                    key=route_path('search'),
                    prompt=L("PROMPT_SEARCH"),
                    title=L("MENU_SEARCH"),
                    thumb=R("icon-search.png")
                ),
                DirectoryObject(
                    key=route_path('explore'),
                    title=L("MENU_EXPLORE"),
                    thumb=R("icon-explore.png")
                ),
                DirectoryObject(
                    key=route_path('discover'),
                    title=L("MENU_DISCOVER"),
                    thumb=R("icon-discover.png")
                ),
                DirectoryObject(
                    key=route_path('radio'),
                    title=L("MENU_RADIO"),
                    thumb=R("icon-radio.png")
                ),
                DirectoryObject(
                    key=route_path('your_music'),
                    title=L("MENU_YOUR_MUSIC"),
                    thumb=R("icon-yourmusic.png")
                ),
                PrefsObject(
                    title=L("MENU_PREFS"),
                    thumb=R("icon-preferences.png")
                )
            ],
        )

    #
    # Create objects
    #
    def create_track_object_from_track(self, track, index=None):
        if not track:
            return None

        # Get metadata info
        track_uri       = track.getURI()
        title           = track.getName().decode("utf-8")
        image_url       = self.select_image(track.getAlbumCovers())
        track_duration  = int(track.getDuration()) - 500
        track_number    = int(track.getNumber())
        track_album     = track.getAlbum(nameOnly=True).decode("utf-8")
        track_artists   = track.getArtists(nameOnly=True).decode("utf-8")
        metadata = TrackMetadata(title, image_url, track_uri, track_duration, track_number, track_album, track_artists)

        return self.create_track_object_from_metatada(metadata, index=index)

    def create_track_object_from_metatada(self, metadata, index=None):
        if not metadata:
            return None
        return self.create_track_object(metadata.uri, metadata.duration, metadata.title, metadata.album, metadata.artists, metadata.number, metadata.image_url, index)

    def create_track_object_empty(self, uri):
        if not uri:
            return None
        return self.create_track_object(uri, -1, "", "", "", 0, None)

    def create_track_object(self, uri, duration, title, album, artists, track_number, image_url, index=None):
        rating_key = uri
        if index is not None:
            rating_key = '%s::%s' % (uri, index)

        art_num = str(randint(1,40)).rjust(2, "0")

        track_obj = TrackObject(
            items=[
                MediaObject(
                    parts=[PartObject(key=route_path('play/%s' % uri))],
                    duration=duration,
                    container=Container.MP3, audio_codec=AudioCodec.MP3, audio_channels = 2
                )
            ],

            key = route_path('metadata', uri),
            rating_key = rating_key,

            title  = title,
            album  = album,
            artist = artists,

            index    = index if index is not None else track_number,
            duration = duration,

            source_title='Spotify',
            art   = R('art-' + art_num + '.png'),
            thumb = function_path('image.png', uri=image_url)
        )

        Log.Debug('New track object for metadata: --|%s|%s|%s|%s|%s|%s|--' % (image_url, uri, str(duration), str(track_number), album, artists))

        return track_obj

    def create_album_object(self, album, custom_summary=None, custom_image_url=None):
        """ Factory method for album objects """
        title = album.getName().decode("utf-8")
        if Prefs["displayAlbumYear"] and album.getYear() != 0:
            title = "%s (%s)" % (title, album.getYear())
        artist_name = album.getArtists(nameOnly=True).decode("utf-8")
        summary     = '' if custom_summary is None else custom_summary.decode('utf-8')
        image_url   = self.select_image(album.getCovers()) if custom_image_url is None else custom_image_url

        return DirectoryObject(
            key=route_path('album', album.getURI()),

            title=title + " - " + artist_name,
            tagline=artist_name,
            summary=summary,

            art=function_path('image.png', uri=image_url),
            thumb=function_path('image.png', uri=image_url),
        )

        #return AlbumObject(
        #    key=route_path('album', album.getURI().decode("utf-8")),
        #    rating_key=album.getURI().decode("utf-8"),
        #
        #    title=title,
        #    artist=artist_name,
        #    summary=summary,
        #
        #    track_count=album.getNumTracks(),
        #    source_title='Spotify',
        #
        #    art=function_path('image.png', uri=image_url),
        #    thumb=function_path('image.png', uri=image_url),
        #)

    def create_playlist_object(self, playlist):
        uri         = playlist.getURI()
        image_url   = self.select_image(playlist.getImages())
        artist      = playlist.getUsername().decode('utf8')
        title       = playlist.getName().decode("utf-8")
        summary     = ''
        if playlist.getDescription():
            summary = playlist.getDescription().decode("utf-8")

        return DirectoryObject(
            key=route_path('playlist', uri),

            title=title + " - " + artist,
            tagline=artist,
            summary=summary,

            art=function_path('image.png', uri=image_url) if image_url is not None else R("placeholder-playlist.png"),
            thumb=function_path('image.png', uri=image_url) if image_url is not None else R("placeholder-playlist.png")
        )

        #return AlbumObject(
        #    key=route_path('playlist', uri),
        #    rating_key=uri,
        #
        #    title=title,
        #    artist=artist,
        #    summary=summary,
        #
        #    source_title='Spotify',
        #
        #    art=function_path('image.png', uri=image_url) if image_url != None else R("placeholder-playlist.png"),
        #    thumb=function_path('image.png', uri=image_url) if image_url != None else R("placeholder-playlist.png")
        #)

    def create_genre_object(self, genre):
        uri         = genre.getTemplateName()
        title       = genre.getName().decode("utf-8")
        image_url   = genre.getIconUrl()

        return DirectoryObject(
            key=route_path('genre', uri),

            title=title,

            art=function_path('image.png', uri=image_url) if image_url is not None else R("placeholder-playlist.png"),
            thumb=function_path('image.png', uri=image_url) if image_url is not None else R("placeholder-playlist.png")
        )

    def create_artist_object(self, artist, custom_summary=None, custom_image_url=None):
        image_url   = self.select_image(artist.getPortraits()) if custom_image_url is None else custom_image_url
        artist_name = artist.getName().decode("utf-8")
        summary     = '' if custom_summary is None else custom_summary.decode('utf-8')

        return DirectoryObject(
                    key=route_path('artist', artist.getURI()),

                    title=artist_name,
                    summary=summary,

                    art=function_path('image.png', uri=image_url),
                    thumb=function_path('image.png', uri=image_url)
                )

        #return ArtistObject(
        #        key=route_path('artist', artist.getURI().decode("utf-8")),
        #        rating_key=artist.getURI().decode("utf-8"),
        #
        #        title=artist_name,
        #        summary=summary,
        #        source_title='Spotify',
        #
        #        art=function_path('image.png', uri=image_url),
        #        thumb=function_path('image.png', uri=image_url)
        #    )

    #
    # Insert objects into container
    #

    def add_section_header(self, title, oc):
        oc.add(
            DirectoryObject(
                key='',
                title=title
            )
        )

    def add_track_to_directory(self, track, oc, index = None):
        if not self.client.is_track_playable(track):
            Log("Ignoring unplayable track: %s" % track.getName())
            return

        track_uri = track.getURI().decode("utf-8")
        if not self.client.is_track_uri_valid(track_uri):
            Log("Ignoring unplayable track: %s, invalid uri: %s" % (track.getName(), track_uri))
            return

        oc.add(self.create_track_object_from_track(track, index=index))

    def add_album_to_directory(self, album, oc, custom_summary=None, custom_image_url=None):
        if not self.client.is_album_playable(album):
            Log("Ignoring unplayable album: %s" % album.getName())
            return
        oc.add(self.create_album_object(album, custom_summary=custom_summary, custom_image_url=custom_image_url))

    def add_artist_to_directory(self, artist, oc, custom_summary=None, custom_image_url=None):
        oc.add(self.create_artist_object(artist, custom_summary=custom_summary, custom_image_url=custom_image_url))

    def add_playlist_to_directory(self, playlist, oc):
        oc.add(self.create_playlist_object(playlist))

    def add_genre_to_directory(self, genre, oc):
        oc.add(self.create_genre_object(genre))

    def add_story_to_directory(self, story, oc):
        content_type = story.getContentType()
        image_url    = self.select_image(story.getImages())
        item         = story.getObject()
        if content_type == 'artist':
            self.add_artist_to_directory(item, oc, custom_summary=story.getDescription(), custom_image_url=image_url)
        elif content_type == 'album':
            self.add_album_to_directory(item,  oc, custom_summary=story.getDescription(), custom_image_url=image_url)
        elif content_type == 'track':
            self.add_album_to_directory(item.getAlbum(), oc, custom_summary=story.getDescription() + " - " + item.getName(), custom_image_url=image_url)