Example #1
def test_normalize_case(self):
    for uri, expected0, expected1 in case_normalization_tests:
        testname = uri
        uri = iri.split_uri_ref(uri)
        self.assertEqual(expected0,
                         iri.unsplit_uri_ref(iri.normalize_case(uri)),
                         testname)
        self.assertEqual(expected1,
                         iri.unsplit_uri_ref(iri.normalize_case(uri, doHost=1)),
                         testname + ' (host too)')
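The test table itself is not shown; from the loop it is presumably a list of (input URI, expected result, expected result with host normalization) tuples. A minimal hypothetical entry, assuming normalize_case lowercases the scheme and only touches the host when doHost is set:

case_normalization_tests = [
    # (input, expected, expected with doHost=1) -- illustrative data only
    ('HTTP://www.EXAMPLE.com/Path',
     'http://www.EXAMPLE.com/Path',    # scheme lowercased
     'http://www.example.com/Path'),   # host lowercased as well
]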
Example #2
# Look at each Wiki URL and build an appropriate opener object for retrieving
# pages.   If the URL includes HTTP authentication information such as
# http://user:[email protected]/mywiki, the opener is built with
# basic authentication enabled.   For details, see:
#
#     HTTP basic auth: http://www.voidspace.org.uk/python/articles/urllib2.shtml#id6
for k, v in TARGET_WIKIS.items():
    # The target wiki base URI must end in '/'
    v = v.rstrip('/') + '/'
    (scheme, authority, path, query, fragment) = split_uri_ref(v)
    auth, host, port = split_authority(authority)
    authority = (host + ':' + port) if port else host
    schemeless_url = authority + path
    if auth:
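        # The userinfo part was already dropped when 'authority' was rebuilt
        # above, so the stored wiki URL is rewritten without the credentials;
        # they are handed to the password manager below instead.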
        TARGET_WIKIS[k] = unsplit_uri_ref((scheme, authority, path, query, fragment))
        auth = auth.split(':')
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        # Not setting the realm for now, so use None
        password_mgr.add_password(None, scheme + "://" + host + path, auth[0], auth[1])
        password_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
        TARGET_WIKI_OPENERS[k] = urllib2.build_opener(
            password_handler,
            urllib2.HTTPCookieProcessor(),
            multipart_post_handler.MultipartPostHandler)
    else:
        TARGET_WIKI_OPENERS[k] = DEFAULT_OPENER

SERVICE_ID = 'http://purl.org/xml3k/akara/services/demo/moinrest'
DEFAULT_MOUNT = 'moin'
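For reference, an opener built this way is used like any other urllib2 opener. A minimal sketch, assuming a configured wiki key named 'mywiki' and a page called 'FrontPage' (both hypothetical):

opener = TARGET_WIKI_OPENERS['mywiki']
response = opener.open(TARGET_WIKIS['mywiki'] + 'FrontPage')
page_source = response.read()   # the basic-auth handler answers any 401 challenge automatically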