Code Example #1
    def createOpener(self):
        handlers = []  # proxy support
        if self.proxy:  # proxy support
            handlers.append(urllib2.ProxyHandler({"https": self.proxy}))  # proxy support

        cj = MyCookieJar()
        cj.set_policy(cookielib.DefaultCookiePolicy(rfc2965=True))
        cjhdr = urllib2.HTTPCookieProcessor(cj)
        handlers.append(cjhdr)  # proxy support
        return urllib2.build_opener(*handlers)  # proxy support
Code Example #2
 def login(self, password):
     """Login to the Mailman web interface"""
     policy = cookielib.DefaultCookiePolicy(rfc2965=True)
     cookiejar = cookielib.CookieJar(policy)
     self.data['opener'] = urllib2.build_opener(
         urllib2.HTTPCookieProcessor(cookiejar)).open
     url = '%s/%s' % (self.data['baseurl'], self.data['listname'])
     form = {'adminpw': password}
     try:
         page = self.data['opener'](url, urllib.urlencode(form))
     except (urllib2.URLError, httplib.InvalidURL), e:
         return False
Code Example #3
File: indafoto.py  Project: huwiki/old-pywiki-stuff
 def __init__(self, user=None):
     self.user = user
     policy = cookielib.DefaultCookiePolicy(allowed_domains=[
         'indapass.hu', 'indafoto.hu', '.indapass.hu', '.indafoto.hu'
     ])
     self.cookiejar = cookielib.LWPCookieJar(policy=policy)
     self.opener = urllib2.build_opener(
         urllib2.HTTPCookieProcessor(self.cookiejar))
     if self.user:
         cookie_file = self.getLoginDataFile()
         if os.path.exists(cookie_file):
             self.cookiejar.load(cookie_file,
                                 ignore_discard=True,
                                 ignore_expires=True)
Code Example #4
 def __init__(self, URL, parent_window, progress, postdata = None, referer = None):
     global _cookiejar
     self.URL = URL
     self.data = None
     self.parent_window = parent_window
     self.progress = progress
     self.postdata = postdata
     self.referer = referer
     self._stopevent = threading.Event()
     self._sleepperiod = 1.0
     if not _cookiejar:
         _cookiejar = cookielib.LWPCookieJar(policy = cookielib.DefaultCookiePolicy())
     self.cj = _cookiejar
     threading.Thread.__init__(self, name = "Retriever")
Code Example #5
def buildOpener():
    global server
    headers = {
        'Connection': 'Keep-alive',
        #'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.65 Safari/537.36',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/4.0.3 Safari/531.9',
        'Cache-Control': 'max-age=0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Origin': server,
        'Content-Type': 'application/x-www-form-urlencoded',
        'Referer': server + 'espaceclic/connection.do',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'Accept-Language': 'fr-FR,fr;q=0.8,en-US;q=0.6,en;q=0.4'
    }

    policy = cookielib.DefaultCookiePolicy( rfc2965=True )
    cj = cookielib.CookieJar( policy )
    keepalive_handler = keepalive.HTTPSHandler( )
    try:
        opener = urllib2.build_opener( keepalive_handler, GAECookieProcessor( cj )  )
    except urllib2.URLError as e:
        print "URL error"
        if hasattr(e, 'reason'):
            print 'Serveur inaccessible.'
            print 'Raison : ', e.reason
        if hasattr(e, 'code'):
            print 'Le serveur n\'a pas pu répondre à la requete.'
            print 'Code d\'erreur : ', e.code
            if e.code == 403:
                print 'Le serveur vous a refusé l\'accès'
            if e.code == 404:
                print 'La page demandée n\'existe pas. Peut-être la FFT a-t-elle changé ses adresses ?'
        exit( -1 )
    except urllib2.HTTPError as e:
        print "HTTP error code ", e.code, " : ", e.reason
        print "Vérifiez votre connexion, ou l\'état du serveur de la FFT"
        exit( -1 )
    except:
        import sys
        print "Build opener : Autre exception : ", sys.exc_type, sys.exc_value
        exit( -1 )

    opener.addheaders = list(headers.items())

    return cj, opener
Code Example #6
    def __init__(self, params):
        '''
        Constructor
        '''
        self._cookiePolicy = cookielib.DefaultCookiePolicy()
        self._cookieJar = cookielib.CookieJar(self._cookiePolicy)
        self._cookieHandler = urllib2.HTTPCookieProcessor(self._cookieJar)
        self._proxyHandler = urllib2.ProxyHandler(config.PROXY)
        self._opener = urllib2.build_opener(self._proxyHandler,
                                            self._cookieHandler)

        self._loggedIn = False

        if (params.has_key('username') and params.has_key('password')):
            self._loggedIn = self._login(params['username'],
                                         params['password'])
            if (self._loggedIn == False):
                raise InvalidCredentialsException()
Code Example #7
    def __init__( self, config = None ):
        """
        Initializes an http object.

        Example:
            client = http( { 'proxy' : { 'host':'proxy','port':3128 } } )

        @param config Optionally specify configuration values that need to
                      remain consistent from one request to the next.
                      The configuration should be a dictionary.
                      Supported configuration values are as follows:
                        proxy   Container dictionary for the following items:
                          type  Type of proxy (currently, only "http")
                          host  Proxy host
                          port  Proxy service port
                          user  Optional user name to send to proxy
                          pass  Optional password to send to proxy
        """

        # store the config in object state
        self.config = config if config is not None else {}

        # set up a list of HTTP handlers
        self.handlers = []

        # check for proxy configuration, and initialize as needed
        self._check_proxy()

        # initialize a cookie jar for browser-like cookie handling
        policy = cookielib.DefaultCookiePolicy(
            rfc2965          = True,
            strict_ns_domain = cookielib.DefaultCookiePolicy.DomainStrict
        )
        cjar = cookielib.CookieJar( policy )
        cproc = urllib2.HTTPCookieProcessor( cjar )
        self.handlers.append( cproc )

        # build our custom opener and install it for future requests
        opener = urllib2.build_opener( *self.handlers )
        urllib2.install_opener( opener )
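A hedged usage sketch for the constructor above, based only on its docstring; the proxy host, port, and credentials below are placeholders, not values from the original project:

client = http({
    'proxy': {
        'type': 'http',               # only "http" is supported per the docstring
        'host': 'proxy.example.com',  # placeholder
        'port': 3128,                 # placeholder
        'user': 'alice',              # optional, placeholder
        'pass': 'secret'              # optional, placeholder
    }
})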
Code Example #8
File: Grandstream.py  Project: blink-hr/vPBX
    def _enableStaticProvisioning_BT200(self, vars):
        try:
            # Login into interface
            cookiejar = cookielib.CookieJar(
                cookielib.DefaultCookiePolicy(rfc2965=True))
            opener = urllib2.build_opener(
                urllib2.HTTPCookieProcessor(cookiejar))
            response = opener.open(
                'http://' + self._ip + '/dologin.htm',
                urllib.urlencode({
                    'Login': '******',
                    'P2': self._http_password,
                    'gnkey': '0b82'
                }))
            body = response.read()
            if 'dologin.htm' in body:
                logging.error('Endpoint %s@%s BT200 - dologin failed login' %
                              (self._vendorname, self._ip))
                return False

            # Force cookie version to 0
            for cookie in cookiejar:
                cookie.version = 0

            response = opener.open('http://' + self._ip + '/update.htm',
                                   urllib.urlencode(vars) + '&gnkey=0b82')
            body = response.read()
            if 'dologin.htm' in body:
                logging.error(
                    'Endpoint %s@%s BT200 - dologin failed to keep session' %
                    (self._vendorname, self._ip))
                return False

            return True
        except urllib2.HTTPError, e:
            logging.error(
                'Endpoint %s@%s BT200 failed to send vars to interface - %s' %
                (self._vendorname, self._ip, str(e)))
            return False
Code Example #9
    def _login(self, userId, pwd):
        """
      @summary: Attempts to log a user in
      @todo: Handle login failures
      """
        # Legacy code support.  This will go away
        self.userId = userId

        policyServer = urlparse(self.server).netloc
        policy = cookielib.DefaultCookiePolicy(
            allowed_domains=(policyServer, ))
        self.cookieJar = cookielib.LWPCookieJar(policy=policy)
        opener = urllib2.build_opener(
            urllib2.HTTPCookieProcessor(self.cookieJar))
        urllib2.install_opener(opener)

        if userId is not None and pwd is not None:
            url = "%s/login" % self.server

            urlParams = [("username", userId), ("pword", pwd)]

            self.makeRequest(url, parameters=urlParams)
Code Example #10
    def load_whitelist(self):
        '''Load the cookie jar whitelist policy.'''

        cookie_whitelist = config['cookie_whitelist']

        if cookie_whitelist:
            mkbasedir(cookie_whitelist)

        # Create cookie whitelist file if it does not exist.
        if not os.path.exists(cookie_whitelist):
            open(cookie_whitelist, 'w').close()

        # Read cookie whitelist file into list.
        file = open(cookie_whitelist, 'r')
        domain_list = [line.rstrip('\n') for line in file]
        file.close()

        # Define policy of allowed domains
        policy = cookielib.DefaultCookiePolicy(allowed_domains=domain_list)
        self.jar.set_policy(policy)

        # Save the last modified time of the whitelist.
        self._whitelistmtime = os.stat(cookie_whitelist).st_mtime
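For reference, a minimal standalone sketch of the allowed_domains behaviour relied on above; it is not taken from the project, and the domain names are illustrative:

import cookielib

# Cookies are only accepted from (and returned to) the listed domains;
# responses from any other host are rejected by the policy.
policy = cookielib.DefaultCookiePolicy(
    allowed_domains=['example.com', '.example.com'])
jar = cookielib.CookieJar(policy)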
Code Example #11
    def __login(self):
        Addon.log('logging in')
        policy = cookielib.DefaultCookiePolicy(
            rfc2965=True, strict_rfc2965_unverifiable=False)
        self.cj = cookielib.MozillaCookieJar(self.cookie_file)
        self.cj.set_policy(policy)

        if os.access(self.cookie_file, os.F_OK):
            self.cj.load(ignore_discard=True)

        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        urllib2.install_opener(opener)
        self.cj.clear_session_cookies()

        url = self.__build_url('cgi-bin/oc/manage.cgi')
        form_data = urllib.urlencode({
            'a': 'do_login',
            'force_direct': '0',
            'manage_proper': '1',
            'input_username': self.user,
            'input_password': self.password
        })
        response = self.__fetch(self.__LOGIN_URL, form_data)
        self.cj.save(ignore_discard=True)
Code Example #12
def downloadFile(options):
    if options.verbose == True:
        print '-- begin script --'

    if (options.outputDirectory != ''
            and not os.path.exists(options.outputDirectory)):
        os.makedirs(options.outputDirectory)

    urlITCBase = 'https://itunesconnect.apple.com%s'

    cj = MyCookieJar()
    cj.set_policy(cookielib.DefaultCookiePolicy(rfc2965=True))
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))

    if options.verbose == True:
        print 'Signing into iTunes Connect web site.'

    # Go to the iTunes Connect website and retrieve the
    # form action for logging into the site.
    urlWebsite = urlITCBase % '/WebObjects/iTunesConnect.woa'
    html = readHtml(opener, urlWebsite, options=options)
    match = re.search('" action="(.*)"', html)
    urlActionLogin = urlITCBase % match.group(1)

    # Login to iTunes Connect web site and go to the sales
    # report page, get the form action url and form fields.
    # Note the sales report page will actually load a blank
    # page that redirects to the static URL. Best guess here
    # is that the server is setting some session variables
    # or something.
    webFormLoginData = urllib.urlencode({
        'theAccountName': options.appleId,
        'theAccountPW': options.password,
        '1.Continue': '0'
    })
    html = readHtml(opener, urlActionLogin, webFormLoginData, options=options)
    if (html.find('Your Apple ID or password was entered incorrectly.') != -1):
        raise ITCException, 'User or password incorrect.'

    # Find the Sales and Trends URL.
    if options.verbose == True:
        print 'Accessing Sales and Trends reporting web site.'

    # Sometimes the vendor default page does not load right away.
    # This causes the script to fail, so as a work around, the
    # script will attempt to load the page 3 times before abend.
    vendorDefaultPageAttempts = 3
    while vendorDefaultPageAttempts > 0:
        vendorDefaultPageAttempts = vendorDefaultPageAttempts - 1
        urlSalesAndTrends = 'https://reportingitc.apple.com/'
        html = readHtml(opener, urlSalesAndTrends, options=options)

        # We're at the vendor default page. Might need additional work if your account
        # has more than one vendor.
        try:
            match = re.findall('"javax.faces.ViewState" value="(.*?)"', html)
            viewState = match[0]
            match = re.findall('script id="defaultVendorPage:(.*?)"', html)
            defaultVendorPage = match[0]
            ajaxName = re.sub('_2', '_0', defaultVendorPage)
            if options.debug == True:
                print 'viewState: ', viewState
                print 'defaultVendorPage: ', defaultVendorPage
                print 'ajaxName: ', ajaxName
            vendorDefaultPageAttempts = 0  # exit loop
        except:
            if vendorDefaultPageAttempts < 1:
                errMessage = 'Unable to load default vendor page.'
                if options.verbose == True:
                    print errMessage
                    raise
                else:
                    raise ITCException, errMessage

    # This may seem confusing because we just accessed the vendor default page in the
    # code above. However, the vendor default page has a piece of javascript that runs
    # once the page is loaded in the browser. The javascript does a resubmit. My guess
    # is this action is needed to set the default vendor on the server-side. Regardless
    # we must call the page again but no parsing of the HTML is needed this time around.
    urlDefaultVendorPage = 'https://reportingitc.apple.com/vendor_default.faces'
    webFormSalesReportData = urllib.urlencode({
        'AJAXREQUEST': ajaxName,
        'javax.faces.ViewState': viewState,
        'defaultVendorPage': defaultVendorPage,
        'defaultVendorPage:' + defaultVendorPage: 'defaultVendorPage:' + defaultVendorPage
    })
    html = readHtml(opener,
                    urlDefaultVendorPage,
                    webFormSalesReportData,
                    options=options)

    # Check for notification messages.
    urlDashboard = 'https://reportingitc.apple.com/subdashboard.faces'
    html = readHtml(opener, urlDashboard, options=options)
    try:
        # Note the (?s) is an inline re.DOTALL, makes . match new lines.
        match = re.findall('(?s)<div class="notification">(.*?)</span>', html)
        notificationDiv = match[0]
        match = re.findall('(?s)<td>(.*?)</td>', notificationDiv)
        notificationMessage = match[0]
        if options.verbose == True:
            print notificationMessage
    except:
        pass  # Do nothing. We're just checking for notifications.

    # Access the sales report page.
    if options.verbose == True:
        print 'Accessing sales report web page.'
    urlSalesReport = 'https://reportingitc.apple.com/sales.faces'
    html = readHtml(opener, urlSalesReport, options=options)

    # Get the form field names needed to download the report.
    try:
        match = re.findall('"javax.faces.ViewState" value="(.*?)"', html)
        viewState = match[0]
        match = re.findall('theForm:j_id_jsp_[0-9]*_6', html)
        dailyName = match[0]
        ajaxName = re.sub('._6', '_2', dailyName)
        dateName = re.sub('._6', '_8', dailyName)
        selectName = re.sub('._6', '_32', dailyName)
        if options.debug == True:
            print 'viewState: ', viewState
            print 'dailyName: ', dailyName
            print 'ajaxName: ', ajaxName
            print 'dateName: ', dateName
            print 'selectName:', selectName
    except:
        errMessage = 'Unable to load the sales report web page at this time. A number of reasons can cause this including delayed reporting, unsigned contracts, and change to the web site breaking this script. Try again later or sign into iTunes Connect and verify access.'
        if options.verbose == True:
            print errMessage
            raise
        else:
            raise ITCException, errMessage

    # Get the list of available dates.
    try:
        # Note the (?s) is an inline re.DOTALL, makes . match new lines.
        match = re.findall('(?s)<div class="pickList">(.*?)</div>', html)
        dateListAvailableDays = re.findall('<option value="(.*?)"', match[0])
        dateListAvailableWeeks = re.findall('<option value="(.*?)"', match[1])
        if options.debug == True:
            print 'dateListAvailableDays: ', dateListAvailableDays
            print 'dateListAvailableWeeks: ', dateListAvailableWeeks
    except:
        errMessage = 'Unable to retrieve the list of available dates.'
        if options.verbose == True:
            print errMessage
            raise
        else:
            raise ITCException, errMessage

    # Click through from the dashboard to the sales page.
    webFormSalesReportData = urllib.urlencode({
        'AJAXREQUEST': ajaxName,
        'theForm': 'theForm',
        'theForm:xyz': 'notnormal',
        'theForm:vendorType': 'Y',
        'theForm:datePickerSourceSelectElementSales': dateListAvailableDays[0],
        'theForm:weekPickerSourceSelectElement': dateListAvailableWeeks[0],
        'javax.faces.ViewState': viewState,
        dailyName: dailyName
    })
    html = readHtml(opener,
                    urlSalesReport,
                    webFormSalesReportData,
                    options=options)
    match = re.findall('"javax.faces.ViewState" value="(.*?)"', html)
    viewState = match[0]

    # Set the list of report dates.
    # A better approach is to grab the list of available dates
    # from the web site instead of generating the dates. Will
    # consider doing this in the future.
    reportDates = []
    if options.dateToDownload == None:
        for i in range(int(options.daysToDownload)):
            today = datetime.date.today() - datetime.timedelta(i + 1)
            reportDates.append(today)
    else:
        reportDates = [
            datetime.datetime.strptime(options.dateToDownload,
                                       '%m/%d/%Y').date()
        ]

    if options.debug == True:
        print 'reportDates: ', reportDates

    ####
    if options.verbose == True:
        print 'Downloading daily sales reports.'
    unavailableCount = 0
    filenames = []
    for downloadReportDate in reportDates:
        # Set the date within the web page.
        dateString = downloadReportDate.strftime('%m/%d/%Y')

        if dateString in dateListAvailableDays:
            if options.verbose == True:
                print 'Downloading report for: ', dateString
            webFormSalesReportData = urllib.urlencode({
                'AJAXREQUEST': ajaxName,
                'theForm': 'theForm',
                'theForm:xyz': 'notnormal',
                'theForm:vendorType': 'Y',
                'theForm:datePickerSourceSelectElementSales': dateString,
                'theForm:weekPickerSourceSelectElement': dateListAvailableWeeks[0],
                'javax.faces.ViewState': viewState,
                selectName: selectName
            })
            html = readHtml(opener, urlSalesReport, webFormSalesReportData)
            match = re.findall('"javax.faces.ViewState" value="(.*?)"', html)
            viewState = match[0]

            # And finally...we're ready to download yesterday's sales report.
            webFormSalesReportData = urllib.urlencode({
                'theForm': 'theForm',
                'theForm:xyz': 'notnormal',
                'theForm:vendorType': 'Y',
                'theForm:datePickerSourceSelectElementSales': dateString,
                'theForm:weekPickerSourceSelectElement': dateListAvailableWeeks[0],
                'javax.faces.ViewState': viewState,
                'theForm:downloadLabel2': 'theForm:downloadLabel2'
            })
            request = urllib2.Request(urlSalesReport, webFormSalesReportData)
            urlHandle = opener.open(request)
            try:
                if options.debug == True:
                    print urlHandle.info()

                # Check for the content-disposition. If present then we know we have a
                # file to download. If not present then an AttributeError exception is
                # thrown and we assume the file is not available for download.
                filename = urlHandle.info().getheader(
                    'content-disposition').split('=')[1]
                # Check for an override of the file name. If found then change the file
                # name to match the outputFormat.
                if (options.outputFormat):
                    filename = downloadReportDate.strftime(
                        options.outputFormat)

                filebuffer = urlHandle.read()
                urlHandle.close()

                if options.unzipFile == True:
                    if options.verbose == True:
                        print 'Unzipping archive file: ', filename
                    #Use GzipFile to de-gzip the data
                    ioBuffer = StringIO.StringIO(filebuffer)
                    gzipIO = gzip.GzipFile(mode='rb', fileobj=ioBuffer)
                    filebuffer = gzipIO.read()

                filename = os.path.join(options.outputDirectory, filename)
                if options.unzipFile == True and filename[-3:] == '.gz':
                    # Chop off .gz extension if not needed
                    filename = os.path.splitext(filename)[0]

                if options.verbose == True:
                    print 'Saving download file:', filename

                downloadFile = open(filename, 'wb')
                downloadFile.write(filebuffer)
                downloadFile.close()

                filenames.append(filename)
            except AttributeError:
                print '%s report is not available - try again later.' % dateString
                unavailableCount += 1
        else:
            print '%s report is not available - try again later.' % dateString
            unavailableCount += 1
    # End for downloadReportDate in reportDates:
    ####

    if unavailableCount > 0:
        raise ITCException, '%i report(s) not available - try again later' % unavailableCount

    if options.debug == True:
        os.remove(os.path.join(options.outputDirectory, "temp.html"))
    if options.verbose == True:
        print '-- end of script --'

    return filenames
Code Example #13
 def __init__(self):
     self._cookie_policy = cookielib.DefaultCookiePolicy()
     self.cj = cookielib.CookieJar(self._cookie_policy)
     self.session = requests.session()
     self.driver = None
Code Example #14
 def __init__(self, server):
     policy = cookielib.DefaultCookiePolicy(rfc2965=True)
     cookiejar = cookielib.CookieJar(policy)
     urllib2.HTTPCookieProcessor.__init__(self, cookiejar)
     self.server = server
Code Example #15
   through Python 2.6.x. It is not tested with Python 3.x.x.
"""

import sys
import re
import string
import urllib
import getopt
import httplib
import urllib2
from time import sleep
from HTMLParser import HTMLParser
# if we have Python 2.4's cookielib, use it
try:
    import cookielib
    policy = cookielib.DefaultCookiePolicy(rfc2965=True)
    cookiejar = cookielib.CookieJar(policy)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar)).open
except ImportError:
    import ClientCookie
    # if this is a new ClientCookie, we need to turn on RFC2965 cookies
    cookiejar = ClientCookie.CookieJar()
    try:
        cookiejar.set_policy(ClientCookie.DefaultCookiePolicy(rfc2965=True))
        # install an opener that uses this policy
        opener = ClientCookie.build_opener(
            ClientCookie.HTTPCookieProcessor(cookiejar))
        ClientCookie.install_opener(opener)
    except AttributeError:
        # must be an old ClientCookie, which already accepts RFC2965 cookies
        pass
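As a side note (a hedged sketch, not part of the example above): on Python 3 the cookielib and urllib2 names used throughout these examples live in http.cookiejar and urllib.request, so the equivalent opener setup looks roughly like this:

import urllib.request
import http.cookiejar

# Same RFC 2965 policy and cookie-aware opener as above, Python 3 module names.
policy = http.cookiejar.DefaultCookiePolicy(rfc2965=True)
cookiejar = http.cookiejar.CookieJar(policy)
opener = urllib.request.build_opener(
    urllib.request.HTTPCookieProcessor(cookiejar)).open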
Code Example #16
File: api.py  Project: OpenELEQ/plugin.program.ump
	def __init__(self,pt=False):
		if not os.path.exists(defs.addon_ddir):
			os.makedirs(defs.addon_ddir)
		self.pub=[]
		self.index_items=[]
		self.backwards=teamkodi.backwards()
		self.settings={}
		self.buffermode=buffering.get()
		self.log=""
		self.handle = int(sys.argv[1])
		self.ws_limit=False #web search limit
		self.defs=defs
		if self.backwards.abortRequested():sys.exit()
		self.window = ui.listwindow('select.xml', defs.addon_dir,'Default', '720p',ump=self)
		self.iwindow = ui.imagewindow('picture.xml', defs.addon_dir,"Default","720p")
		self.urlval_en=True
		self.urlval_tout=30
		self.urlval_d_size={self.defs.CT_VIDEO:1000000,self.defs.CT_AUDIO:10000,self.defs.CT_IMAGE:200}
		self.urlval_d_tout=1.5
		try:self.tm_conc=int(float(addon.getSetting("conc")))
		except:self.tm_conc=10
		self.player=None
		self.cfagents=prefs.get("cfagents")
		self.cflocks={}
		self.mirrors=[]
		self.terminate=False
		self.loaded_uprv={}
		self.checked_uids={"video":{},"audio":{},"image":{}}
		self.pt=pt
		socket.socket = proxy.getsocket()
		policy=cookielib.DefaultCookiePolicy(rfc2965=True, rfc2109_as_netscape=True, strict_rfc2965_unverifiable=False)
		self.cj=cookielib.LWPCookieJar(os.path.join(defs.addon_ddir, "cookie"))
		self.cj.set_policy(policy)
		self.dialog=xbmcgui.Dialog()
		if os.path.exists(defs.addon_cookfile):
			try:
				self.cj.load()
			except cookielib.LoadError:
				pass
			except IOError:
				pass
		if addon.getSetting("verifyssl").lower()=="false":
			self.opener = urllib2.build_opener(http.HTTPErrorProcessor,urllib2.HTTPCookieProcessor(self.cj),http.HTTPSHandler)
		else:
			self.opener = urllib2.build_opener(http.HTTPErrorProcessor,urllib2.HTTPCookieProcessor(self.cj))	
		if addon.getSetting("overrideua")=="true":
			self.ua=addon.getSetting("useragent")
		else:
			from ump import useragents
			self.ua=choice(useragents.all)
		self.opener.addheaders = [('User-agent', self.ua)]
		self.tunnel=webtunnel.tunnel(self.opener)
		query=sys.argv[2][1:]
		result=parse_qs(query)
		[self.module]= result.get('module', ["ump"])
		[self.page]= result.get('page', ["root"])
		[args]= result.get('args', ["e30="])
		try:
			self.args=json.loads(args.decode("base64"))
		except:
			try:
				self.args=json.loads(args) # old url formatting
			except:
				self._wronguri()
		for keep in ["info","art","pub"]:
			if keep in ["pub"]:default="W10="
			else: default= "e30="
			[lst]=result.get(keep, [default])
			try:
				setattr(self,keep,json.loads(lst.decode("base64")))
			except:
				try:
					setattr(self,keep,json.loads(lst))
				except:
					self._wronguri()
		[self.content_type]= result.get('content_type', ["ump"])
		self.loadable_uprv=providers.find(self.content_type,"url")
		self.stats=stats.stats()
		self.throttle=throttle.throttle(self.defs.addon_tdir)

		if prefs.get("play","flag"):
			self.refreshing=True
			prefs.set("play","flag",False)
		else:
			self.refreshing=False
		self.dialogpg=teamkodi.backwards.DialogProgressBG()
		self.dialogpg.create("UMP")
		self.tm=task.manager(self.dialogpg,self.tm_conc)
		self.stat=clicky.clicky(self)
		if not self.page=="urlselect":
			self.stat.query()
		self.identifier=identifier.identifier()
		self.container_mediatype=defs.MT_NONE
		self.dialogpg.update(100,"UMP %s:%s:%s"%(self.content_type,self.module,self.page))
Code Example #17
            stalist = self.ctlr_stat_sta()
        try:
            for sta in self.decode_json(stalist):
                if sta.has_key('mac'):
                    sta_mac_list.append(sta['mac'])
        except ValueError:
            pass
        return sta_mac_list

    def ctlr_get_sta_stat_fields_by_name(self, name, tag, stalist=""):
        if stalist == "":
            stalist = self.ctlr_stat_sta()
        #print str(stalist)
        try:
            for sta in self.decode_json(stalist):
                if sta.has_key('hostname') and name == sta['hostname']:
                    rtag = []
                    for t in tag:
                        rtag.append(sta[t])
                    return rtag
        except ValueError:
            pass


print "installing handler for correct redirect"
urlopener = urllib2.build_opener(
    urllib2.HTTPCookieProcessor(
        cookielib.CookieJar(cookielib.DefaultCookiePolicy())),
    urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
urllib2.install_opener(urlopener)
Code Example #18
 def __init__(self, policy=None):
     if policy is None:
         policy = cookielib.DefaultCookiePolicy()
     self._policy = policy
     self._cookies = {}
     self._cookies_lock = dummy_threading.RLock()