def post(title, text, tags, YVI_LOGIN, YVI_PASSWORD, YVI_USER_ID=None):
    """Log in to yvision.kz and publish a new article.

    title/text/tags -- article fields; YVI_LOGIN/YVI_PASSWORD -- credentials;
    YVI_USER_ID -- optional hidden user id some form versions require.
    Cookies are carried between requests by ClientCookie's global opener.
    """
    # --- step 1: authenticate -------------------------------------------
    auth_page = ClientCookie.urlopen(ClientCookie.Request("http://yvision.kz/auth/"))
    login_form = ParseResponse(auth_page, backwards_compat=False)[1]
    login_form.action = 'http://yvision.kz/ajax/auth/login.php'
    login_form['login'] = YVI_LOGIN
    login_form['password'] = YVI_PASSWORD
    ClientCookie.urlopen(login_form.click())

    # --- step 2: open the "add article" page ----------------------------
    add_url = 'http://%s.yvision.kz/manage/article/add' % YVI_LOGIN
    #request = ClientCookie.Request('http://%s.yvision.kz/manage/article/edit/%s' % (YVI_LOGIN,POST_ID))
    add_page = ClientCookie.urlopen(ClientCookie.Request(add_url))

    # --- step 3: fill in the article form and submit --------------------
    article_form = ParseResponse(add_page, backwards_compat=False)[2]
    article_form.action = 'http://%s.yvision.kz/ajax/post/article.php?publicate=1' % YVI_LOGIN
    article_form['blog_title'] = title
    article_form['blog_post'] = text
    article_form['blog_tags'] = tags
    if YVI_USER_ID is not None:
        # hidden control so the backend attributes the post to this user
        article_form.new_control('hidden', 'user-id',
                                 {'id': 'user-id', 'value': YVI_USER_ID})
    # the server only checks presence of these buttons, not their values
    article_form.new_control('hidden', 'save', {'value': 'asd'})
    article_form.new_control('hidden', 'saveexit', {'value': 'asdf'})
    ClientCookie.urlopen(article_form.click())
def urlretrieve(self, url, cachetime=None, localfile=None, ext=None, postdata=None):
    """Download *url* into the local cache and return the local file path.

    Honours a previously written cache meta file: sends If-None-Match /
    If-Modified-Since and, on HTTP 304, returns the cached copy instead of
    re-downloading.  Progress is reported through self.onDataRetrieved();
    returning a falsy value from that callback cancels the download.
    Returns None when the download was cancelled or failed.

    NOTE(review): this file was recovered from a flattened (one-line) form;
    the indentation below is a reconstruction — verify the nesting of the
    marked spots against the original before relying on it.
    """
    url = self.getFullUrl(url)
    urlmetafile = self.url2cachemetafile(url)
    if cachetime is None:
        cachetime = self.defaultCachetime
    metainfo = self.getCacheMeta(urlmetafile)  # None when nothing cached yet
    furl = None
    fcache = None
    isDownloadCompleted = False
    try:
        # initial progress callback: 0 bytes, size unknown, no local file yet
        self.onDataRetrieved(0, None, url, '')
        # global socket timeout is process-wide; restored in the finally block
        oldtimeout = socket.getdefaulttimeout()
        socket.setdefaulttimeout(self.socketTimeout)
        # parseAuthUrl presumably returns (clean_url, user, password) — TODO confirm
        authurl = parseAuthUrl(url)
        if len(authurl[1]) > 0:
            #todo use HTTPBasicAuthHandler instead....
            url = authurl[0]
            # encodestring appends a trailing newline; [:-1] strips it
            base64string = base64.encodestring('%s:%s' % (authurl[1], authurl[2]))[:-1]
            authheader = "Basic %s" % base64string
        request = ClientCookie.Request(url)
        request.add_header('User-Agent', self.userAgent)
        if len(authurl[1]) > 0:
            request.add_header("Authorization", authheader)
        #if len(self.urlContext)>0:  #TODO: not always
        #    request.add_header('Referer',self.urlContext)
        if not (metainfo is None):
            # conditional-request headers; each key may be absent in the meta file
            try:
                etag = metainfo['ETag']
                request.add_header('If-None-Match', etag)
            except:
                pass
            try:
                lastmodified = metainfo['Last-Modified']
                request.add_header('If-Modified-Since', lastmodified)
            except:
                pass
        furl = self.opener.open(request, postdata)
        info = furl.info()
        if not (metainfo is None):
            if hasattr(furl, 'code') and furl.code == 304:
                # 304 Not Modified: serve the cached copy
                self.urlContext = metainfo['CM-UrlContext']
                temp = os.path.split(metainfo['CM-Localfile'])
                print('using cache: ' + temp[1])
                isDownloadCompleted = True
                if not (localfile is None):
                    # caller asked for a specific name: copy the cached file there,
                    # re-attaching the cached file's extension if it differs
                    nameext = os.path.splitext(metainfo['CM-Localfile'])
                    if not (localfile.lower() == nameext[0].lower()):
                        localfile = localfile + nameext[1]
                    shutil.copyfile(metainfo['CM-Localfile'], localfile)
                    return localfile
                return metainfo['CM-Localfile']
            else:
                # content changed: discard the stale cache entry
                self.flushCache(url)
        try:
            totalSize = int(info['Content-Length'])
        except:
            totalSize = None  # server did not send a usable length
        #------------ construct local file name ---------
        xfname = os.path.splitext(urltoxfilename(url))  #tuple: suggested (filename,ext)
        xfname = [xfname[0], xfname[1]]  #otherwise you cannot write to it
        try:
            mimetype = info['Content-Type'].split(';')  # also understand "Content-Type: text/html; charset=utf-8"
            mimetype = mimetype[0].strip()
            mimeext = mimetypes.guess_extension(mimetype)
            if (not (mimeext is None)) and (len(mimeext) > 0):
                if mimeext == '.m1v':
                    # guess_extension maps video/mpeg to .m1v; prefer the common .mpg
                    mimeext = '.mpg'
                xfname[1] = mimeext  #override the one based on url alone
        except:
            pass
        if not (ext is None):
            xfname[1] = ext  #override with manual extension
        ext = xfname[1]
        xfname = xfname[0]
        if len(ext) > 0:
            if not (ext[0] == '.'):
                ext = '.' + ext
        # NOTE(review): ambiguous nesting in the flattened source — the
        # truncation may belong inside the `if len(ext) > 0` block.
        ext = ext[0:7]  #do not allow so long extensions... Just truncate
        if localfile is None:
            #then autogenerate a file name for the cache
            localfile = self.cacheFolder + xfname + ext
            i = 1
            while fileExists(localfile):
                i = min(i * 10, 100000)  #add a random number to minimize fileexist checks
                localfile = self.cacheFolder + xfname[0:30] + '[' + str(random.randint(0, i - 1)) + ']' + ext
        else:
            localfile = localfile + ext
        #------------------------------------------------
        fcache = file(localfile, 'wb')  # Python 2 built-in; open() in Python 3
        iscanceled = not self.onDataRetrieved(0, totalSize, url, localfile)
        data = '...'  # non-empty sentinel so the loop body runs at least once
        blockSize = 8192
        pval = 0  # bytes received so far
        while len(data) > 0:
            if not totalSize is None:
                if pval > totalSize:
                    # server lied about Content-Length; keep the progress bar sane
                    totalSize = pval * 2
            data = furl.read(blockSize)
            pval = pval + len(data)
            if len(data) > 0:
                fcache.write(data)
            if len(data) < blockSize:
                break  # short read == end of stream
            iscanceled = not self.onDataRetrieved(pval, totalSize, url, localfile)
            if iscanceled:
                break
        isDownloadCompleted = not iscanceled
        self.urlContext = furl.url  # remember final URL (after redirects)
    finally:
        self.onDownloadFinished(isDownloadCompleted)
        try:
            if not fcache is None:
                fcache.close()
            if not furl is None:
                furl.close()
            socket.setdefaulttimeout(oldtimeout)
            if not isDownloadCompleted:
                # don't leave a partial file in the cache
                os.remove(localfile)
        except:
            pass
    if not isDownloadCompleted:
        return None
    #------------- write url meta file ------------
    #TODO: maybe do something if info['cache-control']=private?
    info['Content-Length'] = str(pval)  # actual byte count, not the claimed one
    info['CM-Localfile'] = localfile
    info['CM-urlContext'] = self.urlContext
    info['CM-CacheTime'] = str(cachetime)
    info['CM-TimeStamp'] = str(time.time())
    info['CM-url'] = url
    fuc = file(urlmetafile, 'wb')
    fuc.write(str(info))
    fuc.close()
    return localfile
def ping_url(url, n=1):
    """Fetch *url* n times through ClientCookie, discarding each response."""
    request = ClientCookie.Request(url)
    hits = 0
    while hits < n:
        ClientCookie.urlopen(request)
        hits += 1