def _post(url, params=None, req_headers=None, files=None, additions="", req_timeout=None):
    """POST `params` (plus optional multipart `files`) to `url` and return the raw response body.

    NOTE(review): Python 2 code (`urllib2`, `unicode`) -- kept consistent with the file.
    """
    # None sentinels instead of mutable defaults: the original shared one dict
    # across all calls, so req_headers.update(files_headers) below leaked the
    # multipart headers into every subsequent call.
    params = {} if params is None else params
    req_headers = {} if req_headers is None else req_headers
    files = [] if files is None else files
    if files:
        files_headers, files_data = utils.encode_multipart_formdata(params, files)
        # params are folded into the multipart body; clear them so they are not
        # double-encoded into the urlencoded portion below.
        params = {}
        req_headers.update(files_headers)
        additions += files_data
    urlopen = get_urlopen()
    # urlencode needs byte strings: encode any unicode values (and keys) as UTF-8.
    params = dict(
        (k.encode("utf8"), v.encode("utf8") if isinstance(v, unicode) else v)
        for k, v in params.items()
    )
    request = urllib2.Request(url, urlencode(params) + additions, headers=req_headers)
    ret = urlopen(request, timeout=req_timeout).read()
    return ret
def _post(url, params=None, req_headers=None, files=None, additions='', req_timeout=None):
    """POST `params` (plus optional multipart `files`) to `url` and return the raw response body.

    NOTE(review): Python 2 code (`urllib2`, `unicode`) -- kept consistent with the file.
    """
    # None sentinels instead of mutable defaults: the original shared one dict
    # across all calls, so req_headers.update(files_headers) below leaked the
    # multipart headers into every subsequent call.
    params = {} if params is None else params
    req_headers = {} if req_headers is None else req_headers
    files = [] if files is None else files
    if files:
        files_headers, files_data = utils.encode_multipart_formdata(params, files)
        # params are folded into the multipart body; clear them so they are not
        # double-encoded into the urlencoded portion below.
        params = {}
        req_headers.update(files_headers)
        additions += files_data
    urlopen = get_urlopen()
    # urlencode needs byte strings: encode any unicode values (and keys) as UTF-8.
    params = dict(
        (k.encode('utf8'), v.encode('utf8') if isinstance(v, unicode) else v)
        for k, v in params.items()
    )
    request = urllib2.Request(url, urlencode(params) + additions, headers=req_headers)
    ret = urlopen(request, timeout=req_timeout).read()
    return ret
def _post(url, params=None, req_headers=None, files=None, additions="", req_timeout=None):
    """POST `params` (plus optional multipart `files`) to `url`, honouring the
    http-proxy preferences, and return the raw response body.

    NOTE(review): Python 2 code (`urllib2`, `unicode`) -- kept consistent with the file.
    """
    # None sentinels instead of mutable defaults: the original shared one dict
    # across all calls, so req_headers.update(files_headers) below leaked the
    # multipart headers into every subsequent call.
    params = {} if params is None else params
    req_headers = {} if req_headers is None else req_headers
    files = [] if files is None else files
    if files:
        files_headers, files_data = utils.encode_multipart_formdata(params, files)
        # params are folded into the multipart body; clear them so they are not
        # double-encoded into the urlencoded portion below.
        params = {}
        req_headers.update(files_headers)
        additions += files_data
    urlopen = urllib2.urlopen
    if get_prefs("use_http_proxy"):
        # Route the request through the user-configured HTTP proxy.
        proxy_support = urllib2.ProxyHandler(
            {"http": get_prefs("http_proxy_host") + ":" + str(get_prefs("http_proxy_port"))}
        )
        urlopen = urllib2.build_opener(proxy_support).open
    # urlencode needs byte strings: encode any unicode values (and keys) as UTF-8.
    params = dict(
        (k.encode("utf8"), v.encode("utf8") if isinstance(v, unicode) else v)
        for k, v in params.items()
    )
    request = urllib2.Request(url, urlencode(params) + additions, headers=req_headers)
    ret = urlopen(request, timeout=req_timeout).read()
    return ret
def _post(url, params=None, req_headers=None, files=None, additions=''):
    """POST `params` (plus optional multipart `files`) to `url`, honouring the
    configured http proxy, and return the raw response body.

    NOTE(review): Python 2 code (`urllib2`, `unicode`) -- kept consistent with the file.
    """
    # None sentinels instead of mutable defaults: the original shared one dict
    # across all calls, so req_headers.update(files_headers) below leaked the
    # multipart headers into every subsequent call.
    params = {} if params is None else params
    req_headers = {} if req_headers is None else req_headers
    files = [] if files is None else files
    if files:
        files_headers, files_data = utils.encode_multipart_formdata(params, files)
        # params are folded into the multipart body; clear them so they are not
        # double-encoded into the urlencoded portion below.
        params = {}
        req_headers.update(files_headers)
        additions += files_data
    urlopen = urllib2.urlopen
    if config.use_http_proxy:
        # Route the request through the configured HTTP proxy.
        proxy_support = urllib2.ProxyHandler(
            {"http": config.http_proxy_host + ':' + str(config.http_proxy_port)}
        )
        urlopen = urllib2.build_opener(proxy_support).open
    # urlencode needs byte strings: encode any unicode values (and keys) as UTF-8.
    params = dict(
        (k.encode('utf8'), v.encode('utf8') if isinstance(v, unicode) else v)
        for k, v in params.items()
    )
    request = urllib2.Request(url, urlencode(params) + additions, headers=req_headers)
    ret = urlopen(request).read()
    return ret
def _import():
    """Drain the module-level `files` queue: upload up to NUM_PROCS images
    concurrently via the async HTTP client, and stop the IOLoop once the
    queue is empty.
    """
    global current_file_count
    # Kick off uploads until we hit the concurrency cap or run out of files.
    while len(files) > 0 and current_file_count < NUM_PROCS:
        filename = files.pop()
        current_file_count += 1
        # Open in binary mode: image payloads are bytes, not text (text mode
        # would corrupt the data via newline translation on some platforms).
        with open(filename, 'rb') as fh:
            filedata = fh.read()
        upload_field = ('image', os.path.split(filename)[1], filedata)
        content_type, body = utils.encode_multipart_formdata(
            [('secret', settings.SECRET)], [upload_field])
        headers = {'Content-Type': content_type}
        req = tornado.httpclient.HTTPRequest(
            url='http://photomap.local/upload/', method="POST",
            body=body, headers=headers)
        # _on_add decrements the in-flight count / reschedules; bind the
        # filename so the callback knows which upload completed.
        client.fetch(req, callback=partial(_on_add, filename))
    if not files:
        logging.info('stopping IOLoop')
        ioloop.IOLoop.instance().stop()
    return