def main():
    global saw_error
    pycurl.global_init(pycurl.GLOBAL_DEFAULT)
    outf = file("/dev/null", "rb+")
    cm = pycurl.CurlMulti()
    # Set multi handle's options
    cm.setopt(pycurl.M_PIPELINING, 1)
    eh = pycurl.Curl()
    for x in range(1, 20):
        eh.setopt(pycurl.WRITEDATA, outf)
        eh.setopt(pycurl.URL, sys.argv[1])
        cm.add_handle(eh)
        while 1:
            ret, active_handles = cm.perform()
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        while active_handles:
            ret = cm.select(1.0)
            if ret == -1:
                continue
            while 1:
                ret, active_handles = cm.perform()
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
        count, good, bad = cm.info_read()
        for h, en, em in bad:
            print "Transfer to %s failed with %d, %s\n" % \
                (h.getinfo(pycurl.EFFECTIVE_URL), en, em)
            raise RuntimeError
        for h in good:
            httpcode = h.getinfo(pycurl.RESPONSE_CODE)
            if httpcode != 200:
                print "Transfer to %s failed with code %d\n" % \
                    (h.getinfo(pycurl.EFFECTIVE_URL), httpcode)
                raise RuntimeError
            else:
                print "Recd %d bytes from %s" % \
                    (h.getinfo(pycurl.SIZE_DOWNLOAD),
                     h.getinfo(pycurl.EFFECTIVE_URL))
        cm.remove_handle(eh)
        eh.reset()
    eh.close()
    cm.close()
    outf.close()
    pycurl.global_cleanup()
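# Hedged sketch (added; not from the original source): the perform/select
# dance above is the standard CurlMulti event loop. Factored out on its own,
# it looks like this; the helper name is illustrative.
def run_multi(cm):
    # Drain immediately runnable work until libcurl stops asking for more.
    while 1:
        ret, active = cm.perform()
        if ret != pycurl.E_CALL_MULTI_PERFORM:
            break
    # Then wait for socket activity and repeat until no transfers remain.
    while active:
        if cm.select(1.0) == -1:
            continue  # select() timed out; poll again
        while 1:
            ret, active = cm.perform()
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break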
def daemonize():
    """Properly daemonizes the process and closes file descriptors."""
    sys.stderr.flush()
    sys.stdout.flush()

    pid = os.fork()
    if pid != 0:
        # Nothing more to do for the parent
        sys.exit(0)

    os.setsid()
    pid = os.fork()
    if pid != 0:
        # Nothing more to do for the parent
        sys.exit(0)

    os.umask(0)
    os.chdir('/')

    dev_null = open('/dev/null', 'w')
    os.dup2(dev_null.fileno(), sys.stdin.fileno())
    os.dup2(dev_null.fileno(), sys.stdout.fileno())
    os.dup2(dev_null.fileno(), sys.stderr.fileno())

    # Re-initialize cURL. This is necessary to re-initialize the PKCS #11
    # security tokens in NSS. Otherwise any use of the SDK after the fork()
    # would lead to the error:
    #
    #   A PKCS #11 module returned CKR_DEVICE_ERROR, indicating that a
    #   problem has occurred with the token or slot.
    #
    pycurl.global_cleanup()
    pycurl.global_init(pycurl.GLOBAL_ALL)
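# Hedged usage sketch (added; not from the original source): the ordering
# around daemonize() matters. global_cleanup() invalidates any Curl handles
# created before the fork, so handles should only be created after the
# re-init. All names below are illustrative.
def daemonized_fetch_example(url):
    daemonize()                       # forks twice, then re-inits libcurl
    curl = pycurl.Curl()              # safe: created after global_init()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, lambda data: None)  # discard body
    curl.perform()
    curl.close()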
def test_global_init_ack_eintr(self):
    # the GLOBAL_ACK_EINTR flag was introduced in libcurl-7.30, but can also
    # be backported for older versions of libcurl at the distribution level
    if not util.pycurl_version_less_than(7, 30) or hasattr(pycurl, 'GLOBAL_ACK_EINTR'):
        # initialize libcurl with the GLOBAL_ACK_EINTR flag
        pycurl.global_init(pycurl.GLOBAL_ACK_EINTR)
        pycurl.global_cleanup()
def Shutdown():
    """Stops the module-global HTTP client manager.

    Must be called before quitting the program and while exactly one thread
    is running.

    """
    pycurl.global_cleanup()
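# Hedged sketch (added; not from the original source): one way to honor the
# "call before quitting" contract is to register Shutdown() with atexit.
# This is only safe if no worker threads can still be alive at interpreter
# exit, matching the single-thread requirement in the docstring.
import atexit
atexit.register(Shutdown)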
def cleanPycurl(self):
    """ make a global curl cleanup (currently unused) """
    if self.processingIds():
        return False
    pycurl.global_cleanup()
    pycurl.global_init(pycurl.GLOBAL_DEFAULT)
    self.downloaded = 0
    self.log.debug("Cleaned up pycurl")
    return True
def test_global_init_ack_eintr(self):
    # the GLOBAL_ACK_EINTR flag was introduced in libcurl-7.30, but can also
    # be backported for older versions of libcurl at the distribution level
    if util.pycurl_version_less_than(7, 30) and not hasattr(pycurl, 'GLOBAL_ACK_EINTR'):
        raise nose.plugins.skip.SkipTest('libcurl < 7.30.0 or no GLOBAL_ACK_EINTR')

    # initialize libcurl with the GLOBAL_ACK_EINTR flag
    pycurl.global_init(pycurl.GLOBAL_ACK_EINTR)
    pycurl.global_cleanup()
def test_global_init_ack_eintr(self):
    # the GLOBAL_ACK_EINTR flag was introduced in libcurl-7.30, but can also
    # be backported for older versions of libcurl at the distribution level
    if util.pycurl_version_less_than(7, 30) and not hasattr(pycurl, 'GLOBAL_ACK_EINTR'):
        raise unittest.SkipTest('libcurl < 7.30.0 or no GLOBAL_ACK_EINTR')

    # initialize libcurl with the GLOBAL_ACK_EINTR flag
    pycurl.global_init(pycurl.GLOBAL_ACK_EINTR)
    pycurl.global_cleanup()
def wrapper(*args, **kwargs):
    # curl_global_init(3) and curl_global_cleanup(3) must be called with only
    # one thread running. This check is just a safety measure -- it doesn't
    # cover all cases.
    assert threading.activeCount() == 1, \
        "Found active threads when initializing pycURL"

    pycurl.global_init(pycurl.GLOBAL_ALL)
    try:
        return fn(*args, **kwargs)
    finally:
        pycurl.global_cleanup()
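# Hedged sketch (assumption; not from the original source): wrapper() above
# reads like the inner function of a decorator closing over fn. A minimal
# decorator of that shape could look like this; the name is illustrative.
import functools
import threading

def curl_global_scope(fn):
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        assert threading.activeCount() == 1, \
            "Found active threads when initializing pycURL"
        pycurl.global_init(pycurl.GLOBAL_ALL)
        try:
            return fn(*args, **kwargs)
        finally:
            pycurl.global_cleanup()
    return wrapper

@curl_global_scope
def fetch_example():
    # any pycurl work happens inside the init/cleanup scope
    eh = pycurl.Curl()
    eh.close()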
def perform(self):
    print self.version()
    filesize = self.get_filesize()
    pycurl.global_init(pycurl.GLOBAL_ALL)  # GLOBAL_ALL must be set in normal
    if filesize == -1:
        # length not known, use single connection instead
        c = self.gen_curl()
        outfile = self.try_soutfile(self.filename)
        c.setopt(pycurl.WRITEFUNCTION, outfile.write)
        c.perform()
        outfile.close()
    else:
        curlpool = []
        blocksize = filesize / self.num_blocks + 1
        print filesize
        for p_start, p_end in [(x, x + blocksize)
                               for x in xrange(0, filesize, blocksize)]:
            curlpool.append(self.gen_curl(p_start, p_end, filesize))
        m = pycurl.CurlMulti()
        m.handles = []
        for c in curlpool:
            m.add_handle(c)
            m.handles.append(c)
        try:
            while True:
                ret, num_handles = m.perform()
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            while num_handles:
                ret = m.select(1.0)
                if ret == -1:
                    continue
                while True:
                    ret, num_handles = m.perform()
                    if ret != pycurl.E_CALL_MULTI_PERFORM:
                        break
            self.end_perform(normal=True)
            self.event.set()
        except KeyboardInterrupt:
            self.end_perform(normal=False)
        except SystemExit:
            self.end_perform(normal=False)
    pycurl.global_cleanup()
def get_filesize(self):
    if hasattr(self, 'filesize') and self.filesize is not None:
        return self.filesize
    pycurl.global_init(pycurl.GLOBAL_ALL)  # GLOBAL_ALL must be set in normal
    curl = pycurl.Curl()
    curl.setopt(pycurl.HEADER, True)
    curl.setopt(pycurl.NOBODY, True)
    curl.setopt(pycurl.URL, self.url)
    curl.setopt(pycurl.TIMEOUT, HEADER_TIMEOUT)
    b = StringIO.StringIO()
    curl.setopt(pycurl.WRITEFUNCTION, b.write)
    curl.perform()
    try:
        size = int(re.findall(r"Content-Length: (\d+)", b.getvalue())[0])
    except:
        size = -1
    pycurl.global_cleanup()
    self.filesize = size
    return size
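# Hedged alternative sketch (added; not from the original source): instead of
# regex-matching the raw header dump, libcurl can report the advertised length
# directly. CONTENT_LENGTH_DOWNLOAD returns a float, or -1 when the server
# sent no usable Content-Length. Names below are illustrative.
def get_filesize_via_getinfo(url, timeout=30):
    curl = pycurl.Curl()
    curl.setopt(pycurl.NOBODY, True)   # HEAD-style request, headers only
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.TIMEOUT, timeout)
    curl.perform()
    size = int(curl.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD))
    curl.close()
    return size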
def __init__(self, pool, cr):
    super(taobao_shop, self).__init__(pool, cr)
    # The two pycurl global functions are not thread safe, so do the one-time
    # initialization and cleanup in the main thread.
    import pycurl
    pycurl.global_init(pycurl.GLOBAL_DEFAULT)
    pycurl.global_cleanup()
def test_global_init_default(self):
    # initialize libcurl with DEFAULT flags
    pycurl.global_init(pycurl.GLOBAL_DEFAULT)
    pycurl.global_cleanup()
def cleanup(self):
    """do global cleanup, should be called when finished with pycurl"""
    pycurl.global_cleanup()
def cleanup_libcurl():
    pycurl.global_cleanup()
def main():
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    # parser.add_option('-q', '--quiet', action="store_const", const=0, dest="v", default=1, help='quiet')
    # parser.add_option('-v', '--verbose', action="store_const", const=1, dest="v", help='verbose')
    parser.add_option('-c', '--config', action='store', dest='config', default=DEFAULT_CONF,
                      help='load parameters from configfile (default: ' + DEFAULT_CONF + ')')
    parser.add_option('-t', '--tcp', action='store_const', dest='mode', const='tcp',
                      help='tcp mode (default)')
    # parser.add_option('-u', '--udp', action='store_const', dest='t', const='udp', help='udp mode')
    parser.add_option('-p', '--port', action="store", type='int', dest="port",
                      help='port to listen (default: ' + str(DEFAULT_LISTENPORT) + ')')
    parser.add_option('--url', action="store", dest="url",
                      help='URL of tunnelendpoint (default: ' + DEFAULT_URL + ')')
    parser.add_option('-d', '--dest', action="store", dest="dest",
                      help='destination to connect to (default ' + DEFAULT_TARGET + ')')
    parser.add_option('--proxy', action='store', dest='proxy', help='proxy to use')
    parser.add_option('--auth', action='store', dest='auth', help='auth with user:password')
    parser.add_option('-v', '--verbose', action='store_const', dest='verbose', const=1,
                      help='verbose')
    # parser.add_option('--no-proxy', action='store_true', dest='np', default=False, help='use no proxy (default: use proxy from env)')
    global options
    (options, args) = parser.parse_args()

    cparser = ConfigParser.ConfigParser(defaults={
        'mode': 'tcp',
        'port': DEFAULT_LISTENPORT,
        'url': DEFAULT_URL,
        'dest': DEFAULT_TARGET,
        'auth': '',
        'proxy': '',
        'verbose': 0
    })
    cparser.read(options.config)
    if cparser.has_section('pyhstopc'):
        if not options.mode:
            options.mode = cparser.get('pyhstopc', 'mode')
        if not options.port:
            options.port = cparser.getint('pyhstopc', 'port')
        if not options.url:
            options.url = cparser.get('pyhstopc', 'url')
        if not options.dest:
            options.dest = cparser.get('pyhstopc', 'dest')
        if not options.auth:
            options.auth = cparser.get('pyhstopc', 'auth')
        if not options.proxy:
            options.proxy = cparser.get('pyhstopc', 'proxy')
        try:
            if not options.verbose:
                options.verbose = cparser.getint('pyhstopc', 'verbose')
        except TypeError:
            options.verbose = 0
    cparser = None

    print 'pyhstopc Version: ' + VERSION
    print 'terminate with EOF'
    print 'start..'
    if USE_CURL:
        pycurl.global_init(pycurl.GLOBAL_ALL)
    sl = socketListener()
    sl.listen()
    input = sys.stdin.readline()
    while input:
        input = sys.stdin.readline()
    sl.terminate()
    if USE_CURL:
        pycurl.global_cleanup()
    print 'end..'
def put_data(
    config,
    filename,
    protocol,
    host,
    port,
    rel_path,
):
    try:
        inputfile = open(filename, 'rb')
    except:
        config.logger.error('Failed to open %s for reading!', filename)
        return (False, 'Invalid filename!')

    # Set size of file to be uploaded.
    size = os.path.getsize(filename)

    if port:
        url = '%s://%s:%s/%s' % (protocol, host, port, rel_path)
    else:
        url = '%s://%s/%s' % (protocol, host, rel_path)

    passphrase = ''
    try:
        pp_file = open(config.passphrase_file, 'r')
        passphrase = pp_file.readline().strip()
        pp_file.close()
    except:
        config.logger.error('Failed to read passphrase from file %s',
                            config.passphrase_file)
        return (-1, 'Failed to read passphrase from file')

    # Store output in memory
    output = StringIO.StringIO()

    # Init cURL (not strictly necessary, but for symmetry with cleanup)
    pycurl.global_init(pycurl.GLOBAL_SSL)
    curl = pycurl.Curl()
    # Never use proxy
    curl.setopt(pycurl.PROXY, "")
    curl.setopt(pycurl.HTTPHEADER, ['User-Agent: MiG HTTP PUT'])
    curl.setopt(pycurl.PUT, 1)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.MAXREDIRS, 5)
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, output.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    # Uncomment to get verbose cURL output including SSL negotiation
    # curl.setopt(curl.VERBOSE, 1)
    # We can not let the server block for very long
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)
    curl.setopt(pycurl.TIMEOUT, 10)
    curl.setopt(pycurl.INFILE, inputfile)
    curl.setopt(pycurl.INFILESIZE, size)

    if protocol == 'https':
        curl.setopt(curl.SSLCERT, config.server_cert)
        curl.setopt(curl.SSLKEY, config.server_key)
        if passphrase:
            curl.setopt(curl.SSLKEYPASSWD, passphrase)

    # Path to CA certificates
    # To use NorduGRID default certificate path set:
    # curl.setopt(curl.CAPATH, "/etc/grid-security/certificates")
    if config.ca_dir:
        curl.setopt(curl.CAPATH, config.ca_dir)
    elif config.ca_file:
        # We use our own demo CA file specified in the configuration for now
        curl.setopt(curl.CAINFO, config.ca_file)

    # Workaround for broken host certificates:
    # ###################################################
    # Do not use this, but fix host cert + CA instead! #
    # ###################################################
    # VERIFYHOST should be 2 (default) unless remote cert can not be
    # verified using CA cert.
    # curl.setopt(curl.SSL_VERIFYHOST,1)
    # Similarly VERIFYPEER will then probably need to be set to 0
    # curl.setopt(curl.SSL_VERIFYPEER,0)

    try:
        curl.perform()
    except pycurl.error as e:
        # pycurl.error is an (errorcode, errormsg) tuple
        config.logger.error('cURL command failed! %s', e[1])
        return (404, 'Error!')

    http_status = curl.getinfo(pycurl.HTTP_CODE)
    # Clean up after cURL
    curl.close()
    pycurl.global_cleanup()

    if http_status == http_success:
        config.logger.info('PUT request succeeded')
        # Go to start of buffer
        output.seek(0)
        msg = output.readlines()
    else:
        # print msg
        config.logger.warning('Server returned HTTP code %d, expected %d',
                              http_status, http_success)
    inputfile.close()
    output.close()
    return (http_status, 'Success!')
try:
    curl.perform()
except pycurl.error, e:
    print 'cURL command failed!:'
    # error is a (errorcode, errormsg) tuple
    print e[1]
    return False

status = curl.getinfo(pycurl.HTTP_CODE)
print 'HTTP code:\t', status

curl.close()
pycurl.global_cleanup()

if status == http_success:
    print '--- MiG files ---'
    print data.getvalue()
    print '--- Done ---'
    ret = True
else:
    print 'Server returned HTTP code %d, expected %d' % (status, http_success)
    ret = False
data.close()
return ret
def get_data(
    protocol,
    host,
    port,
    rel_path,
    cert,
    key,
    ca_dir,
    ca_file,
    passphrase_file='',
):
    if port:
        url = '%s://%s:%s/%s' % (protocol, host, port, rel_path)
    else:
        url = '%s://%s/%s' % (protocol, host, rel_path)

    passphrase = ''
    if passphrase_file:
        try:
            pp_file = open(passphrase_file, 'r')
            passphrase = pp_file.readline().strip()
            pp_file.close()
        except:
            print('Failed to read passphrase from %s' % passphrase_file)
            return None

    # Store output in memory
    output = StringIO.StringIO()

    # Init cURL (not strictly necessary, but for symmetry with cleanup)
    pycurl.global_init(pycurl.GLOBAL_SSL)
    curl = pycurl.Curl()
    curl.setopt(pycurl.HTTPHEADER, ['User-Agent: MiG HTTP GET'])
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.MAXREDIRS, 5)
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, output.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    # Uncomment to get verbose cURL output including SSL negotiation
    curl.setopt(curl.VERBOSE, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 30)
    curl.setopt(pycurl.TIMEOUT, 300)

    if protocol == 'https':
        curl.setopt(curl.SSLCERT, cert)
        curl.setopt(curl.SSLKEY, key)
        if passphrase:
            curl.setopt(curl.SSLKEYPASSWD, passphrase)

    # Path to CA certificates
    if ca_dir:
        curl.setopt(curl.CAPATH, ca_dir)
    elif ca_file:
        # We use our own demo CA file specified in the configuration for now
        curl.setopt(curl.CAINFO, ca_file)

    # Workaround for broken host certificates:
    # ###################################################
    # Do not use this, but fix host cert + CA instead! #
    # ###################################################
    # VERIFYHOST should be 2 (default) unless remote cert can not be
    # verified using CA cert.
    # curl.setopt(curl.SSL_VERIFYHOST,1)
    # Similarly VERIFYPEER will then probably need to be set to 0
    # curl.setopt(curl.SSL_VERIFYPEER,0)
    # TODO: Should not be necessary but mig-1 host cert has wrong subject (vcr)
    curl.setopt(curl.SSL_VERIFYHOST, 1)
    # Uncomment if server identity can't be verified from local hostcert or CA cert
    curl.setopt(curl.SSL_VERIFYPEER, 0)

    try:
        print('get_data: fetch %s' % url)
        curl.perform()
    except pycurl.error as e:
        # pycurl.error is an (errorcode, errormsg) tuple
        print('cURL command failed! %s' % e[1])
        return ''

    http_status = curl.getinfo(pycurl.HTTP_CODE)
    # Clean up after cURL
    curl.close()
    pycurl.global_cleanup()

    server_status = ''
    if http_status == http_success:
        # Go to start of buffer
        output.seek(0)
        try:
            server_status = output.readlines()
        except:
            print('Failed to parse server status')
            return None
    else:
        print('Server returned HTTP code %d, expected %d' %
              (http_status, http_success))
        return None
    output.close()
    return server_status
def createContainer(self, src=None, target_info=None, arches=None,
                    scratch=None, yum_repourls=[], branch=None,
                    push_url=None, flatpak=False, module=None):
    if not yum_repourls:
        yum_repourls = []

    this_task = self.session.getTaskInfo(self.id)
    self.logger.debug("This task: %r", this_task)
    owner_info = self.session.getUser(this_task['owner'])
    self.logger.debug("Started by %s", owner_info['name'])

    scm = My_SCM(src)
    scm.assert_allowed(self.options.allowed_scms)
    git_uri = scm.get_git_uri()
    component = scm.get_component()
    arch = None

    if not arches:
        raise ContainerError("arches aren't specified")

    create_build_args = {
        'git_uri': git_uri,
        'git_ref': scm.revision,
        'user': owner_info['name'],
        'component': component,
        'target': target_info['name'],
        'yum_repourls': yum_repourls,
        'scratch': scratch,
        'koji_task_id': self.id,
        'architecture': arch
    }
    if branch:
        create_build_args['git_branch'] = branch
    if push_url:
        create_build_args['git_push_url'] = push_url
    if flatpak:
        create_build_args['flatpak'] = True
    if module:
        create_build_args['module'] = module

    try:
        create_method = self.osbs().create_orchestrator_build
        self.logger.debug("Starting %s with params: '%s, platforms:%s'",
                          create_method, create_build_args, arches)
        build_response = create_method(platforms=arches, **create_build_args)
    except (AttributeError, OsbsValidationException):
        # Older osbs-client, or else orchestration not enabled
        create_build_args['architecture'] = arch = arches[0]
        create_method = self.osbs().create_build
        self.logger.debug("Starting %s with params: '%s'",
                          create_method, create_build_args)
        build_response = create_method(**create_build_args)

    build_id = build_response.get_build_name()
    self.logger.debug("OSBS build id: %r", build_id)

    # When builds are cancelled the builder plugin process gets SIGINT and SIGKILL
    # If osbs has started a build it should get cancelled
    def sigint_handler(*args, **kwargs):
        if not build_id:
            return
        self.logger.warn("Cannot read logs, cancelling build %s", build_id)
        self.osbs().cancel_build(build_id)

    signal.signal(signal.SIGINT, sigint_handler)

    self.logger.debug("Waiting for osbs build_id: %s to be scheduled.",
                      build_id)
    # we need to wait for kubelet to schedule the build, otherwise it's 500
    self.osbs().wait_for_build_to_get_scheduled(build_id)
    self.logger.debug("Build was scheduled")

    osbs_logs_dir = self.resultdir()
    koji.ensuredir(osbs_logs_dir)
    pid = os.fork()
    if pid:
        try:
            self._incremental_upload_logs(pid)
        except koji.ActionNotAllowed:
            pass
    else:
        full_output_name = os.path.join(osbs_logs_dir,
                                        'openshift-incremental.log')

        # Make sure curl is initialized again otherwise connections via SSL
        # fails with NSS error -8023 and curl_multi.info_read()
        # returns error code 35 (SSL CONNECT failed).
        # See http://permalink.gmane.org/gmane.comp.web.curl.library/38759
        self._osbs = None
        self.logger.debug("Running pycurl global cleanup")
        pycurl.global_cleanup()

        # Following retry code is here mainly to workaround bug which causes
        # connection drop while reading logs after about 5 minutes.
        # OpenShift bug with description:
        # https://github.com/openshift/origin/issues/2348
        # and upstream bug in Kubernetes:
        # https://github.com/GoogleCloudPlatform/kubernetes/issues/9013
        retry = 0
        max_retries = 30
        while retry < max_retries:
            try:
                self._write_incremental_logs(build_id, full_output_name)
            except Exception, error:
                self.logger.info("Error while saving incremental logs "
                                 "(retry #%d): %s", retry, error)
                retry += 1
                time.sleep(10)
                continue
            break
        else:
def GetFile():
    # Defaults
    url = 'cgi-bin/ls.py'
    base_dir = '.'

    # Just using default NorduGRID certificates for now
    os.environ['HOME'] = pwd.getpwuid(os.geteuid())[5]
    globus_dir = os.path.expanduser('~/.globus')
    cert_dir = globus_dir
    server_cert = cert_dir + '/usercert.pem'
    server_key = cert_dir + '/userkey.pem'
    passwd = ''
    MiGServer = 'https://mig-1.imada.sdu.dk'
    port = '8092'

    if len(sys.argv) > 1:
        passwd = sys.argv[1]

    data = StringIO.StringIO()

    # Init cURL (not strictly necessary, but for symmetry with cleanup)
    pycurl.global_init(pycurl.GLOBAL_SSL)
    print('cURL:\t\t', pycurl.version)
    curl = pycurl.Curl()
    curl.setopt(pycurl.HTTPHEADER, ['User-Agent: MiG HTTP GET'])
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.MAXREDIRS, 5)
    curl.setopt(pycurl.URL, MiGServer + ':' + port + '/' + url)
    curl.setopt(pycurl.WRITEFUNCTION, data.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    # Uncomment to get verbose cURL output including SSL negotiation
    curl.setopt(curl.VERBOSE, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 30)
    curl.setopt(pycurl.TIMEOUT, 300)
    # curl.setopt(curl.PORT, port)
    curl.setopt(curl.SSLCERT, server_cert)
    curl.setopt(curl.SSLKEY, server_key)
    if passwd:
        curl.setopt(curl.SSLKEYPASSWD, passwd)

    # Path to CA certificates (NorduGRID default certificate path)
    curl.setopt(curl.CAPATH, '/etc/grid-security/certificates')
    # TODO: Should not be necessary but mig-1 host cert has wrong subject (vcr)
    curl.setopt(curl.SSL_VERIFYHOST, 1)
    # Uncomment if server identity can't be verified from local hostcert or CA cert
    curl.setopt(curl.SSL_VERIFYPEER, 0)

    print('fetching:\t', url)
    print('cert:\t\t', server_cert)
    print('key:\t\t', server_key)
    print('passwd:\t\t', passwd)

    try:
        curl.perform()
    except pycurl.error as e:
        print('cURL command failed!:')
        # error is a (errorcode, errormsg) tuple
        print(e[1])
        return False

    status = curl.getinfo(pycurl.HTTP_CODE)
    print('HTTP code:\t', status)

    # Clean up after cURL
    curl.close()
    pycurl.global_cleanup()

    if status == http_success:
        print('--- MiG files ---')
        print(data.getvalue())
        print('--- Done ---')
        ret = True
    else:
        print('Server returned HTTP code %d, expected %d' %
              (status, http_success))
        ret = False
    data.close()
    return ret
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], "",
                                   ["port=", "test=", "proxy=", "cwd=",
                                    "secondary-proxy-port="])
    except getopt.GetoptError as err:
        print(str(err))
        sys.exit(2)

    test = None
    proxy_port = 12345
    proxy_executable = None
    proxy_cwd = None
    secondary_proxy_port = None
    for o, a in opts:
        if o == "--port":
            proxy_port = int(a)
        elif o == "--test":
            test = a
        elif o == "--proxy":
            proxy_executable = a
        elif o == "--cwd":
            proxy_cwd = a
        elif o == "--secondary-proxy-port":
            secondary_proxy_port = int(a)
        else:
            assert False, "unhandled option"

    assert test is not None, "test must be specified"
    assert proxy_executable is not None, "proxy executable must be specified"

    using_proxy_chaining = (secondary_proxy_port is not None)
    if test == "proxy-cycle":
        assert using_proxy_chaining, \
            "The secondary-proxy-port flag is required for the proxy-cycle test"

    cache_dir = os.path.expanduser('~/.proxy-cache-' + socket.gethostname())
    shutil.rmtree(cache_dir, ignore_errors=True)

    # Get a place to store the stdout/stderr
    log_file = tempfile.TemporaryFile()
    try:
        if not using_proxy_chaining:
            proxy = subprocess.Popen(
                [proxy_executable, "--port", str(proxy_port)],
                cwd=proxy_cwd, stdout=log_file, stderr=subprocess.STDOUT)
        else:
            proxy = subprocess.Popen(
                [proxy_executable, "--port", str(proxy_port),
                 "--proxy-server", "localhost",
                 "--proxy-port", str(secondary_proxy_port)],
                cwd=proxy_cwd, stdout=log_file, stderr=subprocess.STDOUT)
    except:
        print("Couldn't start proxy")
        return

    if using_proxy_chaining:
        try:
            secondary_proxy = subprocess.Popen(
                [proxy_executable, "--port", str(secondary_proxy_port),
                 "--proxy-server", "localhost",
                 "--proxy-port", str(proxy_port)],
                cwd=proxy_cwd, stdout=log_file, stderr=subprocess.STDOUT)
            # secondary_proxy = subprocess.Popen([proxy_executable, "--port", str(secondary_proxy_port)], cwd=proxy_cwd, stdout=log_file, stderr=subprocess.STDOUT)
        except:
            print("Couldn't start secondary proxy")
            return
    # elif test == 'proxy-chain-basic':
    #     try:
    #         exec_command = "ssh " + os.environ["USER"] + "@myth12.stanford.edu 'cd /usr/class/cs110/staff/bin/master-repos/assign6-soln/; " + proxy_executable + " --port 56565'"
    #         print exec_command
    #         secondary_proxy = subprocess.Popen(exec_command, cwd=proxy_cwd, stdout=log_file, stderr=subprocess.STDOUT)
    #     except:
    #         print("Couldn't start secondary proxy")
    #         return

    # try:
    time.sleep(5)  # Make sure the proxy is running

    pycurl.global_init(pycurl.GLOBAL_DEFAULT)
    test_runner = ProxyTest(proxy_port)
    test_runner.run_test(test)
    pycurl.global_cleanup()

    return_code = proxy.poll()  # Get the return status if it already exited (crashed)
    if return_code is None:
        # Hopefully the proxy was still running
        proxy.terminate()
        proxy.wait()
    else:
        log_file.seek(0)
        print("Proxy crashed; return code: %d, output:" % return_code)
        print(log_file.read())

    if using_proxy_chaining:
        secondary_return_code = secondary_proxy.poll()
        if secondary_return_code is None:
            secondary_proxy.terminate()
            secondary_proxy.wait()
    # except:
    #     # Make sure the proxy is dead
    #     print("Something went wrong. Killing the proxy.")
    #     proxy.kill()
    #     proxy.wait()
    #     raise
    # finally:
    shutil.rmtree(cache_dir, ignore_errors=True)  # Clean up
def main():
    signal.signal(signal.SIGHUP, signal.SIG_IGN)
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    # parser.add_option('-q', '--quiet', action="store_const", const=0, dest="v", default=1, help='quiet')
    parser.add_option('-c', '--config', action='store', dest='config', default=DEFAULT_CONF,
                      help='load parameters from configfile (default: ' + DEFAULT_CONF + ')')
    parser.add_option('-t', '--tcp', action='store_const', dest='mode', const='tcp',
                      help='tcp mode (default)')
    parser.add_option('-u', '--udp', action='store_const', dest='mode', const='udp',
                      help='udp mode')
    parser.add_option('-L', action='append', dest='forward',
                      help='forward port:remotehost:remoteport (like ssh)')
    parser.add_option('--url', action="store", dest="url", help='URL of tunnelendpoint')
    parser.add_option('--proxy', action='store', dest='proxy', help='proxy to use')
    parser.add_option('--auth', action='store', dest='auth', help='auth with user:password')
    parser.add_option('-a', '--agent', action='store', dest='agent', help='fake useragent')
    parser.add_option('-v', '--verbose', action='store_const', dest='verbose', const=1,
                      help='verbose')
    parser.add_option('--no-verify-ssl', action='store_true', dest='nv',
                      help='do not verify ssl-host')
    parser.add_option('--verify-ssl', action='store_false', dest='nv',
                      help='verify ssl-host')
    global options
    (options, args) = parser.parse_args()

    cparser = ConfigParser.ConfigParser(defaults={
        'mode': 'tcp',
        'url': DEFAULT_URL,
        'auth': '',
        'proxy': '',
        'agent': '',
        'verbose': 0,
        'verify': True
    })
    cparser.read(options.config)
    if cparser.has_section('pyhstopc'):
        if not options.url:
            options.url = cparser.get('pyhstopc', 'url')
        if not options.auth:
            options.auth = cparser.get('pyhstopc', 'auth')
        if not options.agent:
            options.agent = cparser.get('pyhstopc', 'agent')
        if not options.proxy:
            options.proxy = cparser.get('pyhstopc', 'proxy')
        if not options.forward:
            options.forward = []
            try:
                options.forward.extend(cparser.get('pyhstopc', 'forward').split(','))
            except ConfigParser.NoOptionError:
                pass
        try:
            if not options.verbose:
                options.verbose = cparser.getint('pyhstopc', 'verbose')
        except TypeError:
            options.verbose = 0
        try:
            if options.nv is None:
                options.nv = not cparser.getboolean('pyhstopc', 'verify')
        except TypeError:
            options.nv = False
    cparser = None

    tmpforward = options.forward
    options.forward = []
    for i in tmpforward:
        try:
            lport, rhost, rport = i.split(':')
            options.forward.append((int(lport.strip()), rhost.strip(),
                                    int(rport.strip()), 'tcp'))
        except (KeyError, ValueError):
            try:
                lport, rhost, rport, mode = i.split(':')
                options.forward.append((int(lport.strip()), rhost.strip(),
                                        int(rport.strip()), mode))
            except (KeyError, ValueError):
                print 'malformed forward option: ', i

    print 'pyhstopc Version: ' + VERSION
    print 'terminate with EOF'
    print 'start..'
    pycurl.global_init(pycurl.GLOBAL_ALL)
    sls = []
    for i in range(len(options.forward)):
        sl = socketListener(i)
        sl.listen()
        sls.append(sl)  # keep track so the listeners can be terminated below
    try:
        input = sys.stdin.readline()
        while input:
            input = sys.stdin.readline()
    except KeyboardInterrupt:
        print 'interrupted'
    for sl in sls:
        sl.terminate()
    pycurl.global_cleanup()
    print 'end..'
def __del__(self):
    pycurl.global_cleanup()
def PutFile():
    srcdir = '.'
    filename = 'testfile'
    protocol = 'https'
    host = 'mig-1.imada.sdu.dk'
    port = ''
    filepath = srcdir + '/' + filename
    try:
        inputfile = open(filepath, 'rb')
    except:
        print('Error: Failed to open %s for reading!' % filepath)
        return (False, 'Invalid filename!')

    # Set size of file to be uploaded.
    size = os.path.getsize(filepath)

    if port:
        url = '%s://%s:%s/%s' % (protocol, host, port, filename)
    else:
        url = '%s://%s/%s' % (protocol, host, filename)

    # TODO: change to 'real' server certs
    # Just using default NorduGRID certificates for now
    os.environ['HOME'] = pwd.getpwuid(os.geteuid())[5]
    globus_dir = os.path.expanduser('~/.globus')
    cert_dir = globus_dir
    server_cert = cert_dir + '/usercert.pem'
    server_key = cert_dir + '/userkey.pem'
    passwd_file = 'cert_pass'
    passwd = ''
    try:
        pw_file = open(passwd_file, 'r')
        passwd = pw_file.readline().strip()
        pw_file.close()
    except:
        print('Failed to read password from file!')
        return ''

    # Store output in memory
    output = StringIO.StringIO()

    # Init cURL (not strictly necessary, but for symmetry with cleanup)
    pycurl.global_init(pycurl.GLOBAL_SSL)
    curl = pycurl.Curl()
    curl.setopt(pycurl.HTTPHEADER, ['User-Agent: MiG HTTP PUT'])
    curl.setopt(pycurl.PUT, 1)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.MAXREDIRS, 5)
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, output.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    # Uncomment to get verbose cURL output including SSL negotiation
    curl.setopt(curl.VERBOSE, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 30)
    curl.setopt(pycurl.TIMEOUT, 300)
    # curl.setopt(curl.PORT, port)
    curl.setopt(pycurl.INFILE, inputfile)
    curl.setopt(pycurl.INFILESIZE, size)

    if protocol == 'https':
        curl.setopt(curl.SSLCERT, server_cert)
        curl.setopt(curl.SSLKEY, server_key)
        if passwd:
            curl.setopt(curl.SSLKEYPASSWD, passwd)

    # Path to CA certificates (NorduGRID default certificate path)
    curl.setopt(curl.CAPATH, '/etc/grid-security/certificates')
    # TODO: Should not be necessary but mig-1 host cert has wrong subject (vcr)
    curl.setopt(curl.SSL_VERIFYHOST, 1)
    # Uncomment if server identity can't be verified from local hostcert or CA cert
    curl.setopt(curl.SSL_VERIFYPEER, 0)

    # TODO: uncomment the following to actually execute upload
    try:
        curl.perform()
    except pycurl.error as e:
        # pycurl.error is an (errorcode, errormsg) tuple
        print('Error: cURL command failed! %s' % e[1])
        return (404, 'Error!')

    status = curl.getinfo(pycurl.HTTP_CODE)
    # print "HTTP code:\t", status

    # Clean up after cURL
    curl.close()
    pycurl.global_cleanup()

    if status == http_success:
        print('PUT request succeeded')
        # Go to start of buffer
        output.seek(0)
        msg = output.readlines()
        print(msg)
    else:
        print('Warning: Server returned HTTP code %d, expected %d'
              % (status, http_success))
    inputfile.close()
    output.close()

    # TODO: real curl PUT request
    # TMP!
    server_status = (200, 'Success')
    return server_status
try:
    print 'get_data: fetch %s' % url
    curl.perform()
except pycurl.error, e:
    # pycurl.error is an (errorcode, errormsg) tuple
    print 'cURL command failed! %s' % e[1]
    return ''

http_status = curl.getinfo(pycurl.HTTP_CODE)

# Clean up after cURL
curl.close()
pycurl.global_cleanup()

server_status = ''
if http_status == http_success:
    # Go to start of buffer
    output.seek(0)
    try:
        server_status = output.readlines()
    except:
        print 'Failed to parse server status'
        return None
else:
    print 'Server returned HTTP code %d, expected %d' % (http_status,
                                                         http_success)
def process(self, args):
    # Perform any base class processing.
    if not super(JsonProcess, self).process(args):
        return

    # Check to see if the JSON file exists.
    if args.json == "-":
        json = {}
    elif os.path.isfile(args.json):
        self.log.info("Using JSON file: " + args.json)
        stream = open(args.json, 'r')
        json = simplejson.load(stream)
        stream.close()
    else:
        self.log.info("Creating JSON file: " + args.json)
        json = {}

    # If the file exists and we have the enable flag, then we
    # check to see if we are going to force writing the file.
    if "enable-tmdb" in json and not args.force:
        self.log.info("Information already cached, skipping")
        return False

    # If the ID is 0 or less, then we disable it.
    if args.id <= 0:
        # Remove any existing JSON data and disable TMDB.
        json["enable-tmdb"] = False
        if "tmdb" in json:
            del json["tmdb"]
    else:
        # Set up the configuration for TMDB.
        self.configure()
        url = "http://api.themoviedb.org/3/movie/{0}?api_key={1}".format(
            args.id, args.api_key)
        tmdb_json = self.get_json(url)

        # Insert the TMDB JSON data into the JSON.
        json["enable-tmdb"] = True
        json["tmdb"] = tmdb_json

    # Now that we are done, get the formatted JSON file.
    formatted = simplejson.dumps(json, indent=4, sort_keys=True)

    # Figure out how to output the file.
    if not args.output:
        args.output = args.json

    if args.output == "-":
        # Just print it to the output.
        print formatted
    else:
        # Open the stream for writing.
        stream = open(args.output, "w")
        simplejson.dump(json, stream, sort_keys=True, indent=4)
        stream.close()

    # Finish up the PyCurl library.
    pycurl.global_cleanup()
def get_data(
    config,
    protocol,
    host,
    port,
    rel_path,
):
    if port:
        url = '%s://%s:%s/%s' % (protocol, host, port, rel_path)
    else:
        url = '%s://%s/%s' % (protocol, host, rel_path)

    passphrase = ''
    try:
        pp_file = open(config.passphrase_file, 'r')
        passphrase = pp_file.readline().strip()
        pp_file.close()
    except:
        config.logger.error('Failed to read passphrase from %s',
                            config.passphrase_file)
        return None

    # Store output in memory
    output = StringIO.StringIO()

    # Init cURL (not strictly necessary, but for symmetry with cleanup)
    pycurl.global_init(pycurl.GLOBAL_SSL)
    curl = pycurl.Curl()
    # Never use proxy
    curl.setopt(pycurl.PROXY, "")
    curl.setopt(pycurl.HTTPHEADER, ['User-Agent: MiG HTTP GET'])
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.MAXREDIRS, 5)
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, output.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    # Uncomment to get verbose cURL output including SSL negotiation
    # curl.setopt(curl.VERBOSE, 1)
    # TODO: read timeout values from config?
    # We can not allow the server to block for very long
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)
    curl.setopt(pycurl.TIMEOUT, 10)

    if protocol == 'https':
        curl.setopt(curl.SSLCERT, config.server_cert)
        curl.setopt(curl.SSLKEY, config.server_key)
        if passphrase:
            curl.setopt(curl.SSLKEYPASSWD, passphrase)

    # Path to CA certificates
    # To use NorduGRID default certificate path set:
    # curl.setopt(curl.CAPATH, "/etc/grid-security/certificates")
    if config.ca_dir:
        curl.setopt(curl.CAPATH, config.ca_dir)
    elif config.ca_file:
        # We use our own demo CA file specified in the configuration for now
        curl.setopt(curl.CAINFO, config.ca_file)

    # Workaround for broken host certificates:
    # ###################################################
    # Do not use this, but fix host cert + CA instead! #
    # ###################################################
    # VERIFYHOST should be 2 (default) unless remote cert can not be
    # verified using CA cert.
    # curl.setopt(curl.SSL_VERIFYHOST,1)
    # Similarly VERIFYPEER will then probably need to be set to 0
    # curl.setopt(curl.SSL_VERIFYPEER,0)

    try:
        config.logger.info('get_data: fetch %s', url)
        curl.perform()
    except pycurl.error as e:
        # pycurl.error is an (errorcode, errormsg) tuple
        config.logger.error('cURL command failed! %s', e[1])
        return ''

    http_status = curl.getinfo(pycurl.HTTP_CODE)
    # Clean up after cURL
    curl.close()
    pycurl.global_cleanup()

    server_status = ConfigParser.ConfigParser()
    if http_status == http_success:
        # Go to start of buffer
        output.seek(0)
        try:
            server_status.readfp(output)
        except:
            config.logger.error('Failed to parse server status')
            return None
    else:
        config.logger.error('Server returned HTTP code %d, expected %d',
                            http_status, http_success)
        return None
    output.close()
    return server_status
def createContainer(self, src=None, target_info=None, arches=None,
                    scratch=None, isolated=None, yum_repourls=[],
                    branch=None, push_url=None, koji_parent_build=None,
                    release=None):
    if not yum_repourls:
        yum_repourls = []

    this_task = self.session.getTaskInfo(self.id)
    self.logger.debug("This task: %r", this_task)
    owner_info = self.session.getUser(this_task['owner'])
    self.logger.debug("Started by %s", owner_info['name'])

    scm = My_SCM(src)
    scm.assert_allowed(self.options.allowed_scms)
    git_uri = scm.get_git_uri()
    component = scm.get_component()
    arch = None

    if not arches:
        raise ContainerError("arches aren't specified")

    create_build_args = {
        'git_uri': git_uri,
        'git_ref': scm.revision,
        'user': owner_info['name'],
        'component': component,
        'target': target_info['name'],
        'yum_repourls': yum_repourls,
        'scratch': scratch,
        'koji_task_id': self.id,
        'architecture': arch
    }
    if branch:
        create_build_args['git_branch'] = branch
    if push_url:
        create_build_args['git_push_url'] = push_url

    try:
        orchestrator_create_build_args = create_build_args.copy()
        orchestrator_create_build_args['platforms'] = arches
        if koji_parent_build:
            orchestrator_create_build_args['koji_parent_build'] = koji_parent_build
        if isolated:
            orchestrator_create_build_args['isolated'] = isolated
        if release:
            orchestrator_create_build_args['release'] = release
        create_method = self.osbs().create_orchestrator_build
        self.logger.debug("Starting %s with params: '%s'",
                          create_method, orchestrator_create_build_args)
        build_response = create_method(**orchestrator_create_build_args)
    except (AttributeError, OsbsOrchestratorNotEnabled):
        # Older osbs-client, or else orchestration not enabled
        create_build_args['architecture'] = arch = arches[0]
        create_method = self.osbs().create_build
        self.logger.debug("Starting %s with params: '%s'",
                          create_method, create_build_args)
        build_response = create_method(**create_build_args)

    build_id = build_response.get_build_name()
    self.logger.debug("OSBS build id: %r", build_id)

    # When builds are cancelled the builder plugin process gets SIGINT and SIGKILL
    # If osbs has started a build it should get cancelled
    def sigint_handler(*args, **kwargs):
        if not build_id:
            return
        self.logger.warn("Cannot read logs, cancelling build %s", build_id)
        self.osbs().cancel_build(build_id)

    signal.signal(signal.SIGINT, sigint_handler)

    self.logger.debug("Waiting for osbs build_id: %s to be scheduled.",
                      build_id)
    # we need to wait for kubelet to schedule the build, otherwise it's 500
    self.osbs().wait_for_build_to_get_scheduled(build_id)
    self.logger.debug("Build was scheduled")

    osbs_logs_dir = self.resultdir()
    koji.ensuredir(osbs_logs_dir)
    pid = os.fork()
    if pid:
        try:
            self._incremental_upload_logs(pid)
        except koji.ActionNotAllowed:
            pass
    else:
        full_output_name = os.path.join(osbs_logs_dir,
                                        'openshift-incremental.log')

        # Make sure curl is initialized again otherwise connections via SSL
        # fails with NSS error -8023 and curl_multi.info_read()
        # returns error code 35 (SSL CONNECT failed).
        # See http://permalink.gmane.org/gmane.comp.web.curl.library/38759
        self._osbs = None
        self.logger.debug("Running pycurl global cleanup")
        pycurl.global_cleanup()

        # Following retry code is here mainly to workaround bug which causes
        # connection drop while reading logs after about 5 minutes.
        # OpenShift bug with description:
        # https://github.com/openshift/origin/issues/2348
        # and upstream bug in Kubernetes:
        # https://github.com/GoogleCloudPlatform/kubernetes/issues/9013
        retry = 0
        max_retries = 30
        while retry < max_retries:
            try:
                self._write_incremental_logs(build_id, full_output_name)
            except Exception, error:
                self.logger.info("Error while saving incremental logs "
                                 "(retry #%d): %s", retry, error)
                retry += 1
                time.sleep(10)
                continue
            break
        else: