def _resolve_url(url): return url # this processing causes too big delays last_url = url last_host = None last_type = None for ind in range(12): try: request = urllib2.Request(last_url) last_host = request.get_host() last_type = request.get_type() if last_host not in RESOLVE_HOSTS: return last_url opener = urllib2.OpenerDirector() opener.add_handler(urllib2.HTTPHandler()) opener.add_handler(urllib2.HTTPSHandler()) opener.add_handler(urllib2.HTTPDefaultErrorHandler()) try: res = opener.open(HeadRequest(last_url), timeout = RESOLVE_TIMEOUT) res.close() except urllib2.URLError, exc: return last_url except urllib2.HTTPError, exc: return last_url redirs = res.info().getheaders('location') if not redirs: return last_url last_url = redirs[0] if last_url.startswith('/'): last_url = last_type + '://' + last_host + last_url
def open_url(url):
    # Fetch `url` and return the response body as a string.
    #
    # HTTPS URLs go through a hand-built opener with a browser-like
    # User-Agent; everything else goes through urllib2's default opener.
    # NOTE(review): the HTTPS branch installs no redirect handler, so https
    # redirects are not followed, while the plain-http branch (urlopen)
    # does follow them -- confirm this asymmetry is intended.
    if (url[:5] == 'https'):
        #print ssl.OPENSSL_VERSION
        #httplib.HTTPSConnection.connect = connect_patched
        #ssl.wrap_socket = wrap_socket_patched
        #req = urllib2.Request(url=url)
        #req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
        #response = urllib2.urlopen(req)
        opener = urllib2.OpenerDirector()
        opener.add_handler(urllib2.HTTPSHandler())
        opener.add_handler(urllib2.HTTPDefaultErrorHandler())
        # addheaders is applied by the handlers to every request this
        # opener makes.
        opener.addheaders = [(
            'User-Agent',
            'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
        )]
        fetch_timeout = 100  # seconds
        response = opener.open(url, None, fetch_timeout)
    else:
        req = urllib2.Request(url)
        req.add_header(
            'User-Agent',
            'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
        )
        response = urllib2.urlopen(req)
    link = response.read()
    response.close()
    return link
def main(output_dir):
    """Renders .soy docs served on localhost:9811 into static files in output_dir.

    Walks the current directory: every *.soy file (except __*-prefixed ones)
    is fetched as the corresponding .html page from the local doc server,
    and *.css / *.js files are copied through verbatim.
    """
    # Create opener.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    # Iterate over the files in the docs directory and copy them, as appropriate.
    for root, dirs, files in os.walk('.'):
        for file_name in files:
            if file_name.endswith('.soy') and not file_name.startswith('__'):
                # Strip the './' prefix, if appropriate.
                if root.startswith('./'):
                    root = root[2:]
                # Construct the URL where the .soy file is being served.
                soy_file = file_name
                html_file = root + '/' + soy_file[:-len('.soy')] + '.html'
                url = 'http://localhost:9811/' + html_file
                # Fetch url and copy its contents to output_dir.
                req = urllib2.Request(url)
                res = opener.open(req)
                html = res.read()
                copy_to_output_dir(html_file, output_dir, html)
            elif file_name.endswith('.css') or file_name.endswith('.js'):
                # Copy the static resource to output_dir.
                relative_path = os.path.join(root, file_name)
                with open(relative_path) as resource_file:
                    resource = resource_file.read()
                copy_to_output_dir(relative_path, output_dir, resource)
def __init__(self, debug=False):
    '''Build the urllib2 opener (with cookie support) used by this session.

    Args:
        debug: when True, the HTTP/HTTPS handlers are created with
            debuglevel=1 so urllib2 prints raw request/response traffic.
    '''
    super(LiSession, self).__init__()
    self.cookiejar = cookielib.CookieJar()
    self.urlopener = urllib2.OpenerDirector()
    debug_level = 0
    if debug:
        debug_level = 1
    self.debug = debug
    #building the urllib2 session opener
    try:
        self.urlopener.add_handler(
            urllib2.HTTPHandler(debuglevel=debug_level))
        self.urlopener.add_handler(
            urllib2.HTTPSHandler(debuglevel=debug_level))
        self.urlopener.add_handler(urllib2.HTTPDefaultErrorHandler())
        self.urlopener.add_handler(urllib2.HTTPErrorProcessor())
        self.urlopener.add_handler(urllib2.HTTPRedirectHandler())
        self.urlopener.add_handler(
            urllib2.HTTPCookieProcessor(self.cookiejar))
    except Exception, ex:
        # NOTE(review): ex.message is deprecated and not guaranteed to be a
        # string for every exception type -- confirm str(ex) wouldn't be
        # safer for both the stored error and the re-raise below.
        self.error = 'unexpected error while defining session uri opener' + ex.message
        raise Exception(ex.message)
def _is_cache_same(self, dest_file, dist_url):
    '''Checks if the local cache version and the upstream version is the
    same or not. If they are the same, returns True; else False.

    Sends an HTTP HEAD request for dist_url and compares its
    "last-modified" timestamp against the mtime of dest_file, allowing a
    two-second tolerance. Any failure to obtain the remote timestamp is
    treated as "not the same" so the caller re-fetches.
    '''
    if not exists(dest_file):
        if self.DEBUG:
            stderr.write(
                "No file in cache, fetching {0}\n".format(dest_file))
        return False
    opener = urllib.OpenerDirector()
    opener.add_handler(urllib.HTTPHandler())
    opener.add_handler(urllib.HTTPSHandler())
    opener.add_handler(urllib.HTTPDefaultErrorHandler())
    # Extra for handling redirects
    opener.add_handler(urllib.HTTPErrorProcessor())
    opener.add_handler(urllib.HTTPRedirectHandler())
    # Add the header
    opener.addheaders = self.hdr2
    # Grab the header
    try:
        res = opener.open(HeadRequest(dist_url))
        headers = dict(res.info())
        res.close()
        remote_ts = headers['last-modified']
    except urllib.HTTPError as http_error:
        if self.DEBUG:
            stderr.write("Cannot send HTTP HEAD request to get \"last-modified\"" \
                " attribute of remote content file.\n{0} - {1}\n"
                .format(http_error.code, http_error.reason))
        return False
    except KeyError:
        if self.DEBUG:
            stderr.write("Response header of HTTP doesn't contain " \
                "\"last-modified\" field. Cannot determine version" \
                " of remote file \"{0}\"\n".format(dist_url))
        return False
    # The remote's datetime
    remote_dt = datetime.datetime.strptime(remote_ts, self.remote_pattern)
    # Get the locals datetime from the file's mtime, converted to UTC
    local_dt = datetime.datetime.utcfromtimestamp(
        (stat(dest_file)).st_mtime)
    # Giving a two second comfort zone; else we declare they are different.
    # BUG FIX: the original used timedelta.seconds, which discards the days
    # component (and normalizes negative deltas oddly), so a remote file
    # days newer than the cache could still compare as "same". Compare the
    # absolute total offset instead.
    if abs((remote_dt - local_dt).total_seconds()) > 2:
        if self.DEBUG:
            stderr.write("Had a local file {0} " \
                "but it wasn't new enough\n".format(dest_file))
        return False
    if self.DEBUG:
        stderr.write("File {0} is same as upstream\n".format(dest_file))
    return True
def main(): server = 'www.googleapis.com' url = 'https://' + server + '/chromoting/v1/@me/hosts' settings_filepath = os.path.join(os.path.expanduser('~'), '.ChromotingConfig.json') print "Email:", email = raw_input() password = getpass.getpass("Password: "******"HostId:", host_id host_name = socket.gethostname() print "HostName:", host_name print "Generating RSA key pair...", (private_key, public_key) = keygen.generateRSAKeyPair() print "Done" while 1: pin = getpass.getpass("Host PIN: ") if len(pin) < 4: print "PIN must be at least 4 characters long." continue pin2 = getpass.getpass("Confirm host PIN: ") if pin2 != pin: print "PINs didn't match. Please try again." continue break host_secret_hash = "hmac:" + base64.b64encode( hmac.new(str(host_id), pin, hashlib.sha256).digest()) params = { "data": { "hostId": host_id, "hostName": host_name, "publicKey": public_key, } } headers = {"Authorization": "GoogleLogin auth=" + auth_token, "Content-Type": "application/json" } request = urllib2.Request(url, json.dumps(params), headers) opener = urllib2.OpenerDirector() opener.add_handler(urllib2.HTTPDefaultErrorHandler()) print print "Registering host with directory service..." try: res = urllib2.urlopen(request) data = res.read() except urllib2.HTTPError, err: print >> sys.stderr, "Directory returned error:", err print >> sys.stderr, err.fp.read() return 1
def get_opener(cookiejar=None):
    """Build an OpenerDirector with the standard proxy/http/https handlers.

    Args:
        cookiejar: optional cookie jar; when given, an HTTPCookieProcessor
            for it is attached as well.

    Returns:
        A urllib2.OpenerDirector ready for use.
    """
    handlers = [
        urllib2.ProxyHandler(),
        urllib2.UnknownHandler(),
        urllib2.HTTPHandler(),
        urllib2.HTTPDefaultErrorHandler(),
        urllib2.HTTPErrorProcessor(),
        urllib2.HTTPSHandler(),
    ]
    if cookiejar:
        handlers.append(urllib2.HTTPCookieProcessor(cookiejar))
    director = urllib2.OpenerDirector()
    for handler in handlers:
        director.add_handler(handler)
    return director
def getheadersonly(url, redirections=True):
    """Issue a HEAD request for `url` so only response headers are fetched.

    Args:
        url: the URL to probe.
        redirections: when True, redirect responses are followed
            (HTTPErrorProcessor is what makes HTTPRedirectHandler fire).

    NOTE(review): the response (or the HTTPError, which is bound to `res`
    by the except clause) is never returned -- this snippet looks
    truncated; confirm whether the headers were meant to be returned.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    if redirections:
        # HTTPErrorProcessor makes HTTPRedirectHandler work
        opener.add_handler(urllib2.HTTPErrorProcessor())
        opener.add_handler(urllib2.HTTPRedirectHandler())
    try:
        res = opener.open(HeadRequest(url))
    except urllib2.HTTPError, res:
        pass
def _GetOpener(self):
    # Authentication code needs to know about 302 response.
    # So make OpenerDirector without HTTPRedirectHandler.
    director = urllib2.OpenerDirector()
    for handler in (urllib2.ProxyHandler(),
                    urllib2.UnknownHandler(),
                    urllib2.HTTPHandler(),
                    urllib2.HTTPDefaultErrorHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.HTTPErrorProcessor(),
                    urllib2.HTTPCookieProcessor(cookie_jar)):
        director.add_handler(handler)
    return director
def getResponse(self):
    """ Makes the request, validates and returns a response """
    # POST self.data (urlencoded) to the endpoint URL with the canned
    # request headers.
    req = urllib2.Request(self.getURL(),
                          data=urllib.urlencode(self.data),
                          headers=self.REQUEST_HEADER)
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    f = opener.open(req)
    import json
    # NOTE(review): despite the docstring, nothing is returned -- the decoded
    # JSON body is only printed. Looks like debug leftover; confirm intent.
    print json.load(f)
def __init__(self, proxy, verbose=0):
    """Remember proxy/verbosity settings and assemble the urllib2 opener.

    If `proxy` is given it is installed for http traffic; otherwise the
    default ProxyHandler (environment-derived settings) is used.
    """
    self.proxy = proxy
    self.verbose = verbose
    self.opener = opener = urllib2.OpenerDirector()
    if proxy:
        proxy_handler = urllib2.ProxyHandler({'http': self.proxy})
    else:
        proxy_handler = urllib2.ProxyHandler()
    for handler in (proxy_handler,
                    urllib2.UnknownHandler(),
                    urllib2.HTTPHandler(),
                    urllib2.HTTPDefaultErrorHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.HTTPErrorProcessor()):
        opener.add_handler(handler)
def __init__(self, username, password, host, debug=0):
    # Store the target host, precompute an HTTP Basic-auth header from the
    # credentials, and build an opener with cookie support and logging.
    logger.debug('Creating new connection with username=%s host=%s',
                 username, host)
    self._host = host
    # base64.encodestring appends newlines; strip them so the header value
    # stays on one line.
    base64string = base64.encodestring(
        '%s:%s' % (username, password)).replace('\n', '')
    self._headers['Authorization'] = 'Basic %s' % base64string
    # debug > 0 makes the HTTP/HTTPS handlers dump raw wire traffic.
    self._opener = urllib2.build_opener(
        urllib2.HTTPHandler(debuglevel=debug),
        urllib2.HTTPSHandler(debuglevel=debug),
        urllib2.HTTPCookieProcessor(cookielib.CookieJar()),
        LoggingHandler(),
        urllib2.HTTPDefaultErrorHandler())
def _GetOpener(self):
  """Returns an OpenerDirector that supports cookies and ignores redirects.

  Returns:
    A urllib2.OpenerDirector object.
  """
  opener = urllib2.OpenerDirector()
  opener.add_handler(fancy_urllib.FancyProxyHandler())
  opener.add_handler(urllib2.UnknownHandler())
  opener.add_handler(urllib2.HTTPHandler())
  opener.add_handler(urllib2.HTTPDefaultErrorHandler())
  opener.add_handler(urllib2.HTTPSHandler())
  opener.add_handler(urllib2.HTTPErrorProcessor())
  opener.add_handler(ContentEncodingHandler())
  auth_domain = ''
  if 'AUTH_DOMAIN' in os.environ:
    auth_domain = os.environ['AUTH_DOMAIN'].lower()
  if self.save_cookies:
    if auth_domain == 'appscale':
      # AppScale deployments keep cookies under their own directory;
      # create it on first use.
      cookies_dir = os.path.expanduser(
          HttpRpcServer.APPSCALE_COOKIE_DIR)
      if not os.path.exists(cookies_dir):
        os.mkdir(cookies_dir)
    else:
      self.cookie_jar.filename = os.path.expanduser(
          HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)
      if os.path.exists(self.cookie_jar.filename):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          logger.debug("Loaded authentication cookies from %s",
                       self.cookie_jar.filename)
        except (OSError, IOError, cookielib.LoadError), e:
          # Bad/unreadable cookie file: fall back to no persistent cookies.
          logger.debug(
              "Could not load authentication cookies; %s: %s",
              e.__class__.__name__, e)
          self.cookie_jar.filename = None
      else:
        try:
          # Create the (empty) cookie file up front; 0600 keeps it private.
          fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
          os.close(fd)
        except (OSError, IOError), e:
          # Cannot create the file: don't persist cookies this run.
          logger.debug("Could not create authentication cookies file " + \
              "; %s: %s" % (e.__class__.__name__, e))
          self.cookie_jar.filename = None
def test_cookie_redirect(self):
    # cookies shouldn't leak into redirected requests
    from cookielib import CookieJar
    from test.test_cookielib import interact_netscape

    cj = CookieJar()
    # Plant a cookie scoped to example.com only.
    interact_netscape(cj, "http://www.example.com/", "spam=eggs")
    # Mock handler answers every request with a 302 to cracker.com.
    hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
    hdeh = urllib2.HTTPDefaultErrorHandler()
    hrh = urllib2.HTTPRedirectHandler()
    cp = urllib2.HTTPCookieProcessor(cj)
    o = build_test_opener(hh, hdeh, hrh, cp)
    o.open("http://www.example.com/")
    # The redirected request (to cracker.com) must not carry the
    # example.com cookie.
    self.assert_(not hh.req.has_header("Cookie"))
def __init__(self, *args, **kargs):
    """Opener preconfigured with basic protocol support plus cookies."""
    urllib2.OpenerDirector.__init__(self, *args, **kargs)
    # Install the basic protocol and error handlers.
    for handler in (urllib2.ProxyHandler(),
                    urllib2.UnknownHandler(),
                    urllib2.HTTPHandler(),
                    urllib2.HTTPDefaultErrorHandler(),
                    urllib2.HTTPRedirectHandler(),
                    urllib2.FTPHandler(),
                    urllib2.FileHandler(),
                    urllib2.HTTPErrorProcessor()):
        self.add_handler(handler)
    # Cookie support. (Not strictly needed at the moment, but one never
    # knows when it might become necessary.)
    self.cj = cookielib.CookieJar()
    self.add_handler(urllib2.HTTPCookieProcessor(self.cj))
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    director = urllib2.OpenerDirector()
    handlers = (
        urllib2.ProxyHandler(),
        urllib2.UnknownHandler(),
        urllib2.HTTPHandler(),
        urllib2.HTTPDefaultErrorHandler(),
        urllib2.HTTPSHandler(),
        urllib2.HTTPErrorProcessor(),
        # Cookie support backed by this server's persistent jar.
        urllib2.HTTPCookieProcessor(self.cookie_jar),
    )
    for h in handlers:
        director.add_handler(h)
    return director
def _GetHTTPOpener():
    """Create an http opener used to interact with Google's ClientLogin.

    Returns:
      An http opener capable of handling anything needed to interact with
      Google's ClientLogin (proxies, http and https).
    """
    http_opener = urllib2.OpenerDirector()
    for handler_cls in (urllib2.ProxyHandler,
                        urllib2.UnknownHandler,
                        urllib2.HTTPHandler,
                        urllib2.HTTPDefaultErrorHandler,
                        urllib2.HTTPErrorProcessor,
                        urllib2.HTTPSHandler):
        http_opener.add_handler(handler_cls())
    return http_opener
def _build_opener(self):
    # Create an opener used to configure outgoing requests.
    opener = urllib2.OpenerDirector()
    # Proxy support (disabled by default; picks up environment settings).
    opener.add_handler(urllib2.ProxyHandler())
    # Raises URLError for URLs no installed handler supports.
    opener.add_handler(urllib2.UnknownHandler())
    # Sends plain http requests.
    opener.add_handler(urllib2.HTTPHandler())
    # Turns error responses into HTTPError exceptions.
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    # Sends https requests.
    opener.add_handler(urllib2.HTTPSHandler())
    # Any response whose status code is outside [200, 300) is converted
    # into an error response.
    opener.add_handler(urllib2.HTTPErrorProcessor())
    self.opener = opener
def _GetOpener(self):
  """Returns an OpenerDirector that supports cookies and ignores redirects.

  Returns:
    A urllib2.OpenerDirector object.
  """
  opener = urllib2.OpenerDirector()
  opener.add_handler(fancy_urllib.FancyProxyHandler())
  opener.add_handler(urllib2.UnknownHandler())
  opener.add_handler(urllib2.HTTPHandler())
  opener.add_handler(urllib2.HTTPDefaultErrorHandler())
  opener.add_handler(fancy_urllib.FancyHTTPSHandler())
  opener.add_handler(urllib2.HTTPErrorProcessor())
  opener.add_handler(ContentEncodingHandler())
  if self.save_cookies:
    self.cookie_jar.filename = os.path.expanduser(
        HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)
    if os.path.exists(self.cookie_jar.filename):
      try:
        self.cookie_jar.load()
        self.authenticated = True
        logger.debug("Loaded authentication cookies from %s",
                     self.cookie_jar.filename)
      except (OSError, IOError, cookielib.LoadError), e:
        # Failed to load cookies. The target file path is bad.
        logger.debug(
            "Could not load authentication cookies; %s: %s",
            e.__class__.__name__, e)
        self.cookie_jar.filename = None
    else:
      # Create an empty cookie file. This must be created with the file
      # permissions set upfront in order to be secure.
      try:
        fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
        os.close(fd)
      except (OSError, IOError), e:
        # Failed to create cookie file. Don't try to save cookies.
        logger.debug(
            "Could not create authentication cookies file; %s: %s",
            e.__class__.__name__, e)
        self.cookie_jar.filename = None
def register(self, auth):
    """Generates a private key for the stored |host_id|, and registers it with
    the Directory service.

    Args:
      auth: Authentication object with credentials for authenticating with
        the Directory service.

    Raises:
      urllib2.HTTPError: An error occurred talking to the Directory server
        (for example, if the |auth| credentials were rejected).
    """
    logging.info("HostId: " + self.host_id)
    logging.info("HostName: " + self.host_name)
    logging.info("Generating RSA key pair...")
    (self.private_key, public_key) = keygen.generateRSAKeyPair()
    logging.info("Done")
    json_data = {
        "data": {
            "hostId": self.host_id,
            "hostName": self.host_name,
            "publicKey": public_key,
        }
    }
    params = json.dumps(json_data)
    headers = {
        "Authorization": "GoogleLogin auth=" + auth.chromoting_auth_token,
        "Content-Type": "application/json",
    }
    request = urllib2.Request(self.url, params, headers)
    # The request goes through urllib2's default opener. (A hand-built
    # OpenerDirector used to be constructed here but was never used; it
    # has been removed as dead code.)
    logging.info("Registering host with directory service...")
    res = urllib2.urlopen(request)
    data = res.read()
    logging.info("Done")
def _GetOpener(self):
  """Returns an OpenerDirector that supports cookies and ignores redirects.

  Returns:
    A urllib2.OpenerDirector object.
  """
  opener = urllib2.OpenerDirector()
  opener.add_handler(fancy_urllib.FancyProxyHandler())
  opener.add_handler(urllib2.UnknownHandler())
  opener.add_handler(urllib2.HTTPHandler())
  opener.add_handler(urllib2.HTTPDefaultErrorHandler())
  opener.add_handler(fancy_urllib.FancyHTTPSHandler())
  opener.add_handler(urllib2.HTTPErrorProcessor())
  if self.save_cookies:
    self.cookie_jar.filename = os.path.expanduser(
        HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)
    if os.path.exists(self.cookie_jar.filename):
      try:
        self.cookie_jar.load()
        self.authenticated = True
        logger.info("Loaded authentication cookies from %s",
                    self.cookie_jar.filename)
      except (OSError, IOError, cookielib.LoadError), e:
        # Bad/unreadable cookie file: continue without persistent cookies.
        logger.debug(
            "Could not load authentication cookies; %s: %s",
            e.__class__.__name__, e)
        self.cookie_jar.filename = None
    else:
      try:
        # Create the (empty) cookie file with restrictive 0600 permissions
        # set up front.
        fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
        os.close(fd)
      except (OSError, IOError), e:
        # Cannot create the file: don't persist cookies this run.
        logger.debug(
            "Could not create authentication cookies file; %s: %s",
            e.__class__.__name__, e)
        self.cookie_jar.filename = None
def init_HTTP(self):
    # add default auth for monitor.old
    GenericServer.init_HTTP(self)
    # self.Cookie is a CookieJar which is a list of cookies - if 0 then emtpy
    if len(self.Cookie) == 0:
        try:
            # Ninja Settings
            # get a Ninja cookie via own method
            self.urlopener.add_handler(urllib2.HTTPDefaultErrorHandler())
            # POST the login form (with CSRF token); a successful login
            # populates self.Cookie through the opener's cookie handling.
            self.urlopener.open(
                self.login_url,
                urllib.urlencode({
                    'username': self.get_username(),
                    'password': self.get_password(),
                    'csrf_token': self.csrf()
                }))
            if str(self.conf.debug_mode) == "True":
                self.Debug(server=self.get_name(),
                           debug="Cookie:" + str(self.Cookie))
        except:
            # Best-effort login: report the failure through the app's own
            # error channel instead of raising.
            self.Error(sys.exc_info())
def _GetOpener(self):
  """Returns an OpenerDirector that supports cookies and ignores redirects.

  Returns:
    A urllib2.OpenerDirector object.
  """
  opener = urllib2.OpenerDirector()
  opener.add_handler(urllib2.ProxyHandler())
  opener.add_handler(urllib2.UnknownHandler())
  opener.add_handler(urllib2.HTTPHandler())
  opener.add_handler(urllib2.HTTPDefaultErrorHandler())
  opener.add_handler(urllib2.HTTPSHandler())
  opener.add_handler(urllib2.HTTPErrorProcessor())
  if self.save_cookies:
    self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
    self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
    if os.path.exists(self.cookie_file):
      try:
        self.cookie_jar.load()
        self.authenticated = True
        StatusUpdate("Loaded authentication cookies from %s" %
                     self.cookie_file)
      except (cookielib.LoadError, IOError):
        # Failed to load cookies - just ignore them.
        pass
    else:
      # Create an empty cookie file with mode 600
      fd = os.open(self.cookie_file, os.O_CREAT, 0600)
      os.close(fd)
    # Always chmod the cookie file
    os.chmod(self.cookie_file, 0600)
  else:
    # Don't save cookies across runs of update.py.
    self.cookie_jar = cookielib.CookieJar()
  opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
  return opener
req = urllib2.Request(url, None, headers) req.get_method = lambda: 'HEAD' #Create an opener that does not support local file access opener = urllib2.OpenerDirector() #Don't follow redirects, but don't treat them as errors either error_nop = lambda *args, **kwargs: True http_error_processor = urllib2.HTTPErrorProcessor() http_error_processor.http_error_301 = error_nop http_error_processor.http_error_302 = error_nop http_error_processor.http_error_307 = error_nop handlers = [ urllib2.UnknownHandler(), urllib2.HTTPHandler(), urllib2.HTTPDefaultErrorHandler(), urllib2.FTPHandler(), http_error_processor ] try: import ssl handlers.append(urllib2.HTTPSHandler()) except: #Python isn't compiled with SSL support pass map(opener.add_handler, handlers) if platform.python_version_tuple() >= (2, 6): opener.open(req, timeout=10) else: opener.open(req) except ValueError: raise ValidationError(_(u'Enter a valid URL.'), code='invalid')
def __call__(self, value):
    """Validate `value` as a URL, with IDN-domain fallback and an optional
    existence check (HEAD request for absolute URLs, URL resolution for
    relative ones)."""
    try:
        super(RelativeURLValidator, self).__call__(value)
    except ValidationError as e:
        # Trivial case failed. Try for possible IDN domain
        if value:
            value = smart_text(value)
            scheme, netloc, path, query, fragment = urlparse.urlsplit(
                value)
            try:
                netloc = netloc.encode('idna')  # IDN -> ACE
            except UnicodeError:  # invalid domain part
                raise e
            url = urlparse.urlunsplit(
                (scheme, netloc, path, query, fragment))
            super(RelativeURLValidator, self).__call__(url)
        else:
            raise
    else:
        url = value
    if self.verify_exists:
        broken_error = ValidationError(
            _(u'This URL appears to be a broken link.'),
            code='invalid_link')
        if url.startswith('http://') or url.startswith('ftp://'):
            # Browser-like headers so servers don't reject the probe.
            headers = {
                "Accept": "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
                "Accept-Language": "en-us,en;q=0.5",
                "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
                "Connection": "close",
                "User-Agent": self.user_agent,
            }
            url = url.encode('utf-8')
            try:
                req = urllib2.Request(url, None, headers)
                req.get_method = lambda: 'HEAD'
                #Create an opener that does not support local file access
                opener = urllib2.OpenerDirector()
                #Don't follow redirects, but don't treat them as errors either
                error_nop = lambda *args, **kwargs: True
                http_error_processor = urllib2.HTTPErrorProcessor()
                http_error_processor.http_error_301 = error_nop
                http_error_processor.http_error_302 = error_nop
                http_error_processor.http_error_307 = error_nop
                handlers = [
                    urllib2.UnknownHandler(),
                    urllib2.HTTPHandler(),
                    urllib2.HTTPDefaultErrorHandler(),
                    urllib2.FTPHandler(),
                    http_error_processor
                ]
                try:
                    import ssl
                    handlers.append(urllib2.HTTPSHandler())
                except:
                    #Python isn't compiled with SSL support
                    pass
                map(opener.add_handler, handlers)
                # NOTE(review): python_version_tuple() returns strings, so
                # this tuple-of-str >= tuple-of-int comparison relies on
                # Python 2 cross-type ordering -- confirm it behaves as
                # intended on the supported interpreter versions.
                if platform.python_version_tuple() >= (2, 6):
                    opener.open(req, timeout=10)
                else:
                    opener.open(req)
            except ValueError:
                raise ValidationError(_(u'Enter a valid URL.'),
                                      code='invalid')
            except:  # urllib2.URLError, httplib.InvalidURL, etc.
                raise broken_error
        else:
            # Resolve the relative URL
            try:
                resolve(url)
            except Http404:
                raise broken_error
def _is_cache_same(self, local_file, remote_url):
    """Checks if the local cache version and the upstream version is the same
    or not. If they are the same, returns True; else False.

    A successful freshness check is remembered for self.fetch_timeout
    seconds so repeated calls don't hammer the remote server with HEAD
    requests.
    """
    with self.fetch_lock:
        if not os.path.exists(local_file):
            logging.debug(
                "No local file cached, will fetch {0}".format(remote_url))
            return False
        last_checked = self.fetch_last_checked.get(remote_url, 0)
        now = time.time()
        if now - last_checked <= self.fetch_timeout:
            logging.debug(
                "Checked for fresh version of '%s' just %f seconds ago. "
                "Will wait %f seconds before checking again.", remote_url,
                now - last_checked,
                self.fetch_timeout - now + last_checked)
            return True
        opener = urllib.OpenerDirector()
        opener.add_handler(urllib.HTTPHandler())
        opener.add_handler(urllib.HTTPSHandler())
        opener.add_handler(urllib.HTTPDefaultErrorHandler())
        # Extra for handling redirects
        opener.add_handler(urllib.HTTPErrorProcessor())
        opener.add_handler(urllib.HTTPRedirectHandler())
        # Add the header
        opener.addheaders = self.hdr2
        # Grab the header
        try:
            res = opener.open(CVEFeedManager.HeadRequest(remote_url))
            headers = self._parse_http_headers(res.info())
            res.close()
            remote_ts = headers['last-modified']
        except urllib.HTTPError as http_error:
            logging.debug(
                "Cannot send HTTP HEAD request to get \"last-modified\" "
                "attribute of remote content file.\n{0} - {1}".format(
                    http_error.code, http_error.reason))
            return False
        except KeyError:
            self._print_no_last_modified_warning(remote_url)
            return False
        self.fetch_last_checked[remote_url] = time.time()
        # The remote's datetime
        remote_dt = datetime.datetime.strptime(remote_ts,
                                               self.remote_pattern)
        # Get the locals datetime from the file's mtime, converted to UTC
        local_dt = datetime.datetime.utcfromtimestamp(
            os.stat(local_file).st_mtime)
        # Giving a two second comfort zone; else we declare they differ.
        # BUG FIX: the original compared timedelta.seconds, which discards
        # the days component, so a remote file days newer than the cache
        # (with a sub-2s "seconds" field) was wrongly treated as identical.
        # Compare the absolute total offset instead.
        if abs((remote_dt - local_dt).total_seconds()) > 2:
            logging.info("Had a local version of {0} "
                         "but it wasn't new enough".format(local_file))
            return False
        logging.debug("File {0} is same as upstream".format(local_file))
        return True
def GetAdminOpener(host, user=None, pwd=None, otp_entry=None,
                   cookiejar_path=None):
  """Returns an OpenerDirector for retrieving administrative URLs.

  Uses stored admin cookies if available, or prompts for authentication
  credentials and authenticates with server otherwise.

  Based on reitveld codereview script.
  """
  opener = urllib2.OpenerDirector()
  opener.add_handler(urllib2.HTTPDefaultErrorHandler())
  opener.add_handler(urllib2.HTTPSHandler())
  opener.add_handler(urllib2.HTTPErrorProcessor())
  # TODO(spencer): remove the HTTP handler when we move to
  # AsyncHTTPSTestCase. This is only for testing currently.
  opener.add_handler(urllib2.HTTPHandler())

  if cookiejar_path is None:
    cookiejar_path = expanduser('~/.viewfinder_admin_cookie')
  cookie_jar = cookielib.MozillaCookieJar(cookiejar_path)
  if os.path.exists(cookiejar_path):
    try:
      cookie_jar.load()
      logging.info('loaded admin authentication cookies from %s' %
                   cookiejar_path)
    except:
      # Otherwise, bad cookies; clear them.
      os.unlink(cookiejar_path)
  if not os.path.exists(cookiejar_path):
    # Create empty file with correct permissions.
    fd = os.open(cookiejar_path, os.O_CREAT, 0600)
    os.close(fd)
  # Always chmod to be sure.
  os.chmod(cookiejar_path, 0600)
  opener.add_handler(urllib2.HTTPCookieProcessor(cookie_jar))

  class TornadoXSRFProcessor(urllib2.BaseHandler):
    """Add tornado's xsrf headers to outgoing requests."""
    # Run just after the cookie processor so the Cookie header is set.
    handler_order = urllib2.HTTPCookieProcessor.handler_order + 1

    def http_request(self, request):
      cookie_header = request.get_header('Cookie')
      if cookie_header is not None and '_xsrf=' in cookie_header:
        # We have an xsrf cookie in the cookie jar. Copy it into the
        # X-Xsrftoken header.
        request.add_unredirected_header(
            'X-Xsrftoken',
            re.match('_xsrf=([^;]+)', cookie_header).group(1))
      else:
        # No xsrf cookie, so just make one up. (this is currently the
        # expected case because cookielib considers our xsrf cookie to be
        # a "session" cookie and doesn't save it)
        request.add_unredirected_header('X-Xsrftoken', 'fake_xsrf')
        if cookie_header:
          request.add_unredirected_header(
              'Cookie', '_xsrf="fake_xsrf"; ' + cookie_header)
        else:
          request.add_unredirected_header('Cookie', '_xsrf="fake_xsrf"')
      return request
    https_request = http_request

  opener.add_handler(TornadoXSRFProcessor())

  # Look for admin cookie. If it doesn't exist (or is expired), prompt
  # and reauthenticate.
  if len(cookie_jar) == 0 or \
     any([c.is_expired() for c in cookie_jar if c.domain == host]):
    if user is None or pwd is None or otp_entry is None:
      user, pwd, otp_entry = _PromptForAdminCookie(user, pwd, otp_entry)
    from viewfinder.backend.www.admin import admin_api
    admin_api.Authenticate(opener, host, user, pwd, otp_entry)
    cookie_jar.save()
    logging.info('saved admin authentication cookies to %s' %
                 cookiejar_path)
  return opener
# Fragment of a host-registration script: POST the host's public key to the
# directory service, then obtain a talk-network auth token.
print "Done"

# Hand-format the JSON registration payload.
params = ('{"data":{' + \
    '"hostId": "%(hostId)s",' + \
    '"hostName": "%(hostName)s",' + \
    '"publicKey": "%(publicKey)s"}}') % \
    {'hostId': host_id, 'hostName': host_name, 'publicKey': public_key}
headers = {
    "Authorization": "GoogleLogin auth=" + auth_token,
    "Content-Type": "application/json"
}
request = urllib2.Request(url, params, headers)

# NOTE(review): this opener is built but never used -- the request below
# goes through urllib2's default opener via urlopen(). Confirm it can be
# removed.
opener = urllib2.OpenerDirector()
opener.add_handler(urllib2.HTTPDefaultErrorHandler())

print
print "Registering host with directory service..."

try:
    res = urllib2.urlopen(request)
    data = res.read()
except urllib2.HTTPError, err:
    # Registration failed: report the server's error body and bail out.
    print >> sys.stderr, "Directory returned error:", err
    print >> sys.stderr, err.fp.read()
    sys.exit(1)

print "Done"

# Get token that the host will use to authenticate in talk network.
authenticator = gaia_auth.GaiaAuthenticator('chromiumsync')