def get_url_last_modified(url):
    """Return the parsed Last-Modified time of *url* via a HEAD request.

    Returns None when the server does not send a Last-Modified header.
    """
    head_request = urllib.request.Request(url, method='HEAD')
    with urllib.request.urlopen(head_request) as response:
        last_modified = response.getheader('Last-Modified')
    # Dropbox URLs don't return a Last-Modified header
    if last_modified is None:
        return None
    return http_date.parse_http_date(last_modified)
def handle_request(self, request):
    """Serve a static file from self.doc_root for GET/HEAD requests.

    Rejects other methods with 405, maps directories to index.html,
    answers 404 for missing files, and honours If-Modified-Since
    (with the optional "; length=N" extension) by replying 304.
    """
    path = request._path
    filename = os.path.join(self.doc_root, path[1:])
    if request._method not in ('get', 'head'):
        request.error(405)
        return
    if os.path.isdir(filename):
        filename = os.path.join(filename, 'index.html')
    if not os.path.isfile(filename):
        request.error(404)
    else:
        stat_info = os.stat(filename)
        mtime = stat_info[stat.ST_MTIME]
        file_length = stat_info[stat.ST_SIZE]
        ims = request.get_request_header('if-modified-since')
        if ims:
            length_match = 1
            m = self.crack_if_modified_since.match(ims)
            if m:
                # Optional "; length=N" parameter: only honour the
                # conditional if the cached length still matches ours.
                length = m.group(3)
                if length:
                    if int(length) != file_length:
                        length_match = 0
                ims_date = http_date.parse_http_date(m.group(1))
                if length_match and ims_date:
                    if mtime <= ims_date:
                        request.error(304, with_body=0)
                        return
        base, ext = os.path.splitext(filename)
        ext = ext[1:].lower()
        request['Content-Type'] = mime_type_table.content_type_map.get(
            ext, 'text/plain')
        request['Last-Modified'] = http_date.build_http_date(mtime)
        if request._method == 'get':
            # BUG FIX: the original opened the file without ever closing
            # it, leaking one file descriptor per GET. A context manager
            # guarantees closure even if request.push() raises.
            with open(filename, 'rb') as f:
                block = f.read(32768)
                if not block:
                    request.error(204)  # no content
                else:
                    while 1:
                        request.push(block)
                        block = f.read(32768)
                        if not block:
                            break
        elif request._method == 'head':
            pass
        else:
            # should be impossible
            request.error(405)
def handle_request (self, request):
    """Serve a static file from self.doc_root for GET/HEAD requests.

    Rejects other methods with 405, maps directories to index.html,
    answers 404 for missing files, and honours If-Modified-Since
    (with the optional "; length=N" extension) by replying 304.
    """
    path = request._path
    filename = os.path.join (self.doc_root, path[1:])
    if request._method not in ('get', 'head'):
        request.error (405)
        return
    if os.path.isdir (filename):
        filename = os.path.join (filename, 'index.html')
    if not os.path.isfile (filename):
        request.error (404)
    else:
        stat_info = os.stat (filename)
        mtime = stat_info[stat.ST_MTIME]
        file_length = stat_info[stat.ST_SIZE]
        ims = request.get_request_header ('if-modified-since')
        if ims:
            length_match = 1
            m = self.crack_if_modified_since.match (ims)
            if m:
                # Optional "; length=N" parameter: only honour the
                # conditional if the cached length still matches ours.
                length = m.group (3)
                if length:
                    if int(length) != file_length:
                        length_match = 0
                ims_date = http_date.parse_http_date (m.group(1))
                if length_match and ims_date:
                    if mtime <= ims_date:
                        request.error (304, with_body=0)
                        return
        base, ext = os.path.splitext (filename)
        ext = ext[1:].lower()
        request['Content-Type'] = mime_type_table.content_type_map.get (ext, 'text/plain')
        request['Last-Modified'] = http_date.build_http_date (mtime)
        if request._method == 'get':
            # BUG FIX: the original opened the file without ever closing
            # it, leaking one file descriptor per GET. A context manager
            # guarantees closure even if request.push() raises.
            with open (filename, 'rb') as f:
                block = f.read (32768)
                if not block:
                    request.error (204) # no content
                else:
                    while 1:
                        request.push (block)
                        block = f.read (32768)
                        if not block:
                            break
        elif request._method == 'head':
            pass
        else:
            # should be impossible
            request.error (405)
def handle_request(self, request):
    """Serve a static file through self.filesystem (medusa-style handler).

    Validates the command, resolves directory requests via
    self.directory_defaults (redirecting with 301 when the trailing
    slash is missing), honours If-Modified-Since with 304, and pushes
    the file through self.default_file_producer on GET.
    """
    if request.command not in self.valid_commands:
        request.error(400)  # bad request
        return
    self.hit_counter.increment()
    path, params, query, fragment = request.split_uri()
    if '%' in path:
        path = unquote(path)
    # strip off all leading slashes
    while path and path[0] == '/':
        path = path[1:]
    if self.filesystem.isdir(path):
        if path and path[-1] != '/':
            request['Location'] = 'http://%s/%s/' % (
                request.channel.server.server_name, path)
            request.error(301)
            return
        # we could also generate a directory listing here,
        # may want to move this into another method for that
        # purpose
        found = 0
        if path and path[-1] != '/':
            path = path + '/'
        for default in self.directory_defaults:
            p = path + default
            if self.filesystem.isfile(p):
                path = p
                found = 1
                break
        if not found:
            request.error(404)  # Not Found
            return
    elif not self.filesystem.isfile(path):
        request.error(404)  # Not Found
        return
    # BUG FIX: the original statted the file twice (a TOCTOU race) and
    # the first, unprotected stat could raise out of the handler. Stat
    # once, under the error handling.
    try:
        stat_info = self.filesystem.stat(path)
    except OSError:
        request.error(404)
        return
    file_length = stat_info[stat.ST_SIZE]
    mtime = stat_info[stat.ST_MTIME]
    ims = get_header_match(IF_MODIFIED_SINCE, request.header)
    length_match = 1
    if ims:
        # Optional "; length=N" extension: only honour the conditional
        # if the client's cached length still matches ours.
        length = ims.group(4)
        if length:
            try:
                # BUG FIX: string.atoi was removed in Python 3;
                # int() is equivalent here on both 2 and 3.
                if int(length) != file_length:
                    length_match = 0
            except ValueError:
                pass  # malformed length — ignore the extension
    ims_date = 0
    if ims:
        ims_date = http_date.parse_http_date(ims.group(1))
    if length_match and ims_date:
        if mtime <= ims_date:
            request.reply_code = 304
            request.done()
            self.cache_counter.increment()
            return
    try:
        file = self.filesystem.open(path, 'rb')
    except IOError:
        request.error(404)
        return
    request['Last-Modified'] = http_date.build_http_date(mtime)
    request['Content-Length'] = file_length
    self.set_content_type(path, request)
    if request.command == 'get':
        # The producer takes ownership of the open file handle.
        request.push(self.default_file_producer(file))
    self.file_counter.increment()
    request.done()
def handle_request (self, request):
    """Serve a static file through self.filesystem (medusa-style handler).

    Validates the command, resolves directory requests via
    self.directory_defaults (redirecting with 301 when the trailing
    slash is missing), honours If-Modified-Since with 304, and pushes
    the file through self.default_file_producer on GET.
    """
    if request.command not in self.valid_commands:
        request.error (400) # bad request
        return
    self.hit_counter.increment()
    path, params, query, fragment = request.split_uri()
    if '%' in path:
        path = unquote (path)
    # strip off all leading slashes
    while path and path[0] == '/':
        path = path[1:]
    if self.filesystem.isdir (path):
        if path and path[-1] != '/':
            request['Location'] = 'http://%s/%s/' % (
                request.channel.server.server_name,
                path
                )
            request.error (301)
            return
        # we could also generate a directory listing here,
        # may want to move this into another method for that
        # purpose
        found = 0
        if path and path[-1] != '/':
            path = path + '/'
        for default in self.directory_defaults:
            p = path + default
            if self.filesystem.isfile (p):
                path = p
                found = 1
                break
        if not found:
            request.error (404) # Not Found
            return
    elif not self.filesystem.isfile (path):
        request.error (404) # Not Found
        return
    # BUG FIX: the original statted the file twice (a TOCTOU race) and
    # the first, unprotected stat could raise out of the handler. Stat
    # once, under the error handling.
    try:
        stat_info = self.filesystem.stat (path)
    except OSError:
        request.error (404)
        return
    file_length = stat_info[stat.ST_SIZE]
    mtime = stat_info[stat.ST_MTIME]
    ims = get_header_match (IF_MODIFIED_SINCE, request.header)
    length_match = 1
    if ims:
        # Optional "; length=N" extension: only honour the conditional
        # if the client's cached length still matches ours.
        length = ims.group (4)
        if length:
            try:
                # BUG FIX: string.atoi was removed in Python 3;
                # int() is equivalent here on both 2 and 3.
                if int (length) != file_length:
                    length_match = 0
            except ValueError:
                pass # malformed length — ignore the extension
    ims_date = 0
    if ims:
        ims_date = http_date.parse_http_date (ims.group (1))
    if length_match and ims_date:
        if mtime <= ims_date:
            request.reply_code = 304
            request.done()
            self.cache_counter.increment()
            return
    try:
        file = self.filesystem.open (path, 'rb')
    except IOError:
        request.error (404)
        return
    request['Last-Modified'] = http_date.build_http_date (mtime)
    request['Content-Length'] = file_length
    self.set_content_type (path, request)
    if request.command == 'get':
        # The producer takes ownership of the open file handle.
        request.push (self.default_file_producer (file))
    self.file_counter.increment()
    request.done()
def download_file(url):
    """Download *url* into download_dir, keeping the local copy's mtime
    in sync with the server's Last-Modified time.

    Re-checks for updates at most every poll_updates seconds; backs up
    a superseded local copy before overwriting it. Returns the local
    destination path.
    """
    try:
        os.mkdir(download_dir)
    except OSError:
        pass  # directory already exists; open() below surfaces real errors
    dest = shaderutil.url_to_download_path(url, download_dir)
    if os.path.exists(dest):
        # We check ctime here, since atime can be updated by simply reading the
        # file, and we want mtime to match the upstream file. ctime could also
        # be modified externally (chmod, chown, etc), but that's not really an
        # issue since it won't matter if we miss one update here or there.
        st = os.stat(dest)
        if time.time() - st.st_ctime < poll_updates:
            return dest
        last_modified = get_url_last_modified(url)
        # BUG FIX: some servers (e.g. Dropbox) omit Last-Modified, in
        # which case we have no way to tell whether the file changed —
        # treat it as up to date rather than re-downloading every poll.
        if last_modified is None or int(st.st_mtime) == last_modified:
            # FIXME: Also check file size matches Content-Length
            # print('Skipping %s - up to date' % url)
            # We want to update ctime to indicate that we have checked this
            # file for updates without modifying it. One way we can achieve
            # this is to touch it's permissions (setting them to their current
            # value is sufficient).
            # NOTE that this won't work on Windows, where ctime is the creation
            # time rather than the inode modified time.
            os.chmod(dest, os.stat(dest).st_mode)
            return dest
        # Upstream changed: keep the old copy, tagged with its mtime.
        rename_to = '%s~%s' % (time.strftime(
            "%Y%m%d%H%M%S", time.gmtime(st.st_mtime)), os.path.basename(dest))
        rename_to = os.path.join(os.path.dirname(dest), rename_to)
        print('%s updated' % url)
        os.rename(dest, rename_to)
        print('old file backed up as %s' % rename_to)
    recursive_mkdir(os.path.dirname(dest))
    with open(dest, 'wb') as f:
        try:
            print('Downloading %s...' % url, end='')
            sys.stdout.flush()
            with urllib.request.urlopen(url) as download:
                last_modified = download.getheader('Last-Modified')
                # BUG FIX: Dropbox URLs don't return a Last-Modified
                # header; the original passed None to parse_http_date.
                if last_modified is not None:
                    last_modified = http_date.parse_http_date(last_modified)
                while True:
                    buf = download.read(64 * 1024)
                    if not buf:
                        break
                    f.write(buf)
                    print('.', end='')
                    sys.stdout.flush()
            print('Done.')
        except:
            # Don't leave a truncated file behind — remove it, re-raise.
            try:
                os.remove(dest)
            except:
                pass
            else:
                print('\nRemoved partially downloaded %s' % dest)
            raise
    # Stamp the local mtime with the server's, but only when we got one.
    if last_modified is not None:
        os.utime(dest, (time.time(), last_modified))
    return dest
def get_url_last_modified(url):
    """Return the parsed Last-Modified time of *url* via a HEAD request.

    Returns None when the server does not send a Last-Modified header.
    """
    req = urllib.request.Request(url, method='HEAD')
    with urllib.request.urlopen(req) as f:
        header = f.getheader('Last-Modified')
        # BUG FIX: some servers (e.g. Dropbox) omit Last-Modified;
        # the original passed None straight to parse_http_date.
        if header is None:
            return None
        return http_date.parse_http_date(header)
def download_file(url):
    """Download *url* into download_dir, keeping the local copy's mtime in
    sync with the server's Last-Modified time (when the server sends one).

    Re-checks for updates at most every poll_updates seconds; backs up a
    superseded local copy before overwriting it. Returns the local path.
    """
    try:
        os.mkdir(download_dir)
    except OSError:
        # Directory already exists (or is uncreatable — the open() below
        # will surface a real failure).
        pass
    dest = shaderutil.url_to_download_path(url, download_dir)
    if os.path.exists(dest):
        # We check ctime here, since atime can be updated by simply reading the
        # file, and we want mtime to match the upstream file. ctime could also
        # be modified externally (chmod, chown, etc), but that's not really an
        # issue since it won't matter if we miss one update here or there.
        st = os.stat(dest)
        if time.time() - st.st_ctime < poll_updates:
            return dest
        # Dropbox URLs don't return a Last-Modified header
        last_modified = get_url_last_modified(url)
        if int(st.st_mtime) == last_modified or last_modified is None:
            # FIXME: Also check file size matches Content-Length
            # print('Skipping %s - up to date' % url)
            # We want to update ctime to indicate that we have checked this
            # file for updates without modifying it. One way we can achieve
            # this is to touch it's permissions (setting them to their current
            # value is sufficient).
            # NOTE that this won't work on Windows, where ctime is the creation
            # time rather than the inode modified time.
            os.chmod(dest, os.stat(dest).st_mode)
            return dest
        # Upstream changed: keep the old copy, tagged with its old mtime.
        rename_to = '%s~%s' % (time.strftime("%Y%m%d%H%M%S", time.gmtime(st.st_mtime)), os.path.basename(dest))
        rename_to = os.path.join(os.path.dirname(dest), rename_to)
        print('%s updated' % url)
        os.rename(dest, rename_to)
        print('old file backed up as %s' % rename_to)
    recursive_mkdir(os.path.dirname(dest))
    with open(dest, 'wb') as f:
        try:
            print('Downloading %s...' % url, end='')
            sys.stdout.flush()
            with urllib.request.urlopen(url) as download:
                last_modified = download.getheader('Last-Modified')
                # Dropbox URLs don't return a Last-Modified header
                if last_modified is not None:
                    last_modified = http_date.parse_http_date(last_modified)
                # Stream in 64 KiB chunks, printing a dot per chunk.
                while True:
                    buf = download.read(64*1024)
                    if not buf:
                        break
                    f.write(buf)
                    print('.', end='')
                    sys.stdout.flush()
            print('Done.')
        except:
            # Any failure mid-download: don't leave a truncated file
            # behind — best-effort remove it, then re-raise.
            try:
                os.remove(dest)
            except:
                pass
            else:
                print('\nRemoved partially downloaded %s' % dest)
            raise
    # Stamp the local mtime with the server's (after the file is closed,
    # so the close doesn't overwrite it) — only when we got a header.
    if last_modified is not None:
        os.utime(dest, (time.time(), last_modified))
    return dest