def lookup_shader_crc(crc, index):
    shaders = index['shaders']
    if crc not in shaders:
        return []
    shader = shaders[crc]
    posts = index['posts']
    result = []
    # Group results by distinct shader SHA - the same CRC can map to several
    # different shader versions spread across multiple posts.
    for i, sha in enumerate(shader, 1):
        result_distinct = {'posts': [], 'shader': None}
        for post_url in shader[sha]:
            post = posts[post_url]
            downloads = []
            for (url, zip_path) in shader[sha][post_url]:
                downloads.append({
                    'url': url,
                    'path': zip_path,
                })
                # Only extract the shader text once per distinct SHA, from the
                # first download that contains it.
                if result_distinct['shader'] is None:
                    filename = shaderutil.url_to_download_path(url, download_dir)
                    result_distinct['shader'] = extract_shader(filename, zip_path)
            result_distinct['posts'].append({
                'title': post['title'],
                'author': post['author'],
                'url': post_url,
                'downloads': downloads,
            })
        if result_distinct['shader'] is not None:
            result.append(result_distinct)
    return result
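# Sketch of the index structure lookup_shader_crc() expects, reconstructed from
# the lookups above. The CRC/SHA key formats and every URL and path below are
# illustrative assumptions, not values from the real index:
#
#   index['shaders'][crc][sha][post_url] -> list of (download_url, path_in_zip)
#   index['posts'][post_url]             -> {'title': ..., 'author': ...}
example_index = {
    'shaders': {
        'deadbeef': {                                # hypothetical shader CRC
            '0123abcd': {                            # hypothetical SHA of the shader text
                'http://example.com/some-post': [
                    ('http://example.com/fix.zip', 'shaders/deadbeef.txt'),
                ],
            },
        },
    },
    'posts': {
        'http://example.com/some-post': {'title': 'Example fix', 'author': 'someone'},
    },
}
# The call below would read the locally downloaded copy of fix.zip, so it is
# left commented out here:
# matches = lookup_shader_crc('deadbeef', example_index)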
def download_file(url):
    try:
        os.mkdir(download_dir)
    except OSError:
        pass
    dest = shaderutil.url_to_download_path(url, download_dir)
    if os.path.exists(dest):
        # We check ctime here, since atime can be updated by simply reading the
        # file, and we want mtime to match the upstream file. ctime could also
        # be modified externally (chmod, chown, etc), but that's not really an
        # issue since it won't matter if we miss one update here or there.
        st = os.stat(dest)
        if time.time() - st.st_ctime < poll_updates:
            return dest
        last_modified = get_url_last_modified(url)
        if int(st.st_mtime) == last_modified:
            # FIXME: Also check file size matches Content-Length
            # print('Skipping %s - up to date' % url)
            # We want to update ctime to indicate that we have checked this
            # file for updates without modifying it. One way we can achieve
            # this is to touch its permissions (setting them to their current
            # value is sufficient).
            # NOTE that this won't work on Windows, where ctime is the creation
            # time rather than the inode modified time.
            os.chmod(dest, os.stat(dest).st_mode)
            return dest
        rename_to = '%s~%s' % (time.strftime("%Y%m%d%H%M%S", time.gmtime(st.st_mtime)),
                               os.path.basename(dest))
        rename_to = os.path.join(os.path.dirname(dest), rename_to)
        print('%s updated' % url)
        os.rename(dest, rename_to)
        print('old file backed up as %s' % rename_to)
    recursive_mkdir(os.path.dirname(dest))
    with open(dest, 'wb') as f:
        try:
            print('Downloading %s...' % url, end='')
            sys.stdout.flush()
            with urllib.request.urlopen(url) as download:
                last_modified = http_date.parse_http_date(download.getheader('Last-Modified'))
                while True:
                    buf = download.read(64 * 1024)
                    if not buf:
                        break
                    f.write(buf)
                    print('.', end='')
                    sys.stdout.flush()
            print('Done.')
        except:
            try:
                os.remove(dest)
            except:
                pass
            else:
                print('\nRemoved partially downloaded %s' % dest)
            raise
    os.utime(dest, (time.time(), last_modified))
    return dest
def download_file(url):
    try:
        os.mkdir(download_dir)
    except OSError:
        pass
    dest = shaderutil.url_to_download_path(url, download_dir)
    if os.path.exists(dest):
        # We check ctime here, since atime can be updated by simply reading the
        # file, and we want mtime to match the upstream file. ctime could also
        # be modified externally (chmod, chown, etc), but that's not really an
        # issue since it won't matter if we miss one update here or there.
        st = os.stat(dest)
        if time.time() - st.st_ctime < poll_updates:
            return dest
        # Dropbox URLs don't return a Last-Modified header
        last_modified = get_url_last_modified(url)
        if int(st.st_mtime) == last_modified or last_modified is None:
            # FIXME: Also check file size matches Content-Length
            # print('Skipping %s - up to date' % url)
            # We want to update ctime to indicate that we have checked this
            # file for updates without modifying it. One way we can achieve
            # this is to touch its permissions (setting them to their current
            # value is sufficient).
            # NOTE that this won't work on Windows, where ctime is the creation
            # time rather than the inode modified time.
            os.chmod(dest, os.stat(dest).st_mode)
            return dest
        # The upstream file has changed - back up the old copy under a
        # timestamped name before downloading the new one.
        rename_to = '%s~%s' % (time.strftime("%Y%m%d%H%M%S", time.gmtime(st.st_mtime)),
                               os.path.basename(dest))
        rename_to = os.path.join(os.path.dirname(dest), rename_to)
        print('%s updated' % url)
        os.rename(dest, rename_to)
        print('old file backed up as %s' % rename_to)
    recursive_mkdir(os.path.dirname(dest))
    with open(dest, 'wb') as f:
        try:
            print('Downloading %s...' % url, end='')
            sys.stdout.flush()
            with urllib.request.urlopen(url) as download:
                last_modified = download.getheader('Last-Modified')
                # Dropbox URLs don't return a Last-Modified header
                if last_modified is not None:
                    last_modified = http_date.parse_http_date(last_modified)
                while True:
                    buf = download.read(64 * 1024)
                    if not buf:
                        break
                    f.write(buf)
                    print('.', end='')
                    sys.stdout.flush()
            print('Done.')
        except:
            # Don't leave a partially downloaded file behind
            try:
                os.remove(dest)
            except:
                pass
            else:
                print('\nRemoved partially downloaded %s' % dest)
            raise
    if last_modified is not None:
        os.utime(dest, (time.time(), last_modified))
    return dest
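# Minimal usage sketch (an assumption, not part of the original script):
# download_file() reads the module-level globals download_dir and poll_updates,
# so they need to be set before it is called. The values and URL below are
# purely illustrative.
download_dir = 'downloads'
poll_updates = 24 * 60 * 60  # trust a cached copy for a day before re-checking upstream
# Performs a real HTTP request, so it is left commented out here:
# dest = download_file('http://example.com/fix.zip')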