def upload_files(bucket, local_files, verbose):
    """Upload every LocalFile in *local_files* to *bucket*, in parallel.

    Args:
        bucket: boto-style bucket; must provide new_key(name).
        local_files: iterable of objects exposing .name, .path, .md5
            (hex digest string, or falsy), .content_type,
            .content_encoding and .policy.
        verbose: when true, print one line per upload; otherwise emit a
            progress dot per file.
    """
    # Local imports keep the fix self-contained; both modules are stdlib.
    import base64
    import binascii

    if verbose:
        print()

    def upload_file(local_file):
        key = bucket.new_key(local_file.name)
        filename = local_file.path
        headers = {}
        if local_file.content_type:
            headers['Content-Type'] = local_file.content_type
        if local_file.content_encoding:
            headers['Content-Encoding'] = local_file.content_encoding
        if local_file.md5:
            hex_md5 = local_file.md5
            # base64-encode the binary digest. binascii.unhexlify +
            # base64.b64encode work on Python 2 and 3 alike, unlike the
            # Python-2-only str.decode('hex')/str.encode('base64'); the
            # .strip() mirrors the old newline-trimming behavior.
            b64_md5 = base64.b64encode(binascii.unhexlify(hex_md5)).strip()
            md5 = (hex_md5, b64_md5)
        else:
            md5 = None  # no precomputed digest; boto computes one itself
        policy = local_file.policy
        if verbose:
            parallel.t_print("Uploading %r %r" % (local_file.name, headers))
        key.set_contents_from_filename(filename, headers, md5=md5,
                                       policy=policy)
        if not verbose:
            parallel.t_write(".")  # progress dot
        return None

    parallel.process(upload_file, local_files)
    print()
def upload_files(bucket, local_files, verbose):
    """Push each local file up to *bucket*, fanning out via parallel.process.

    Prints one line per file when *verbose* is set, otherwise a dot per
    completed upload.
    """
    if verbose:
        print()

    def _do_upload(lf):
        # Destination key plus any content headers this file carries.
        dest_key = bucket.new_key(lf.name)
        hdrs = {}
        for attr, header in (('content_type', 'Content-Type'),
                             ('content_encoding', 'Content-Encoding')):
            value = getattr(lf, attr)
            if value:
                hdrs[header] = value
        # boto wants the digest as a (hex, base64) pair when we have one.
        digest = None
        if lf.md5:
            digest = (lf.md5, lf.md5.decode('hex').encode('base64').strip())
        if verbose:
            parallel.t_print("Uploading %r %r" % (lf.name, hdrs))
        dest_key.set_contents_from_filename(lf.path, hdrs, md5=digest,
                                            policy=lf.policy)
        if not verbose:
            parallel.t_write(".")
        return None

    parallel.process(_do_upload, local_files)
    print()
def list_remote_files(bucket, config):
    """Return a dict mapping key name -> remote-file object for *bucket*.

    In quick mode a single bucket listing produces RemoteFileQuick objects;
    otherwise every key is fetched again in parallel to build full
    RemoteFile objects.

    Args:
        bucket: boto-style bucket providing list() and get_key(name).
        config: object with boolean .quick and .verbose attributes.
    """
    sys.stdout.write("Listing bucket %s" % bucket.name)
    if config.quick:
        print()
        remote_files = {}
        for key in bucket.list():
            remote_file = RemoteFileQuick(key)
            if config.verbose:
                print(remote_file)
            remote_files[key.name] = remote_file
        return remote_files
    else:
        if config.verbose:
            print()
        key_names = [key.name for key in bucket.list()]

        def get_remote_file(key_name):
            # NOTE(review): presumably get_key() re-fetches per-key
            # metadata, which is why this path is parallelized — confirm.
            key = bucket.get_key(key_name)
            remote_file = RemoteFile(key)
            if config.verbose:
                parallel.t_print(remote_file)
            else:
                parallel.t_write(".")  # progress dot
            return remote_file

        remote_files_list = parallel.process(get_remote_file, key_names)
        print()
        # Dict comprehension instead of dict([(k, v) for ...]) — same
        # result, clearer and avoids the intermediate list.
        return {f.name: f for f in remote_files_list}
def list_remote_files(bucket, config):
    """Build a name -> remote-file mapping for everything in *bucket*."""
    sys.stdout.write("Listing bucket %s" % bucket.name)

    if config.quick:
        # Quick mode: one pass over the bucket listing is enough.
        print()
        listing = {}
        for k in bucket.list():
            rf = RemoteFileQuick(k)
            if config.verbose:
                print(rf)
            listing[k.name] = rf
        return listing

    # Full mode: collect the names first, then fetch each key in parallel.
    if config.verbose:
        print()
    names = [k.name for k in bucket.list()]

    def _fetch(name):
        rf = RemoteFile(bucket.get_key(name))
        if config.verbose:
            parallel.t_print(rf)
        else:
            parallel.t_write(".")
        return rf

    fetched = parallel.process(_fetch, names)
    print()
    return dict([(f.name, f) for f in fetched])
def rect(img):
    """Draw a rectangle inset 6px from the image border; return img."""
    cv2.rectangle(img, (6, 6), (WIDTH - 6, HEIGHT - 6), color(),
                  thickness=thick())
    return img


def triangle(img):
    """Draw a triangle (apex top-centre, base along the bottom); return img."""
    c = color()
    t = thick()
    cv2.line(img, (WIDTH / 2, 6), (6, HEIGHT - 6), c, t)
    cv2.line(img, (6, HEIGHT - 6), (WIDTH - 6, HEIGHT - 6), c, t)
    cv2.line(img, (WIDTH - 6, HEIGHT - 6), (WIDTH / 2, 6), c, t)
    return img


def docreate(job):
    """Render one (label, draw_fn) job to a 32x32 image.

    Returns (label, raw_pixel_bytes) where the bytes are the flattened
    RGB pixels of the resized image.
    """
    l, f = job
    img = cv2.resize(f(newimg()), (32, 32))
    return (l, "".join([chr(j) for j in flatten_rgb_image(img)]))


functions = [triangle, line1, line2, rect, circle]
# Context managers guarantee both output files are flushed and closed even
# if a worker raises; the original leaked both handles.
with open(args.o, "w") as f, open(args.l, "w") as fl:
    jobs = [(l, fcn) for l, fcn in enumerate(functions)] * args.n
    for l, r in process(jobs, docreate):
        f.write(r)
        # Portable replacement for the Python-2-only `print >> fl, l`.
        fl.write("%s\n" % l)
#cv2.imshow('test', img)
#cv2.waitKey()
# NOTE(review): this chunk begins mid-statement (the tail of a cv2.rectangle
# call whose `def rect(img):` header lies outside this view) and then repeats
# the triangle/docreate/driver code of the preceding chunk verbatim. Left
# byte-identical because the leading fragment cannot be safely restructured
# without the missing header.
color(), thickness=thick()) return img def triangle(img): c = color() t = thick() cv2.line(img, (WIDTH / 2, 6), (6, HEIGHT - 6), c, t) cv2.line(img, (6, HEIGHT - 6), (WIDTH - 6, HEIGHT - 6), c, t) cv2.line(img, (WIDTH - 6, HEIGHT - 6), (WIDTH / 2, 6), c, t) return img def docreate(job): l, f = job img = cv2.resize(f(newimg()), (32, 32)) return (l, "".join([chr(j) for j in flatten_rgb_image(img)])) functions = [triangle, line1, line2, rect, circle] f = open(args.o, "w") fl = open(args.l, "w") jobs = [(l, fcn) for l, fcn in enumerate(functions)] * args.n for l, r in process(jobs, docreate): f.write(r) print >> fl, l #cv2.imshow('test', img) #cv2.waitKey()
tdb.arg_parser().add_argument("--std", required=True)
tdb.arg_parser().add_argument("--rows", type=int, default=20000)
tdb.arg_parser().add_argument("-o", required=True)
args = tdb.parse_args()


def _read_vector(path):
    """Read one whitespace-separated row of floats from *path* as float64."""
    # `with` closes the handle; the original left both files open.
    with open(path) as fh:
        return np.array(
            [float(i) for i in fh.readline().strip().split(" ")],
            np.float64)


# Per-dimension statistics used to whiten each row below.
mean = _read_vector(args.mean)
assert (len(mean) == DIM)
std = _read_vector(args.std)
assert (len(std) == DIM)


def compute(m):
    """Return X^T X for one whitened chunk of raw rows.

    *m* is an iterable of raw byte strings, one row per entry; each row is
    decoded as uint8, centred by `mean` and scaled by `std`.
    """
    # NOTE(review): np.fromstring is deprecated in favor of np.frombuffer;
    # kept here to avoid changing py2 str handling — confirm before porting.
    k = np.matrix([np.fromstring(i, np.uint8) for i in m]) - mean
    k = k / std
    return k.transpose() * k


jobs = process(tdb.groups(args.rows), compute)
# Builtin sum() with an explicit start replaces the py2-only bare `reduce`;
# the accumulation over chunk partial products is identical.
m = sum(jobs, np.zeros((DIM, DIM), np.float64))
# Portable equivalent of `print >> sys.stderr, "processed rows:", count`.
sys.stderr.write("processed rows: %s\n" % tdb.count())
sio.savemat(args.o, {"c": m, "n": tdb.count()}, do_compression=True)
# NOTE(review): this chunk begins inside a function (the `def do_filter(...)`
# header is outside this view — the chunk opens with its `if filt == None`
# body and trailing `raise`), then continues with module-level query-filter
# setup and the distance loop. Left byte-identical: restructuring the
# interior of a definition whose signature is not visible is unsafe.
if filt == None: return arr elif filt == 'raw,sobel': i = ip.unflatten_rgb_image(arr, d, d) i = ip.sobel_scipy(i) i = ip.gray_as_rgb(i) return ip.flatten_rgb_image(i) raise Exception('unknown filter') if args.filter == None: qi = np.int32(qi) elif args.filter == 'raw,sobel': qi = np.int32(do_filter(qi, args.filter)) else: print >> sys.stderr, "unknown filter" sys.exit(1) if args.filterout != None: ip.write_rgb_image(args.filterout, ip.unflatten_rgb_image(np.uint8(qi), d, d)) # ----------------------------- def compute_distance(datachunks): return [np.linalg.norm(qi - do_filter(np.fromstring(c, np.uint8), args.filter)) for c in datachunks] c = 0 for result in process(db.groups(400), compute_distance): for k in result: print k, c c += 1
CHANNELS = 3
DIM = WIDTH * HEIGHT * CHANNELS

tdb = TinyDB(dimensions=DIM, parse_args=None)
tdb.arg_parser().add_argument("--mean", required=True)
tdb.arg_parser().add_argument("--std", required=True)
tdb.arg_parser().add_argument("--rows", type=int, default=20000)
tdb.arg_parser().add_argument("-o", required=True)
args = tdb.parse_args()

# Per-dimension statistics: one whitespace-separated row of floats per file.
mean = np.array([float(v) for v in
                 open(args.mean).readline().strip().split(" ")],
                np.float64)
assert(len(mean) == DIM)
std = np.array([float(v) for v in
                open(args.std).readline().strip().split(" ")],
               np.float64)
assert(len(std) == DIM)


def compute(m):
    """Whiten one chunk of raw rows and return its X^T X partial product."""
    rows = np.matrix([np.fromstring(row, np.uint8) for row in m]) - mean
    whitened = rows / std
    return whitened.transpose() * whitened


jobs = process(tdb.groups(args.rows), compute)
# Accumulate the per-chunk partial products into one DIM x DIM matrix.
total = np.zeros((DIM, DIM), np.float64)
for partial in jobs:
    total = total + partial
m = total
print >> sys.stderr, "processed rows:", tdb.count()
sio.savemat(args.o, {"c": m, "n": tdb.count()}, do_compression=True)
# NOTE(review): this chunk opens with the trailing `raise` of a function
# whose `def` header is outside this view, then continues with module-level
# query-filter setup and the distance loop; it is a near-duplicate of the
# preceding chunk. Left byte-identical: the visible fragment cannot be
# restructured without the missing function header.
raise Exception('unknown filter') if args.filter == None: qi = np.int32(qi) elif args.filter == 'raw,sobel': qi = np.int32(do_filter(qi, args.filter)) else: print >> sys.stderr, "unknown filter" sys.exit(1) if args.filterout != None: ip.write_rgb_image(args.filterout, ip.unflatten_rgb_image(np.uint8(qi), d, d)) # ----------------------------- def compute_distance(datachunks): return [ np.linalg.norm(qi - do_filter(np.fromstring(c, np.uint8), args.filter)) for c in datachunks ] c = 0 for result in process(db.groups(400), compute_distance): for k in result: print k, c c += 1