def Execute(self, opt, args):
    if not args:
        self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
        print >>sys.stderr, "error: '%s' is not a valid name" % nb
        sys.exit(1)

    err = []
    all = self.GetProjects(args[1:])

    pm = Progress('Abandon %s' % nb, len(all))
    for project in all:
        pm.update()
        if not project.AbandonBranch(nb):
            err.append(project)
    pm.end()

    if err:
        if len(err) == len(all):
            print >>sys.stderr, 'error: no project has branch %s' % nb
        else:
            for p in err:
                print >>sys.stderr, "error: %s/: cannot abandon %s" % (p.relpath, nb)
        sys.exit(1)
def rasterizeVLines(dstImg, edges, delta):
    # Orient every edge so that it points in the +y direction.
    flip = delta[:,1] < 0
    p = np.where(flip[:,None,None], edges[:,::-1,:], edges[:,:,:])
    del edges
    d = np.where(flip[:,None], -delta, delta)
    del delta, flip
    x0 = p[:,0,0]
    y0 = p[:,0,1]
    y1 = p[:,1,1]
    del p
    dx = d[:,0]
    dy = d[:,1]
    m = dx / dy            # inverse slope: x as a function of y
    del dx, dy, d
    c = x0 - m * y0        # intercept of x = m*y + c
    y0 = np.floor(y0).astype(int)
    y1 = np.ceil(y1).astype(int)
    del x0
    data = dstImg.data[::-1]
    progress = Progress(len(y0), None)
    for i in xrange(len(y0)):
        # Sample the line at every pixel-row center it crosses.
        y = np.arange(y0[i], y1[i]) + 0.5
        x = m[i] * y + c[i]
        data[y.astype(int), np.floor(x).astype(int), :] = 255
        progress.step()
def rasterizeHLines(dstImg, edges, delta):
    # Orient every edge so that it points in the +x direction.
    flip = delta[:,0] < 0
    p = np.where(flip[:,None,None], edges[:,::-1,:], edges[:,:,:])
    del edges
    d = np.where(flip[:,None], -delta, delta)
    del delta, flip
    x0 = p[:,0,0]
    x1 = p[:,1,0]
    y0 = p[:,0,1]
    del p
    dx = d[:,0]
    dy = d[:,1]
    m = dy / dx            # slope of y = m*x + c
    del dx, dy, d
    c = y0 - m * x0
    x0 = np.floor(x0).astype(int)
    x1 = np.ceil(x1).astype(int)
    del y0
    data = dstImg.data[::-1]
    progress = Progress(len(x0), None)
    for i in xrange(len(x0)):
        # Sample the line at every pixel-column center it crosses.
        x = np.arange(x0[i], x1[i])
        y = m[i] * (x + 0.5) + c[i]
        data[np.floor(y).astype(int), x, :] = 255
        progress.step()
def prune(self, prune_map, show_progress=True):
    '''
    Helper method to remove terms (fields) of our matrix.
    prune_map is a list of 0's and 1's of the same length as self.terms.
    For each term: if 0, remove it; otherwise keep it.
    '''
    if not prune_map or len(prune_map) != len(self.terms):
        return False
    if show_progress:
        print ' Pruning terms list ...'
    new_terms = SuperList()
    for i in range(len(prune_map)-1, -1, -1):
        if prune_map[i] == 1:
            #print self.terms[i]
            new_terms.append(self.terms[i])
    self.terms = new_terms
    if show_progress:
        print ' Pruning documents ...'
    p = Progress(n=len(self), percent=10)
    for doc in self.docs:
        new_doc_terms = SuperList()
        for i in range(len(prune_map)-1, -1, -1):
            if prune_map[i] == 1:
                new_doc_terms.append(doc['terms'][i])
        doc['terms'] = new_doc_terms
        if show_progress:
            p.show(message=' Pruning progress:')
def RasterizeTriangles(dst, coords, shader):
    """
    Software rasterizer.
    """
    delta = coords - coords[:,[1,2,0],:]
    perp = np.concatenate((delta[:,:,1,None], -delta[:,:,0,None]), axis=-1)
    dist = np.sum(perp[:,0,:] * delta[:,2,:], axis=-1)
    perp /= dist[:,None,None]
    base = np.sum(perp * coords, axis=-1)
    cmin = np.floor(np.amin(coords, axis=1)).astype(int)
    cmax = np.ceil( np.amax(coords, axis=1)).astype(int)
    minx = cmin[:,0]
    maxx = cmax[:,0]
    miny = cmin[:,1]
    maxy = cmax[:,1]

    progress = Progress(len(coords), None).HighFrequency(200)
    for i in xrange(len(coords)):
        ixy = np.mgrid[miny[i]:maxy[i],minx[i]:maxx[i]].transpose([1,2,0])[:,:,::-1]
        xy = ixy + 0.5
        uvw = np.sum(perp[i,None,None,:,:] * xy[:,:,None,:], axis=-1) - base[i,None,None,:]
        mask = np.all(uvw > 0, axis=-1)
        col = shader.shade(i, ixy, uvw)
        # log.debug('dst: %s', dst.data[miny[i]:maxy[i],minx[i]:maxx[i]].shape)
        # log.debug('src: %s', col.shape)
        dst.data[miny[i]:maxy[i],minx[i]:maxx[i],:][mask] = col[mask]
        progress.step()
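# The rasterizer above decides pixel coverage with edge functions: `uvw`
# holds the three unnormalized barycentric coordinates of each pixel center,
# and a pixel is covered when all three are positive. A minimal,
# self-contained sketch of that same inside test for a single triangle
# (assumes only numpy; the names are illustrative):
import numpy as np

def point_in_triangle(q, tri):
    """Return True if 2D point q lies strictly inside triangle tri (3x2)."""
    a, b, c = tri
    def edge(p0, p1):
        # 2D cross product of (p1 - p0) with (q - p0)
        return (p1[0]-p0[0])*(q[1]-p0[1]) - (p1[1]-p0[1])*(q[0]-p0[0])
    e = np.array([edge(a, b), edge(b, c), edge(c, a)])
    return bool(np.all(e > 0) or np.all(e < 0))

# point_in_triangle((1.0, 1.0), np.array([[0,0],[4,0],[0,4]]))  -> True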
def bake(self, skel):
    """
    Bake animation as skinning matrices for the specified skeleton.
    Results in significant performance gain when skinning.
    We do skinning with 3x4 matrices, as suggested in
    http://graphics.ucsd.edu/courses/cse169_w05/2-Skeleton.htm
    Section 2.3 (We assume the 4th row contains [0 0 0 1])
    """
    if self.disableBaking:
        return

    from progress import Progress

    log.debug('Updating baked animation %s (%s frames)', self.name, self.nFrames)
    progress = Progress(self.nFrames)

    bones = skel.getBones()
    if len(bones) != self.nBones:
        raise RuntimeError("Error baking animation %s: number of bones in animation data differs from bone count of skeleton %s" % (self.name, skel.name))

    old_pose = skel.getPose()
    self._data_baked = np.zeros((self.dataLen, 3, 4))

    for f_idx in xrange(self.nFrames):
        i = f_idx * self.nBones
        skel.setPose(self._data[i:i+self.nBones])
        for b_idx in xrange(self.nBones):
            idx = i + b_idx
            self._data_baked[idx,:,:] = bones[b_idx].matPoseVerts[:3,:4]
        progress.step("Baking animation frame %s", f_idx+1)

    skel.setPose(old_pose)
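# Since the baked matrices are 3x4 with an implicit [0 0 0 1] bottom row,
# applying one to a vertex is just a rotation plus a translation. A hedged
# numpy sketch (the matrix values here are made up for illustration):
import numpy as np

mat = np.zeros((3, 4))
mat[:, :3] = np.eye(3)            # rotation part (identity here)
mat[:, 3] = [0.0, 1.0, 0.0]       # translation column

verts = np.array([[0.0, 0.0, 0.0],
                  [1.0, 2.0, 3.0]])
skinned = np.dot(verts, mat[:, :3].T) + mat[:, 3]   # p' = R*p + t
print(skinned)   # each vertex shifted by (0, 1, 0)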
def Execute(self, opt, args):
    if not args:
        self.Usage()

    nb = args[0]
    err = []
    success = []
    all = self.GetProjects(args[1:])

    pm = Progress('Checkout %s' % nb, len(all))
    for project in all:
        pm.update()
        status = project.CheckoutBranch(nb)
        if status is not None:
            if status:
                success.append(project)
            else:
                err.append(project)
    pm.end()

    if err:
        for p in err:
            print >>sys.stderr, "error: %s/: cannot checkout %s" % (p.relpath, nb)
        sys.exit(1)
    elif not success:
        print >>sys.stderr, 'error: no project has branch %s' % nb
        sys.exit(1)
def Execute(self, opt, args):
    if not args:
        self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
        print("error: '%s' is not a valid name" % nb, file=sys.stderr)
        sys.exit(1)

    err = []
    projects = []
    if not opt.all:
        projects = args[1:]
        if len(projects) < 1:
            projects = ['.',]  # start it in the local project by default

    all_projects = self.GetProjects(projects)

    pm = Progress('Starting %s' % nb, len(all_projects))
    for project in all_projects:
        pm.update()
        # If the current revision is a specific SHA1 then we can't push back
        # to it so substitute the manifest default revision instead.
        if IsId(project.revisionExpr):
            project.revisionExpr = self.manifest.default.revisionExpr
        if not project.StartBranch(nb):
            err.append(project)
    pm.end()

    if err:
        for p in err:
            print("error: %s/: cannot start %s" % (p.relpath, nb),
                  file=sys.stderr)
        sys.exit(1)
def run():
    options = docopt.docopt(__doc__)
    kwarg = dict()
    if options['--target']:
        kwarg['targetdirectory'] = options['--target']
    if options['--show-time']:
        kwarg['show_time'] = True
    if options['--author']:
        kwarg['author'] = options['--author']
    progress = None
    last = ""
    for msg in export(options['<event_name>'], **kwarg):
        if not options['--quiet']:
            if not progress:
                progress = Progress(msg)
                continue
            if options['--verbose']:
                sys.stdout.write(msg)
                sys.stdout.flush()
            else:
                if msg[0] == '[':
                    progress.increment()
                    last = msg.split(']')[1].strip()
                progress.print_status_line(last)
def writeMaterialFile(human, filepath, meshes, config):
    progress = Progress(len(meshes))
    folderpath = os.path.dirname(filepath)
    name = formatName(os.path.splitext(os.path.basename(filepath))[0])
    filename = name + ".material"
    filepath = os.path.join(folderpath, filename)

    f = codecs.open(filepath, 'w', encoding="utf-8")
    lines = []

    for objIdx, mesh in enumerate(meshes):
        if isinstance(mesh, proxy.Proxy):
            # Object is proxy
            obj = mesh.object.mesh
        else:
            # Object is human mesh
            obj = mesh.mesh
        mat = obj.material

        if objIdx > 0:
            lines.append('')
        lines.append('material %s_%s_%s' % (formatName(name), objIdx,
                     formatName(obj.name) if formatName(obj.name) != name else "human"))
        lines.append('{')
        lines.append('    receive_shadows %s\n' % ("on" if mat.receiveShadows else "off"))
        lines.append('    technique')
        lines.append('    {')
        lines.append('        pass')
        lines.append('        {')
        lines.append('            lighting on\n')
        lines.append('            ambient %f %f %f 1' % mat.ambientColor.asTuple())
        lines.append('            diffuse %f %f %f %f' % tuple(mat.diffuseColor.asTuple() + (mat.opacity,)))
        lines.append('            specular %f %f %f %f' % tuple(mat.specularColor.asTuple() + (128*(mat.shininess), )))
        lines.append('            emissive %f %f %f\n' % mat.emissiveColor.asTuple())
        lines.append('            depth_write %s' % ("off" if mat.transparent else "on"))
        if mat.transparent:
            lines.append('            alpha_rejection greater 128')
        lines.append('')

        textures = mat.exportTextures(os.path.join(folderpath, 'textures'))
        for textureType, texturePath in textures.items():
            if config.exportShaders:
                include = True
            else:
                include = (textureType == 'diffuseTexture')
            texfile = "textures/" + os.path.basename(texturePath)
            pre = '' if include else '//'
            lines.append('            %stexture_unit %s' % (pre, textureType))
            lines.append('            %s{' % pre)
            lines.append('            %s    texture %s' % (pre, texfile))
            lines.append('            %s}\n' % pre)

        lines.append('        }')
        lines.append('    }')
        lines.append('}')
        progress.step()

    f.write("\n".join(lines))
    f.close()
def mapMaskSoft(dimensions = (1024, 1024), mesh = None):
    """
    Create a texture mask for the selected human (software renderer).
    """
    progress = Progress()(0)
    if mesh is None:
        mesh = G.app.selectedHuman.mesh
    W = dimensions[0]
    H = dimensions[1]

    components = 4
    dstImg = mh.Image(width=W, height=H, components=components)
    dstImg.data[...] = np.tile([0,0,0,255], (H,W)).reshape((H,W,components))

    faces = getFaces(mesh)
    coords = np.asarray([0,H])[None,None,:] + mesh.texco[mesh.fuvs[faces]] * np.asarray([W,-H])[None,None,:]
    shape = mesh.fvert[faces].shape
    shape = tuple(list(shape) + [components])
    colors = np.repeat(1, np.prod(shape)).reshape(shape)

    log.debug("mapMask: begin render")

    progress(0.1, 0.55)
    RasterizeTriangles(dstImg, coords[:,[0,1,2],:], MaskShader())
    progress(0.55, 0.99)
    RasterizeTriangles(dstImg, coords[:,[2,3,0],:], MaskShader())
    progress.finish()

    log.debug("mapMask: end render")

    return dstImg
def writeLibraryGeometry(fp, rmeshes, config):
    progress = Progress(len(rmeshes), None)
    fp.write('\n  <library_geometries>\n')
    for rmesh in rmeshes:
        writeGeometry(fp, rmesh, config)
        progress.step()
    fp.write('  </library_geometries>\n')
def update_music_data(self):
    analyzer = Analyzer()
    music_list = self.banshee.get_tracks()

    # delete previously analyzed songs no longer existing in Banshee
    # (iterate over a copy of the keys, since we delete while looping)
    for mp3 in list(self.music_shelve.keys()):
        if mp3 not in music_list:
            del self.music_shelve[mp3]
    self.music_shelve.sync()

    song_count = len(music_list)
    progress = Progress("Analyzing Songs", song_count)

    # calculate and save features of new songs
    for mp3 in music_list:
        if mp3 not in self.music_shelve:
            features = analyzer.compute_features(mp3)
            if analyzer.valid_features(features):
                self.music_shelve[mp3] = features
                self.music_shelve.sync()
        progress.display()

    # convert music data to array
    self.music_data = np.array(self.music_shelve.values())
def writeAnimation(human, linebuffer, animTrack, config):
    import numpy as np
    progress = Progress(len(human.getSkeleton().getBones()))
    log.message("Exporting animation %s.", animTrack.name)
    linebuffer.append('        <animation name="%s" length="%s">' % (animTrack.name, animTrack.getPlaytime()))
    linebuffer.append('            <tracks>')
    for bIdx, bone in enumerate(human.getSkeleton().getBones()):
        # Note: OgreXMLConverter will optimize out unused (not moving) animation tracks
        linebuffer.append('                <track bone="%s">' % bone.name)
        linebuffer.append('                    <keyframes>')
        frameTime = 1.0/float(animTrack.frameRate)
        for frameIdx in xrange(animTrack.nFrames):
            poseMat = animTrack.getAtFramePos(frameIdx)[bIdx]
            translation = poseMat[:3,3]
            angle, axis, _ = transformations.rotation_from_matrix(poseMat)
            axis = np.asarray(axis * np.matrix(bone.getRestMatrix(offsetVect=config.offset)))[0]
            linebuffer.append('                        <keyframe time="%s">' % (float(frameIdx) * frameTime))
            linebuffer.append('                            <translate x="%s" y="%s" z="%s" />' % (translation[0], translation[1], translation[2]))
            # TODO account for scale
            linebuffer.append('                            <rotate angle="%s">' % angle)
            linebuffer.append('                                <axis x="%s" y="%s" z="%s" />' % (axis[0], axis[1], axis[2]))
            linebuffer.append('                            </rotate>')
            linebuffer.append('                        </keyframe>')
        linebuffer.append('                    </keyframes>')
        linebuffer.append('                </track>')
        progress.step()
    linebuffer.append('            </tracks>')
    linebuffer.append('        </animation>')
def crawl(conn, crawl_count=1, new_only=True):
    all_packages = set(x[0] for x in conn.execute("SELECT name FROM packages"))
    real_package_lookup = dict((package_name.lower(), package_name)
                               for package_name in all_packages)

    package_query = "SELECT name, latest_sdist FROM packages WHERE latest_sdist IS NOT NULL"
    if new_only:
        package_query += " AND name NOT IN (SELECT DISTINCT name FROM dependencies)"
    packages = list(conn.execute(package_query))
    total_count = len(packages)

    progress = Progress("dependencies", crawl_count, total_count)
    progress.start()

    mutex = multiprocessing.Lock()

    def package_callback(result):
        from . import get_conn
        mutex.acquire()
        try:
            progress.parse_count += 1
            if result is None:
                return
            package, dependencies = result
            # we're a different thread, so we can't use 'conn' from the parent scope
            conn = get_conn()
            save_package_data(conn, package, dependencies, real_package_lookup)
        finally:
            mutex.release()

    pool = multiprocessing.Pool(PROCESS_COUNT)
    for package, url in packages:
        pool.apply_async(per_package, args=(package, url), callback=package_callback)
    pool.close()
    pool.join()
    progress.stop()
def writeLibraryMaterials(fp, rmeshes, config):
    progress = Progress(len(rmeshes), None)
    fp.write('\n  <library_materials>\n')
    for rmesh in rmeshes:
        writeMaterials(fp, rmesh)
        progress.step()
    fp.write('  </library_materials>\n')
def _Fetch(self, projects, opt):
    fetched = set()
    pm = Progress("Fetching projects", len(projects))

    if self.jobs == 1:
        for project in projects:
            pm.update()
            if project.Sync_NetworkHalf(quiet=opt.quiet):
                fetched.add(project.gitdir)
            else:
                print >>sys.stderr, "error: Cannot fetch %s" % project.name
                if opt.force_broken:
                    print >>sys.stderr, "warn: --force-broken, continuing to sync"
                else:
                    sys.exit(1)
    else:
        threads = set()
        lock = _threading.Lock()
        sem = _threading.Semaphore(self.jobs)
        for project in projects:
            sem.acquire()
            t = _threading.Thread(target=self._FetchHelper,
                                  args=(opt, project, lock, fetched, pm, sem))
            threads.add(t)
            t.start()

        for t in threads:
            t.join()

    pm.end()
    for project in projects:
        project.bare_git.gc("--auto")
    return fetched
def Execute(self, opt, args):
    if not args:
        self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
        print >>sys.stderr, "error: '%s' is not a valid name" % nb
        sys.exit(1)

    err = []
    projects = []
    if not opt.all:
        projects = args[1:]
        if len(projects) < 1:
            print >>sys.stderr, "error: at least one project must be specified"
            sys.exit(1)

    all = self.GetProjects(projects)

    pm = Progress('Starting %s' % nb, len(all))
    for project in all:
        pm.update()
        if not project.StartBranch(nb):
            err.append(project)
    pm.end()

    if err:
        for p in err:
            print >>sys.stderr, "error: %s/: cannot start %s" % (p.relpath, nb)
        sys.exit(1)
def _Fetch(self, projects):
    fetched = set()
    pm = Progress('Fetching projects', len(projects))

    if self.jobs == 1:
        for project in projects:
            pm.update()
            if project.Sync_NetworkHalf():
                fetched.add(project.gitdir)
            else:
                print >>sys.stderr, 'error: Cannot fetch %s' % project.name
                sys.exit(1)
    else:
        threads = set()
        lock = _threading.Lock()
        sem = _threading.Semaphore(self.jobs)
        for project in projects:
            sem.acquire()
            t = _threading.Thread(target = self._FetchHelper,
                                  args = (project, lock, fetched, pm, sem))
            threads.add(t)
            t.start()

        for t in threads:
            t.join()

    pm.end()
    return fetched
class MonteCarloRunner(object):
    def __init__(self, func):
        self.func = func
        self.progress = Progress(report_rate=1)
        self.choices = []

    def run(self, *args):
        while True:
            try:
                self.progress.report(self.choices)
                self.choices = []
                result = self.func(self.amb, *args)
                break
            except Fail:
                continue
        self.progress.report(self.choices, final=True)
        return result

    def amb(self, choices=None):
        if choices is None:
            choices = [False, True]
        if isinstance(choices, (int, long)):
            size = choices
        else:
            size = len(choices)
        if size <= 0:
            raise Fail
        value = random.randrange(size)
        self.choices.append(value)
        return value
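# A hypothetical usage sketch of the amb operator above: randomly search
# for digits x, y with x + y == 10 and x * y == 21 (assumes the Fail
# exception from the same module; amb(n) draws a value from range(n)).
def find_pair(amb):
    x = amb(10)
    y = amb(10)
    if x + y != 10 or x * y != 21:
        raise Fail        # reject this assignment; run() retries
    return (x, y)

runner = MonteCarloRunner(find_pair)
# runner.run() keeps retrying failed random assignments and eventually
# returns (3, 7) or (7, 3).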
def writeLibraryImages(fp, rmeshes, config):
    progress = Progress(len(rmeshes), None)
    fp.write('\n  <library_images>\n')
    for rmesh in rmeshes:
        writeImages(fp, rmesh, config)
        progress.step()
    fp.write('  </library_images>\n')
def writeLibraryEffects(fp, rmeshes, config):
    progress = Progress(len(rmeshes), None)
    fp.write('\n  <library_effects>\n')
    for rmesh in rmeshes:
        writeEffects(fp, rmesh)
        progress.step()
    fp.write('  </library_effects>\n')
def Execute(self, opt, args):
    if not opt.all and not args:
        self.Usage()

    if not opt.all:
        nb = args[0]
        if not git.check_ref_format('heads/%s' % nb):
            print("error: '%s' is not a valid name" % nb, file=sys.stderr)
            sys.exit(1)
    else:
        args.insert(0, None)
        nb = "'All local branches'"

    err = defaultdict(list)
    success = defaultdict(list)
    all_projects = self.GetProjects(args[1:])

    pm = Progress('Abandon %s' % nb, len(all_projects))
    for project in all_projects:
        pm.update()

        if opt.all:
            branches = project.GetBranches().keys()
        else:
            branches = [nb]

        for name in branches:
            status = project.AbandonBranch(name)
            if status is not None:
                if status:
                    success[name].append(project)
                else:
                    err[name].append(project)
    pm.end()

    width = 25
    for name in branches:
        if width < len(name):
            width = len(name)

    if err:
        for br in err.keys():
            err_msg = "error: cannot abandon %s" % br
            print(err_msg, file=sys.stderr)
            for proj in err[br]:
                print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
        sys.exit(1)
    elif not success:
        print('error: no project has local branch(es) : %s' % nb, file=sys.stderr)
        sys.exit(1)
    else:
        print('Abandoned branches:', file=sys.stderr)
        for br in success.keys():
            if len(all_projects) > 1 and len(all_projects) == len(success[br]):
                result = "all project"
            else:
                result = "%s" % (
                    ('\n' + ' ' * width + '| ').join(p.relpath for p in success[br]))
            print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result), file=sys.stderr)
def _Fetch(self, projects, opt):
    fetched = set()
    pm = Progress('Fetching projects', len(projects))

    if self.jobs == 1:
        for project in projects:
            pm.update()
            if not opt.quiet:
                print('Fetching project %s' % project.name)
            if project.Sync_NetworkHalf(
                    quiet=opt.quiet,
                    current_branch_only=opt.current_branch_only,
                    clone_bundle=not opt.no_clone_bundle,
                    no_tags=opt.no_tags):
                fetched.add(project.gitdir)
            else:
                print('error: Cannot fetch %s' % project.name, file=sys.stderr)
                if opt.force_broken:
                    print('warn: --force-broken, continuing to sync', file=sys.stderr)
                else:
                    sys.exit(1)
    else:
        threads = set()
        lock = _threading.Lock()
        sem = _threading.Semaphore(self.jobs)
        err_event = _threading.Event()
        for project in projects:
            # Check for any errors before starting any new threads.
            # ...we'll let existing threads finish, though.
            if err_event.isSet():
                break

            sem.acquire()
            t = _threading.Thread(target = self._FetchHelper,
                                  args = (opt, project, lock, fetched, pm, sem, err_event))
            # Ensure that Ctrl-C will not freeze the repo process.
            t.daemon = True
            threads.add(t)
            t.start()

        for t in threads:
            t.join()

        # If we saw an error, exit with code 1 so that other scripts can check.
        if err_event.isSet():
            print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
            sys.exit(1)

    pm.end()
    self._fetch_times.Save()
    self._GCProjects(projects)
    return fetched
def mapImageSoft(srcImg, mesh, leftTop, rightBottom):
    progress = Progress()(0)
    dstImg = mh.Image(G.app.selectedHuman.getTexture())

    dstW = dstImg.width
    dstH = dstImg.height

    srcImg = srcImg.convert(dstImg.components)

    camera = getCamera(mesh)
    faces = getFaces(mesh)

    # log.debug('matrix: %s', G.app.modelCamera.getConvertToScreenMatrix())

    progress(0.05)
    texco = np.asarray([0,dstH])[None,None,:] + mesh.texco[mesh.fuvs[faces]] * np.asarray([dstW,-dstH])[None,None,:]
    matrix_ = np.asarray(G.app.modelCamera.getConvertToScreenMatrix(mesh))
    coord = np.concatenate((mesh.coord[mesh.fvert[faces]], np.ones((len(faces),4,1))), axis=-1)
    # log.debug('texco: %s, coord: %s', texco.shape, coord.shape)
    coord = np.sum(matrix_[None,None,:,:] * coord[:,:,None,:], axis = -1)
    # log.debug('coord: %s', coord.shape)
    coord = coord[:,:,:2] / coord[:,:,3:]
    progress(0.1)
    # log.debug('coord: %s', coord.shape)
    # log.debug('coords: %f-%f, %f-%f',
    #           np.amin(coord[...,0]), np.amax(coord[...,0]),
    #           np.amin(coord[...,1]), np.amax(coord[...,1]))
    # log.debug('rect: %s %s', leftTop, rightBottom)
    coord -= np.asarray([leftTop[0], leftTop[1]])[None,None,:]
    coord /= np.asarray([rightBottom[0] - leftTop[0], rightBottom[1] - leftTop[1]])[None,None,:]
    alpha = np.sum(mesh.vnorm[mesh.fvert[faces]] * camera[None,None,:], axis=-1)
    alpha = np.maximum(0, alpha)
    # alpha[...] = 1 # debug
    # log.debug('alpha: %s', alpha.shape)
    # log.debug('coords: %f-%f, %f-%f',
    #           np.amin(coord[...,0]), np.amax(coord[...,0]),
    #           np.amin(coord[...,1]), np.amax(coord[...,1]))
    progress(0.15)
    uva = np.concatenate((coord, alpha[...,None]), axis=-1)
    # log.debug('uva: %s', uva.shape)
    valid = np.any(alpha >= 0, axis=1)
    # log.debug('valid: %s', valid.shape)
    texco = texco[valid,:,:]
    uva = uva[valid,:,:]
    # log.debug('%s %s', texco.shape, uva.shape)
    # log.debug('src: %s, dst: %s', srcImg.data.shape, dstImg.data.shape)

    log.debug("mapImage: begin render")

    progress(0.2, 0.6)
    RasterizeTriangles(dstImg, texco[:,[0,1,2],:], UvAlphaShader(dstImg, srcImg, uva[:,[0,1,2],:]))
    progress(0.6, 0.99)
    RasterizeTriangles(dstImg, texco[:,[2,3,0],:], UvAlphaShader(dstImg, srcImg, uva[:,[2,3,0],:]))
    progress.finish()

    log.debug("mapImage: end render")

    return dstImg
def load(self, filename, update=True):
    from codecs import open
    log.message("Loading human from MHM file %s.", filename)
    progress = Progress()(0.0, 0.8)

    event = events3d.HumanEvent(self, 'load')
    event.path = filename
    self.callEvent('onChanging', event)

    self.resetMeshValues()
    self.blockEthnicUpdates = True

    subdivide = False

    f = open(filename, 'rU', encoding="utf-8")

    for lh in G.app.loadHandlers.values():
        lh(self, ['status', 'started'])

    lines = f.readlines()
    fprog = Progress(len(lines))
    for data in lines:
        lineData = data.split()
        if len(lineData) > 0 and not lineData[0] == '#':
            if lineData[0] == 'version':
                log.message('Version %s', lineData[1])
            elif lineData[0] == 'tags':
                for tag in lineData[1:]:
                    log.debug('Tag %s', tag)
            elif lineData[0] == 'subdivide':
                subdivide = lineData[1].lower() in ['true', 'yes']
            elif lineData[0] in G.app.loadHandlers:
                G.app.loadHandlers[lineData[0]](self, lineData)
            else:
                log.debug('Could not load %s', lineData)
        fprog.step()

    log.debug("Finalizing MHM loading.")
    for lh in set(G.app.loadHandlers.values()):
        lh(self, ['status', 'finished'])
    f.close()

    self.blockEthnicUpdates = False
    self._setEthnicVals()

    self.callEvent('onChanged', event)

    if update:
        progress(0.8, 0.9)
        self.applyAllTargets()

    progress(0.9, 0.99)
    self.setSubdivided(subdivide)

    progress(1.0)
    log.message("Done loading MHM file.")
def exportStlBinary(filepath, config, exportJoints = False):
    """
    filepath:
        *string*. The filepath of the file to export the object to.

    config:
        *Config*. Export configuration.
    """
    progress = Progress(0, None)
    human = config.human
    obj = human.meshData
    config.setupTexFolder(filepath)
    filename = os.path.basename(filepath)
    name = config.goodName(os.path.splitext(filename)[0])

    progress(0, 0.3, "Collecting Objects")
    rmeshes = exportutils.collect.setupMeshes(
        name,
        human,
        config=config,
        subdivide=config.subdivide)

    fp = open(filepath, 'wb')
    fp.write('\x00' * 80)
    fp.write(struct.pack('<I', 0))
    count = 0

    progress(0.3, 0.99, "Writing Objects")
    objprog = Progress(len(rmeshes))

    coord = config.scale * (obj.coord - config.offset)
    for rmesh in rmeshes:
        obj = rmesh.object
        for fn, fv in enumerate(obj.fvert):
            fno = obj.fnorm[fn]
            co = coord[fv]

            # Split each quad into two triangles.
            fp.write(struct.pack('<fff', fno[0], fno[1], fno[2]))
            fp.write(struct.pack('<fff', co[0][0], co[0][1], co[0][2]))
            fp.write(struct.pack('<fff', co[1][0], co[1][1], co[1][2]))
            fp.write(struct.pack('<fff', co[2][0], co[2][1], co[2][2]))
            fp.write(struct.pack('<H', 0))
            count += 1

            fp.write(struct.pack('<fff', fno[0], fno[1], fno[2]))
            fp.write(struct.pack('<fff', co[2][0], co[2][1], co[2][2]))
            fp.write(struct.pack('<fff', co[3][0], co[3][1], co[3][2]))
            fp.write(struct.pack('<fff', co[0][0], co[0][1], co[0][2]))
            fp.write(struct.pack('<H', 0))
            count += 1
        objprog.step()

    # Go back and fill in the real triangle count.
    fp.seek(80)
    fp.write(struct.pack('<I', count))

    progress(1, None, "STL export finished. Exported file: %s", filepath)
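# For reference, the records the exporter writes follow the standard binary
# STL layout: an 80-byte header, a little-endian uint32 triangle count, then
# one 50-byte record per triangle (normal, three vertices, uint16 attribute).
# A small self-contained check of the record size:
import struct

record = struct.pack('<12fH', *([0.0] * 12 + [0]))
print(len(record))   # 50 bytes: 12 floats (normal + 3 vertices) + uint16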
def verify_preview(request, pk=None):
    try:
        in_progress = models.UploadProgress.objects.get(pk=pk)
    except:
        raise Http404
    progress = Progress(in_progress)
    context = {}

    if request.method == 'POST':
        progress.write_database()
        return redirect('../../')
    else:
        if not progress.dates_verified():
            return redirect('../')
        if not progress.locations_verified():
            return redirect('../locations/')
        if not progress.varieties_verified():
            return redirect('../varieties/')
        if not progress.measures_verified():
            return redirect('../measures/')
        if not progress.statistics_verified():
            return redirect('../statistics/')

        year, table, summary, statnames, colnames, rownames = progress.prepare_table()
        preview_table = zip(rownames, table)
        preview_summary = zip(statnames, summary)
        context['year'] = year
        context['preview_table'] = preview_table
        context['preview_summary'] = preview_summary
        context['column_headers'] = ['', ] + colnames
        return render(request, 'verify-preview.html', context)
def writeLibraryGeometry(fp, meshes, config, shapes=None):
    progress = Progress(len(meshes), None)
    fp.write('\n  <library_geometries>\n')
    for mIdx, mesh in enumerate(meshes):
        if shapes is None:
            shape = None
        else:
            shape = shapes[mIdx]
        writeGeometry(fp, mesh, config, shape)
        progress.step()
    fp.write('  </library_geometries>\n')
def FromXml_Definition(self, old):
    """Convert another manifest representation to this one.
    """
    mp = self.manifestProject
    gm = self._modules
    gr = self._review

    fd = open(os.path.join(mp.worktree, '.gitignore'), 'ab')
    fd.write('/.repo\n')
    fd.close()

    sort_projects = list(old.projects.keys())
    sort_projects.sort()

    b = mp.GetBranch(mp.CurrentBranch).merge
    if b.startswith(R_HEADS):
        b = b[len(R_HEADS):]

    if old.notice:
        gm.SetString('repo.notice', old.notice)

    info = []
    pm = Progress('Converting manifest', len(sort_projects))
    for p in sort_projects:
        pm.update()
        p = old.projects[p]

        gm.SetString('submodule.%s.path' % p.name, p.relpath)
        gm.SetString('submodule.%s.url' % p.name, p.remote.url)

        if gr.GetString('review.url') is None:
            gr.SetString('review.url', p.remote.review)
        elif gr.GetString('review.url') != p.remote.review:
            gr.SetString('review.%s.url' % p.name, p.remote.review)

        r = p.revisionExpr
        if r and not IsId(r):
            if r.startswith(R_HEADS):
                r = r[len(R_HEADS):]
            if r == b:
                r = '.'
            gm.SetString('submodule.%s.revision' % p.name, r)

        for c in p.copyfiles:
            info.append('Moved %s out of %s' % (c.src, p.relpath))
            c._Copy()
            p.work_git.rm(c.src)
            mp.work_git.add(c.dest)

        self.SetRevisionId(p.relpath, p.GetRevisionId())
    mp.work_git.add('.gitignore', '.gitmodules', '.review')
    pm.end()

    for i in info:
        print >>sys.stderr, i
def _Fetch(self, projects):
    fetched = set()
    pm = Progress('Fetching projects', len(projects))
    for project in projects:
        pm.update()
        if project.Sync_NetworkHalf():
            fetched.add(project.gitdir)
        else:
            print >>sys.stderr, 'error: Cannot fetch %s' % project.name
            sys.exit(1)
    pm.end()
    return fetched
def mapImageGL(srcImg, mesh, leftTop, rightBottom):
    progress = Progress()(0)
    log.debug("mapImageGL: 1")

    dstImg = G.app.selectedHuman.meshData.object3d.textureTex

    dstW = dstImg.width
    dstH = dstImg.height

    left, top = leftTop
    right, bottom = rightBottom

    camera = getCamera(mesh)

    coords = mesh.r_texco

    texmat = G.app.modelCamera.getConvertToScreenMatrix(mesh)
    texmat = matrix.scale((1 / (right - left), 1 / (top - bottom), 1)) * matrix.translate((-left, -bottom, 0)) * texmat
    texmat = np.asarray(texmat)

    texco = mesh.r_coord

    alpha = np.sum(mesh.r_vnorm * camera[None, :], axis=-1)
    alpha = np.maximum(alpha, 0)
    color = (np.array([0, 0, 0, 0])[None, ...] + alpha[..., None]) * 255
    color = np.ascontiguousarray(color, dtype=np.uint8)
    texco = np.ascontiguousarray(texco, dtype=np.float32)

    progress(0.5, 0.99)
    result = mh.renderSkin(dstImg, mesh.vertsPerPrimitive, coords,
                           index=mesh.index, texture=srcImg, UVs=texco,
                           textureMatrix=texmat, color=color, clearColor=None)

    progress(1)
    return result
def writeMorphController(fp, mesh, shapes, config):
    progress = Progress()
    progress(0, 0.7)
    nShapes = len(shapes)

    fp.write(
        '    <controller id="%sMorph" name="%sMorph">\n' % (mesh.name, mesh.name) +
        '      <morph source="#%sMesh" method="NORMALIZED">\n' % (mesh.name) +
        '        <source id="%sTargets">\n' % (mesh.name) +
        '          <IDREF_array id="%sTargets-array" count="%d">' % (mesh.name, nShapes))

    for key, _ in shapes:
        fp.write(" %sMeshMorph_%s" % (mesh.name, key))

    fp.write(
        '          </IDREF_array>\n' +
        '          <technique_common>\n' +
        '            <accessor source="#%sTargets-array" count="%d" stride="1">\n' % (mesh.name, nShapes) +
        '              <param name="IDREF" type="IDREF"/>\n' +
        '            </accessor>\n' +
        '          </technique_common>\n' +
        '        </source>\n' +
        '        <source id="%sWeights">\n' % (mesh.name) +
        '          <float_array id="%sWeights-array" count="%d">' % (mesh.name, nShapes))

    progress(0.7, 0.99)

    fp.write(nShapes * " 0")

    fp.write('\n' +
        '          </float_array>\n' +
        '          <technique_common>\n' +
        '            <accessor source="#%sWeights-array" count="%d" stride="1">\n' % (mesh.name, nShapes) +
        '              <param name="MORPH_WEIGHT" type="float"/>\n' +
        '            </accessor>\n' +
        '          </technique_common>\n' +
        '        </source>\n' +
        '        <targets>\n' +
        '          <input semantic="MORPH_TARGET" source="#%sTargets"/>\n' % (mesh.name) +
        '          <input semantic="MORPH_WEIGHT" source="#%sWeights"/>\n' % (mesh.name) +
        '        </targets>\n' +
        '      </morph>\n' +
        '    </controller>\n')

    progress(1)
def FromXml_Definition(self, old):
    """Convert another manifest representation to this one.
    """
    mp = self.manifestProject
    gm = self._modules
    gr = self._review

    fd = open(os.path.join(mp.worktree, '.gitignore'), 'ab')
    fd.write('/.repo\n')
    fd.close()

    sort_projects = list(old.projects.keys())
    sort_projects.sort()

    b = mp.GetBranch(mp.CurrentBranch).merge
    if b.startswith(R_HEADS):
        b = b[len(R_HEADS):]

    info = []
    pm = Progress('Converting manifest', len(sort_projects))
    for p in sort_projects:
        pm.update()
        p = old.projects[p]

        gm.SetString('submodule.%s.path' % p.name, p.relpath)
        gm.SetString('submodule.%s.url' % p.name, p.remote.url)

        if gr.GetString('review.url') is None:
            gr.SetString('review.url', p.remote.review)
        elif gr.GetString('review.url') != p.remote.review:
            gr.SetString('review.%s.url' % p.name, p.remote.review)

        r = p.revisionExpr
        if r and not IsId(r):
            if r.startswith(R_HEADS):
                r = r[len(R_HEADS):]
            if r == b:
                r = '.'
            gm.SetString('submodule.%s.revision' % p.name, r)

        for c in p.copyfiles:
            info.append('Moved %s out of %s' % (c.src, p.relpath))
            c._Copy()
            p.work_git.rm(c.src)
            mp.work_git.add(c.dest)

        self.SetRevisionId(p.relpath, p.GetRevisionId())
    mp.work_git.add('.gitignore', '.gitmodules', '.review')
    pm.end()

    for i in info:
        print >>sys.stderr, i
def exportObj(filepath, config=None):
    progress = Progress(0, None)
    if config is None:
        config = exportutils.config.Config()
    human = config.human
    config.setupTexFolder(filepath)

    filename = os.path.basename(filepath)
    name = config.goodName(os.path.splitext(filename)[0])

    progress(0, 0.3, "Collecting Objects")
    objects = [m.object.mesh if isinstance(m, proxy.Proxy) else m.mesh
               for m in human.getMeshes()]

    progress(0.3, 0.99, "Writing Objects")
    wavefront.writeObjFile(filepath, objects, True, config)

    progress(1.0, None, "OBJ Export finished. Output file: %s" % filepath)
def mapUVGL(mesh):
    """
    Project the UV map topology of the selected human mesh onto a texture
    (hardware accelerated).
    """
    progress = Progress()(0)
    W = 2048
    H = 2048

    dstImg = mh.Texture(size=(W, H), components=3)

    log.debug("mapUVGL: begin setup")

    fuvs = mesh.index
    edges = np.array([fuvs, np.roll(fuvs, 1, axis=-1)]).transpose([1, 2, 0]).reshape((-1, 2))
    del fuvs
    # Canonicalize edge direction, then deduplicate by packing both
    # endpoint indices into a single 32-bit key.
    edges = np.where((edges[:, 0] < edges[:, 1])[:, None], edges, edges[:, ::-1])
    ec = edges[:, 0] + (edges[:, 1] << 16)
    del edges
    ec = np.unique(ec)
    edges = np.array([ec & 0xFFFF, ec >> 16]).transpose()
    del ec

    log.debug("mapUVGL: begin render")

    coords = mesh.r_texco
    edges = np.ascontiguousarray(edges, dtype=np.uint32)
    progress(0.6, 0.99)
    dstImg = mh.renderSkin(dstImg, 2, coords, index=edges, clearColor=(0, 0, 0, 255))

    log.debug("mapUV: end render")

    progress(1)
    return dstImg.convert(3)
def add_stats(request, pk=None):
    def create_add_stat_form(Formcls, prefix):
        locations = progress.progress['locations'].keys()
        initial = {
            'data': 'hello\nhey\n',
            'data_json': json.dumps({
                'data': [[None] * len(locations)],
                'rowname': [''],
                'headers': locations,
            }),
        }
        return Formcls(initial=initial, prefix=prefix)

    try:
        in_progress = models.UploadProgress.objects.get(pk=pk)
    except:
        raise Http404
    progress = Progress(in_progress)

    raise NotImplementedError('add statistics uses a spreadsheet view, not written')

    try:
        variety_map = progress.progress['varieties']
        if not variety_map:
            variety_map = {None: {'pk': -1}}
    except KeyError:
        variety_map = {None: {'pk': -1}}

    unknown_names = [name for name in variety_map if variety_map[name]['pk'] == -1]

    return add_model(
        request,
        progress,
        unknown_names=unknown_names,
        template='add-stats.html',
        Formcls=forms.AddStatistic,
        form_fieldname='none',
        input_fieldname='name',
        map_name=progress.map_variety,
        is_verified=progress.varieties_verified,
        model_cache=utils.create_variety_model_cache(),
    )
def __init__(self, parentWidget, downloadTuples, onFinished=None, onProgress=None, overrideProgressSteps=None):
    self.log = mhapi.utility.getLogChannel("assetdownload")

    self.parentWidget = parentWidget
    self.onFinished = onFinished
    self.onProgress = onProgress
    self.overrideProgressSteps = overrideProgressSteps

    self.downloadThread = DownloadThread(downloadTuples, overrideProgressSteps = self.overrideProgressSteps)

    self.downloadThread.signalProgress.connect(self._onProgress)
    self.downloadThread.signalFinished.connect(self._onFinished)

    self.progress = Progress()

    self.log.debug("About to start downloading")
    self.log.spam("downloadTuples", downloadTuples)

    self.downloadThread.start()
def setSubdivided(self, flag, *args, **kwargs):
    if flag != self.isSubdivided():
        proxies = [obj for obj in self.getProxyObjects() if obj]
        progress = Progress([len(self.mesh.coord)] + [len(obj.mesh.coord) for obj in proxies])

        guicommon.Object.setSubdivided(self, flag, *args, **kwargs)
        progress.step()

        for obj in proxies:
            obj.setSubdivided(flag, *args, **kwargs)
            progress.step()

        self.callEvent('onChanged', events3d.HumanEvent(self, 'smooth'))
def _Fetch(self, projects, opt):
    fetched = set()
    pm = Progress('Fetching projects', len(projects))

    if self.jobs == 1:
        for project in projects:
            pm.update()
            if project.Sync_NetworkHalf(quiet=opt.quiet):
                fetched.add(project.gitdir)
            else:
                print >>sys.stderr, 'error: Cannot fetch %s' % project.name
                if opt.force_broken:
                    print >>sys.stderr, 'warn: --force-broken, continuing to sync'
                else:
                    sys.exit(1)
    else:
        threads = set()
        lock = _threading.Lock()
        sem = _threading.Semaphore(self.jobs)
        err_event = _threading.Event()
        for project in projects:
            # Check for any errors before starting any new threads.
            # ...we'll let existing threads finish, though.
            if err_event.isSet():
                break

            sem.acquire()
            t = _threading.Thread(target = self._FetchHelper,
                                  args = (opt, project, lock, fetched, pm, sem, err_event))
            threads.add(t)
            t.start()

        for t in threads:
            t.join()

        # If we saw an error, exit with code 1 so that other scripts can check.
        if err_event.isSet():
            print >>sys.stderr, '\nerror: Exited sync due to fetch errors'
            sys.exit(1)

    pm.end()
    for project in projects:
        project.bare_git.gc('--auto')
    return fetched
def mapLightingGL(lightpos = (-10.99, 20.0, 20.0), mesh = None, res = (1024, 1024), border = 1):
    """
    Create a lightmap for the selected human (hardware accelerated).
    """
    progress = Progress()(0)
    if mesh is None:
        mesh = G.app.selectedHuman.mesh
    W = res[0]
    H = res[1]

    delta = lightpos - mesh.coord
    ld = vnormalize(delta)
    del delta
    s = np.sum(ld * mesh.vnorm, axis=-1)
    del ld
    s = np.maximum(0, np.minimum(255, (s * 256))).astype(np.uint8)
    mesh.color[..., :3] = s[..., None]
    mesh.color[..., 3] = 255
    del s
    progress(0.1)

    mesh.markCoords(colr = True)
    mesh.update()

    coords = mesh.r_texco
    colors = mesh.r_color

    progress(0.2, 0.5)
    dstImg = mh.renderSkin((W, H), mesh.vertsPerPrimitive, coords,
                           index = mesh.index, color = colors,
                           clearColor = (0, 0, 0, 0))

    progress(0.5, 0.99)
    dstImg = fixSeams(dstImg, mesh, border)

    mesh.setColor([255, 255, 255, 255])

    log.debug('mapLightingGL: %s', dstImg.data.shape)

    progress(1)
    return dstImg
def exportOgreMesh(filepath, config):
    progress = Progress.begin()

    progress(0, 0.05, "Preparing export")
    human = config.human

    # TODO account for config.scale in skeleton
    config.setupTexFolder(filepath)  # TODO unused

    progress(0.05, 0.2, "Collecting Objects")
    objects = human.getObjects(excludeZeroFaceObjs=True)

    progress(0.2, 0.95 - 0.35 * bool(human.getSkeleton()))
    writeMeshFile(human, filepath, objects, config)

    if human.getSkeleton():
        progress(0.6, 0.95, "Writing Skeleton")
        writeSkeletonFile(human, filepath, config)

    progress(0.95, 0.99, "Writing Materials")
    writeMaterialFile(human, filepath, objects, config)

    progress(1.0, None, "Ogre export finished.")
def test_init__nondefaults(self):
    expected = {
        "status": {'0': [1298924180, 1298924380]},
        "steps": ['1', '2', '3'],
        "id": "ip-10-122-7-227.ec2.internal",
        "pidfile": "/pidfile",
        "error": ""
    }
    Progress(self.file.name,
             pidfile=expected['pidfile'],
             steps=expected['steps'],
             status=expected['status'],
             id=expected['id'])
    f = open(self.file.name)
    data = json.loads(f.read())
    f.close()
    for k, v in expected.items():
        self.assertEquals(data.get(k), v)
def Execute(self, opt, args):
    nb = args[0]
    err = defaultdict(list)
    success = defaultdict(list)
    all_projects = self.GetProjects(args[1:])

    pm = Progress('Abandon %s' % nb, len(all_projects))
    for project in all_projects:
        pm.update()

        if opt.all:
            branches = list(project.GetBranches().keys())
        else:
            branches = [nb]

        for name in branches:
            status = project.AbandonBranch(name)
            if status is not None:
                if status:
                    success[name].append(project)
                else:
                    err[name].append(project)
    pm.end()

    width = 25
    for name in branches:
        if width < len(name):
            width = len(name)

    if err:
        for br in err.keys():
            err_msg = "error: cannot abandon %s" % br
            print(err_msg, file=sys.stderr)
            for proj in err[br]:
                print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
        sys.exit(1)
    elif not success:
        print('error: no project has local branch(es) : %s' % nb, file=sys.stderr)
        sys.exit(1)
    else:
        print('Abandoned branches:', file=sys.stderr)
        for br in success.keys():
            if len(all_projects) > 1 and len(all_projects) == len(success[br]):
                result = "all project"
            else:
                result = "%s" % (
                    ('\n' + ' ' * width + '| ').join(p.relpath for p in success[br]))
            print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result), file=sys.stderr)
def _crawl(self, data: list) -> list:
    """
    Retrieves and stores results from crawling over `url` response in `self._data`
    """
    self.smph = th.Semaphore(self.MAX_THREADS)
    self.miss_lock = th.Lock()
    self.data_lock = th.Lock()

    total = len(data)
    with Progress(total) as self.prog:
        for url, *args in data:
            self.smph.acquire()
            new_thread = th.Thread(target=self._crawl_this, args=(url, *args))
            new_thread.start()
        self.prog.wait()

    return self._miss
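# The method above bounds concurrency with a semaphore: acquire a slot
# before spawning a thread, release it when the worker finishes. A minimal
# self-contained sketch of that pattern (the names are illustrative):
import threading

MAX_THREADS = 4
smph = threading.Semaphore(MAX_THREADS)

def worker(item):
    try:
        pass                  # do the actual work for `item` here
    finally:
        smph.release()        # free a slot so another thread may start

threads = []
for item in range(10):
    smph.acquire()            # blocks once MAX_THREADS workers are live
    t = threading.Thread(target=worker, args=(item,))
    t.start()
    threads.append(t)
for t in threads:
    t.join()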
def read(self, fname):
    """ Load data from the file """
    self.fname = fname
    # Read the header information
    with open(self.fname, 'rb') as fd:
        self.header = fd.read(178)
        self.parse_header()
        # self.framecount = 2 # ?????????????????
        self.frames = np.zeros((self.framecount, self.imageheight, self.imagewidth))
        tmp_array = np.zeros(self.imagewidth)
        dt = np.dtype(np.int16)
        dt = dt.newbyteorder('<')
        for frame in range(self.framecount):
            Progress(frame, self.framecount)
            t_frame = fd.read(self.imageheight * self.imagewidth * self.pixeldepthperplane // 8)
            # for line in range(self.imageheight):
            #     for pixel in range(self.imagewidth):
            #         index = (line * self.imagewidth + pixel) * 2
            #         self.frames[frame][line][pixel] = struct.unpack('<H', t_frame[index:index+2])[0]
            # logging.debug(self.frames[frame][0])
            for line in range(self.imageheight):
                # t_line = fd.read(self.imagewidth * self.pixeldepthperplane//8)
                t_line = t_frame[line * self.imagewidth * 2:(line * self.imagewidth + self.imagewidth) * 2]
                t_a = np.frombuffer(t_line, dtype=dt)
                self.frames[frame][line] = t_a
                # logging.debug(t_line[:7])
            # logging.debug(self.frames[frame][0])
            # logging.debug('')
        self.trailer = fd.read(self.framecount * 8)
    self.parse_trailer()
def exportObj(human, filepath, config=None):
    progress = Progress(0, None)
    if config is None:
        config = exportutils.config.Config()
    config.setHuman(human)
    config.setupTexFolder(filepath)

    filename = os.path.basename(filepath)
    name = config.goodName(os.path.splitext(filename)[0])

    progress(0, 0.3, "Collecting Objects")
    rmeshes = exportutils.collect.setupMeshes(
        name,
        human,
        config=config,
        subdivide=config.subdivide)

    progress(0.3, 0.99, "Writing Objects")
    objects = [rmesh.object for rmesh in rmeshes]
    wavefront.writeObjFile(filepath, objects, True, config)

    progress(1.0, None, "OBJ Export finished. Output file: %s" % filepath)
def exportOgreMesh(filepath, config):
    progress = Progress.begin()

    progress(0, 0.05, "Setting properties")  # TODO this leads to a disastrous amount of confusion among translators
    human = config.human

    # TODO account for config.scale in skeleton
    config.setupTexFolder(filepath)  # TODO unused

    progress(0.05, 0.2, "Collecting Objects")
    objects = human.getObjects(excludeZeroFaceObjs=True)

    progress(0.2, 0.95 - 0.35 * bool(human.getSkeleton()))
    writeMeshFile(human, filepath, objects, config)

    if human.getSkeleton():
        progress(0.6, 0.95, "Writing Skeleton")
        writeSkeletonFile(human, filepath, config)

    progress(0.95, 0.99, "Writing Materials")
    writeMaterialFile(human, filepath, objects, config)

    progress(1.0, None, "Ogre export finished.")
def training_patches(imnames, npatches, psize, maxdim=None, colour=False,
                     verbose=False):
    """
    Extract patches from images for dictionary training.

    Arguments:
        imnames: A list of image names from which to extract training patches.
        npatches: The number (int) of patches to extract from the images.
        psize: An int of the size of the square patches to extract.
        maxdim: The maximum dimension of the image in pixels. The image is
            rescaled if it is larger than this. By default there is no scaling.
        colour: bool, extract RGB patches instead of grayscale.
        verbose: bool, print a progress bar.

    Returns:
        An np.array (npatches, psize**2*3) for RGB or (npatches, psize**2)
        for grey of flattened image patches. NOTE, the actual npatches found
        may be less than that requested.
    """
    nimg = len(imnames)
    ppeimg = int(round(float(npatches) / nimg))
    plist = []

    # Set up progress updates
    progbar = Progress(nimg, title='Extracting patches', verbose=verbose)

    for i, ims in enumerate(imnames):
        img = imread_resize(ims, maxdim)  # read in and resize the image
        spacing = max(int(round(img.shape[1] * ppeimg**(-0.5))), 1)

        # Extract patches and map to grayscale if necessary
        if (colour == False) and (img.ndim == 3):
            imgg = rgb2gray(img)
            plist.append(grid_patches(imgg, psize, spacing)[0])
        else:
            plist.append(grid_patches(img, psize, spacing)[0])

        progbar.update(i)

    progbar.finished()
    patches = np.concatenate(plist, axis=0)
    return np.reshape(patches, (patches.shape[0], np.prod(patches.shape[1:])))
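# A hedged usage sketch; the image paths are hypothetical and the output
# shape assumes 8x8 grayscale patches (psize**2 = 64 columns):
imnames = ['images/scene1.png', 'images/scene2.png']
patches = training_patches(imnames, npatches=10000, psize=8,
                           maxdim=512, colour=False, verbose=True)
print(patches.shape)   # roughly (10000, 64); may be fewer rows than requested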
def reporter():
    n_left = Work.qsize()
    time_p = Progress(design='t', max_val=n_work)
    prog_p = Progress(design='p', max_val=n_work)
    status_format = "{0:<10} {1:<25} {2:<25}"

    while n_left > 0:
        # Check progress
        n_made = len(output)
        n_left = Work.qsize()
        n_done = n_work - n_left

        # Calculate speed and time left
        progress = prog_p.to_string(n_done)
        found = str(n_made) + " paths found."
        t_out = "Time left: " + time_p.to_string(n_done)
        status = status_format.format(progress, found, t_out)
        s_out('\r' + status)
        time.sleep(1)
def add_locations(request, pk=None):
    try:
        in_progress = models.UploadProgress.objects.get(pk=pk)
    except:
        raise Http404
    progress = Progress(in_progress)

    try:
        locations_map = progress.progress['locations']
    except KeyError:
        locations_map = None
    try:
        location_map = progress.progress['location']
    except KeyError:
        location_map = None

    if location_map:
        _locations = {None: location_map}
    elif locations_map:
        _locations = locations_map
    else:
        _locations = {None: {'pk': -1, 'planting_methods': -1}}

    unknown_names = [name for name in _locations if _locations[name]['pk'] == -1]

    return add_model(
        request,
        progress,
        unknown_names=unknown_names,
        template='add-locations.html',
        Formcls=forms.AddLocation,
        form_fieldname='location',
        input_fieldname='name',
        map_name=progress.map_planting_methods,
        is_verified=progress.locations_verified,
        next_url='planting-methods/',
        model_cache=utils.create_location_model_cache(),
    )
def FromXml_Local_2(self, old):
    shutil.rmtree(old.manifestProject.worktree)
    os.remove(old._manifestFile)

    my_remote = self._Remote().name
    new_base = os.path.join(self.repodir, 'projects')
    old_base = os.path.join(self.repodir, 'projects.old')
    os.rename(new_base, old_base)
    os.makedirs(new_base)

    info = []
    pm = Progress('Converting projects', len(self.projects))
    for p in self.projects.values():
        pm.update()

        old_p = old.projects.get(p.name)

        old_gitdir = os.path.join(old_base, '%s.git' % p.relpath)
        if not os.path.isdir(old_gitdir):
            continue

        parent = os.path.dirname(p.gitdir)
        if not os.path.isdir(parent):
            os.makedirs(parent)
        os.rename(old_gitdir, p.gitdir)
        _rmdir(os.path.dirname(old_gitdir), self.repodir)

        if not os.path.isdir(p.worktree):
            os.makedirs(p.worktree)

        if os.path.isdir(os.path.join(p.worktree, '.git')):
            p._LinkWorkTree(relink=True)

        self._CleanOldMRefs(p)
        if old_p and old_p.remote.name != my_remote:
            info.append("%s/: renamed remote '%s' to '%s'"
                        % (p.relpath, old_p.remote.name, my_remote))
            p.bare_git.remote('rename', old_p.remote.name, my_remote)
            p.config.ClearCache()

        self.SetMRefs(p)
    pm.end()
    for i in info:
        print >>sys.stderr, i
def load_batch(self, batch_size=1, test=False):
    data_type = "train" if not test else "val"
    path_A = glob('./datasets/train/%s/%sA/*' % (self.name, data_type))
    path_B = glob('./datasets/train/%s/%sB/*' % (self.name, data_type))

    self.number_of_batches = int(min(len(path_A), len(path_B)) / batch_size)
    total_samples = self.number_of_batches * batch_size

    path_A = np.random.choice(path_A, total_samples, replace=False)
    path_B = np.random.choice(path_B, total_samples, replace=False)

    progress = Progress(self.number_of_batches - 1)
    for index in range(self.number_of_batches - 1):
        batch_A = path_A[index * batch_size:(index + 1) * batch_size]
        batch_B = path_B[index * batch_size:(index + 1) * batch_size]
        images_A, images_B = [], []
        for image_A, image_B in zip(batch_A, batch_B):
            imageA = self.read_image(image_A)
            imageB = self.read_image(image_B)
            #image_A = Image.fromarray(imageA).resize(128, 128)
            #image_B = Image.fromarray(imageB).resize(128, 128)
            image_A = scipy.misc.imresize(imageA, self.resolution)
            image_B = scipy.misc.imresize(imageB, self.resolution)

            # Randomly flip both images horizontally for augmentation.
            if not test and np.random.random() > 0.5:
                image_A = np.fliplr(image_A)
                image_B = np.fliplr(image_B)

            images_A.append(image_A)
            images_B.append(image_B)

        # Normalize pixel values to [-1, 1].
        imgs_A = np.array(images_A) / 127.5 - 1.
        imgs_B = np.array(images_B) / 127.5 - 1.

        progress.update(index, 'Loading images')
        yield imgs_A, imgs_B
    progress.stop()
def stochastic_page_rank(graph, node, n_iter=1000000, n_steps=100):
    """Stochastic PageRank estimation

    Parameters:
    graph -- a graph object as returned by load_graph()
    n_iter (int) -- number of random walks performed
    n_steps (int) -- number of followed links before random walk is stopped

    Returns:
    A dict that assigns each page its hit frequency

    This function estimates the Page Rank by counting how frequently
    a random walk that starts on a random node will after n_steps end
    on each node of the given graph.
    """
    # Set up the hit counter and the progress bar
    Nodecount = {}
    prog = Progress(n_iter, "Performing stochastic page rank. This may take a while.")
    for nodes in range(0, len(list(graph.nodes))):
        # Initialize every node in the dictionary with a count of 0
        Nodecount[list(graph.nodes)[nodes]] = 0

    for i in range(0, n_iter):
        prog += 1  # advance the progress bar
        prog.show()
        node = randomnodechooser(graph)
        # Follow n_steps random links starting from the randomly chosen node
        for x in range(0, n_steps):
            # Select a random index into the node's list of edges,
            # then follow that edge to its target node
            RandomNodenumber = random.randint(0, int(len(graph.edges(node))) - 1)
            node = (list(graph.edges(node))[RandomNodenumber])[1]
        # Increase the node we finally landed on by 1/n_iter;
        # the most frequently hit nodes end up with the largest values
        Nodecount[node] += (1 / n_iter)
    prog.finish()
    return Nodecount
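# A hedged usage sketch, assuming a networkx-style directed graph (the
# function only relies on graph.nodes, graph.edges(node) and the external
# randomnodechooser() helper). The `node` argument is overwritten inside,
# so any placeholder works:
import networkx as nx

g = nx.DiGraph([('a', 'b'), ('b', 'c'), ('c', 'a'), ('a', 'c')])
ranks = stochastic_page_rank(g, None, n_iter=10000, n_steps=10)
print(sorted(ranks.items(), key=lambda kv: kv[1], reverse=True))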
def Execute(self, opt, args):
    if not args:
        self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
        print("error: '%s' is not a valid name" % nb, file=sys.stderr)
        sys.exit(1)

    err = []
    projects = []
    if not opt.all:
        projects = args[1:]
        if len(projects) < 1:
            print("error: at least one project must be specified", file=sys.stderr)
            sys.exit(1)

    all_projects = self.GetProjects(projects)

    pm = Progress('Starting %s' % nb, len(all_projects))
    for project in all_projects:
        pm.update()
        # If the current revision is a specific SHA1 then we can't push back
        # to it; so substitute with dest_branch if defined, or with manifest
        # default revision instead.
        if IsId(project.revisionExpr):
            if project.dest_branch:
                project.revisionExpr = project.dest_branch
            else:
                project.revisionExpr = self.manifest.default.revisionExpr
        if not project.StartBranch(nb):
            err.append(project)
    pm.end()

    if err:
        for p in err:
            print("error: %s/: cannot start %s" % (p.relpath, nb),
                  file=sys.stderr)
        sys.exit(1)
def writePolylist(fp, rmesh, config):
    progress = Progress(2)
    fvert = rmesh.getFvert()
    nFaces = len(fvert)

    fp.write(
        '        <polylist count="%d">\n' % nFaces +
        '          <input offset="0" semantic="VERTEX" source="#%s-Vertex"/>\n' % rmesh.name)

    if config.useNormals:
        fp.write(
            '          <input offset="1" semantic="NORMAL" source="#%s-Normals"/>\n' % rmesh.name +
            '          <input offset="2" semantic="TEXCOORD" source="#%s-UV"/>\n' % rmesh.name +
            '          <vcount>')
    else:
        fp.write(
            '          <input offset="1" semantic="TEXCOORD" source="#%s-UV"/>\n' % rmesh.name +
            '          <vcount>')

    fp.write(''.join(["4 " for fv in fvert]))

    fp.write('\n' +
        '          </vcount>\n' +
        '          <p>')
    progress.step()

    fuvs = rmesh.getFuvs()
    for fn, fv in enumerate(fvert):
        fuv = fuvs[fn]
        if config.useNormals:
            fp.write(''.join([("%d %d %d " % (fv[n], fn, fuv[n])) for n in range(4)]))
        else:
            fp.write(''.join([("%d %d " % (fv[n], fuv[n])) for n in range(4)]))

    fp.write(
        '          </p>\n' +
        '        </polylist>\n')
    progress.step()
def content_based_batch_gradient_descent(self):
    if self.learning_rate is None or self.regularized_factor is None:
        return False

    total_iteration = 1000
    progress = Progress('Content-based Gradient Descent', total_iteration)
    log = []
    current_iteration = 1
    while current_iteration <= total_iteration:
        progress.report(current_iteration, self.content_based_cost)

        # ==> Compute partial derivatives
        # Derivative of cost function wrt movie features
        dj_duser = dict()
        for user_id in self.users:
            user = self.users[user_id]
            n = len(user.theta)
            dj_duser[user.id] = []
            for k in range(0, n):
                if k == 0:
                    dj_duser[user.id].append(self.dj_wrt_user_theta_k0(user))
                else:
                    dj_duser[user.id].append(self.dj_wrt_user_theta_k(user, k))

        # Apply gradient descent
        for user_id in dj_duser:
            dj_dtheta = dj_duser[user_id]
            user = self.users[user_id]
            n = len(user.theta)
            for k in range(0, n):
                user.theta[k] = user.theta[k] - (self.learning_rate * dj_dtheta[k])

        current_iteration += 1

    progress.complete()
    return log
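# The inner update above is plain batch gradient descent:
# theta_k <- theta_k - alpha * dJ/dtheta_k. A tiny self-contained example
# of the same rule on J(theta) = (theta - 3)^2, whose gradient is
# 2 * (theta - 3):
alpha = 0.1
theta = 0.0
for _ in range(100):
    grad = 2 * (theta - 3)
    theta -= alpha * grad
print(theta)   # converges to ~3.0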
def distribution_page_rank(graph, n_iter=100):
    """Probabilistic PageRank estimation

    Parameters:
    graph -- a graph object as returned by load_graph()
    n_iter (int) -- number of probability distribution updates

    Returns:
    A dict that assigns each page its probability to be reached

    This function estimates the Page Rank by iteratively calculating
    the probability that a random walker is currently on any node.
    """
    # Set up the probability dictionary and the progress bar
    node_prob = {}
    prog = Progress(n_iter, "Performing distribution page rank. This may take a while.")
    for z in range(0, len(list(graph.nodes))):
        # Start with a uniform distribution: 1/n for each of the n nodes
        node_prob[list(graph.nodes)[z]] = 1 / len(list(graph.nodes))

    for i in range(0, n_iter):
        prog += 1  # advance the progress bar
        prog.show()
        next_prob = {}  # the updated distribution
        for count in range(0, len(graph.nodes)):
            # Default every node's updated probability to 0
            next_prob[list(graph.nodes)[count]] = 0
        for node in range(0, len((graph.nodes))):
            Currentnode = list(graph.nodes)[node]
            # Each out-edge receives an equal share of the node's probability
            p = node_prob[Currentnode] / len(graph.edges(Currentnode))
            for edges in range(0, len(graph.edges(Currentnode))):
                # Increase every successor's probability by p
                CurrentEdges = list(graph.edges(Currentnode))
                next_prob[CurrentEdges[edges][1]] += p
        # Replace the old distribution with the updated one; unlike the
        # stochastic variant, this deterministic update is not thrown off
        # by random chance
        node_prob = next_prob
    prog.finish()
    return node_prob
def Execute(self, opt, args):
    if not args:
        self.Usage()

    nb = args[0]
    if not git.check_ref_format('heads/%s' % nb):
        print("error: '%s' is not a valid name" % nb, file=sys.stderr)
        sys.exit(1)

    err = []
    success = []
    all_projects = self.GetProjects(args[1:])

    pm = Progress('Abandon %s' % nb, len(all_projects))
    for project in all_projects:
        pm.update()

        status = project.AbandonBranch(nb)
        if status is not None:
            if status:
                success.append(project)
            else:
                err.append(project)
    pm.end()

    if err:
        for p in err:
            print("error: %s/: cannot abandon %s" % (p.relpath, nb),
                  file=sys.stderr)
        sys.exit(1)
    elif not success:
        print('error: no project has branch %s' % nb, file=sys.stderr)
        sys.exit(1)
    else:
        print('Abandoned in %d project(s):\n  %s'
              % (len(success), '\n  '.join(p.relpath for p in success)),
              file=sys.stderr)
def load_data_source(self, path="database/source", progress: Progress = None):
    c = self.cursor()
    path = get_path(path)
    for root, dirs, files in os.walk(path):
        number = len(files)
        progress_step = 1 / number
        progress_value = 0
        if progress:
            progress.set(progress_value)
        for filename in files:
            obj = DataSource(filename)
            date = obj.get_date()
            data = obj.get_data()
            sql = "INSERT INTO `data` VALUES ('%s', ?, ?, ?, ?, ?)" % date
            c.executemany(sql, data)
            self.commit()
            progress_value += progress_step
            if progress:
                progress.set(progress_value)
    if progress:
        progress.set(1)