def switchCategory(self, name):
    """Make the named category the active one in the UI.

    Silently does nothing when the category is already active, does not
    exist, or has no tasks to show.  Hides the previously active
    category (if any) before showing the new one, then switches to the
    category's current task.
    """
    # Do we need to switch at all
    if self.currentCategory and self.currentCategory.name == name:
        return
    # Does the category exist
    if not name in self.categories:
        return
    category = self.categories[name]
    # Does the category have at least one view
    if len(category.tasks) == 0:
        return
    if self.currentCategory:
        log.debug('hiding category %s', self.currentCategory.name)
        self.currentCategory.hide()
        self.currentCategory.hideWidgets()
    self.currentCategory = category
    log.debug('showing category %s', self.currentCategory.name)
    self.currentCategory.show()
    self.currentCategory.showWidgets()
    # Activate the category's task (presumably the last selected one —
    # TODO confirm the semantics of category.task against its definition).
    self.switchTask(category.task)
def projectBackground(self):
    """Project the currently shown background image onto the human mesh
    texture and enable shadeless rendering so the result is visible.

    Warns and aborts when no background is loaded for the current view.
    The change is applied through an undoable ProjectionAction.
    """
    if not self.backgroundChooserView.isBackgroundShowing():
        gui3d.app.prompt("Warning", "You need to load a background for the current view before you can project it.", "OK")
        return

    mesh = self.human.getSeedMesh()

    # for all quads, project vertex to screen
    # if one vertex falls in bg rect, project screen quad into uv quad
    # warp image region into texture
    # Screen-space bounding rectangle of the background image plane.
    ((x0,y0,z0), (x1,y1,z1)) = self.backgroundImage.mesh.calcBBox()
    camera = mh.cameras[self.backgroundImage.mesh.cameraMode]
    x0, y0, _ = camera.convertToScreen(x0, y0, z0, self.backgroundImage.mesh)
    x1, y1, _ = camera.convertToScreen(x1, y1, z1, self.backgroundImage.mesh)
    leftTop = (x0, y1)
    rightBottom = (x1, y0)

    dstImg = projection.mapImage(self.backgroundImage, mesh, leftTop, rightBottom)
    texPath = mh.getPath('data/skins/projection.png')
    # Keep the previous projection texture (if any) so the action can undo.
    if os.path.isfile(texPath):
        oldImg = mh.Image(texPath)
    else:
        oldImg = None

    gui3d.app.do(ProjectionAction("Change projected background texture",
        self.human.getTexture(),
        texPath,
        oldImg,
        dstImg))

    log.debug("Enabling shadeless rendering on body")
    self.shadelessButton.setChecked(True)
    self.human.setShadeless(1)

    mh.redraw()
def parse_body(self): if self.msg.is_multipart(): for part in self.msg.walk(): if part.get_content_type() == 'text/plain': self.msg = part break else: log.debug('%s message contains no text/plain subpart: %s', self.msg.get_content_type(), self.identifier, v=2) return False if self.msg.get_content_type() != 'text/plain': log.debug('content type %s not supported: %s', self.msg.get_content_type(), self.identifier, v=2) return False self.charset = self.msg.get_content_charset('') unicode_error = None if self.charset: try: self.body = unicode(self.msg.get_payload(decode=True), self.charset) except (UnicodeDecodeError, LookupError), e: unicode_error = e else: try: self.body, charset = self.try_unicode(self.msg.get_payload(decode=True)) except (UnicodeDecodeError, LookupError), e: unicode_error = e else:
def notifyPreview(self, uid=None):
    """Push the current match preview to one client (uid given) or to
    everyone in the room (uid is None). No-op when no preview exists."""
    preview = g_match_mgr.GetPreview()
    if not preview:
        return
    message = L2CNotifyPreview()
    message.MergeFrom(preview)
    self.room.UniOrBroadcast(message, uid)
    log.debug("PREVIEW: %s" % message.desc)
def waitForDevice(device_id=None, wait_sec=10):
    """Restart adb and poll until a device is ready, or raise on timeout.

    :param device_id: udid of a specific device to wait for; when None the
        first connected device is selected.
    :param wait_sec: overall timeout in seconds.
    :returns: tuple (adb-connection-up result, selected device udid).
    :raises Exception: when no suitable device appears within wait_sec.

    Bug fixes vs. the previous version: the recursive doWait() discarded
    the return value of its recursive call (so a retry returned None), and
    when a specific device_id was not yet present it slept once and then
    fell through returning None instead of retrying.  Rewritten as an
    iterative polling loop, which also avoids unbounded recursion.
    """
    restartAdb()
    log.debug("Wait for device to be ready(timeout=%s)" % wait_sec)
    sec = 0.75  # polling interval
    end_at = time.time() + wait_sec

    def doWait():
        # Poll until a matching device shows up or the deadline passes.
        while True:
            if time.time() > end_at:
                msg = "Device did not become ready in %s secs; are sure it's powered on?" % wait_sec
                raise Exception(msg)
            devices = getConnectedDevices()
            if not devices:
                time.sleep(sec)
                continue
            if not device_id:
                # No specific device requested: take the first one.
                setDeviceId(devices[0]["udid"])
                return devices[0]["udid"]
            for info in devices:
                if info["udid"] == device_id:
                    setDeviceId(device_id)
                    return device_id
            # Requested device not connected yet; keep polling.
            time.sleep(sec)

    dev = doWait()
    return checkAdbConnectionIsUp(), dev
def solr_commit():
    """Index the exported story/theme JSON definitions into local Solr cores
    and rebuild the suggest/spellcheck dictionaries.

    Skips silently when pysolr is not installed.  (Python 2 code:
    iteritems/itervalues/urllib2.)
    """
    try:
        import pysolr
    except ImportError:
        log.debug("pysolr not present, skipping indexing")
        return
    patterns = {
        'story': '/tmp/webjson/storydefinitions/*.json',
        'theme': '/tmp/webjson/themedefinitions/*.json',
    }
    for key, pattern in patterns.iteritems():
        # One Solr core per object type: tstpstory / tstptheme.
        solr = pysolr.Solr('http://localhost:8983/solr/tstp' + key, timeout=10)
        objs = []
        for fn in glob.glob(pattern):
            # Flush in batches (of ~101) to bound memory/request size.
            if len(objs) > 100:
                solr.add(objs)
                objs = []
            with open(fn) as fh:
                obj = json.load(fh)
                # Concatenate all field values into the catch-all search field.
                blob = '\n\n'.join(obj.itervalues())
                obj['_text_'] = blob
                objs.append(obj)
        # Flush the final (possibly empty) batch.
        solr.add(objs)
    # rebuild dictionaries
    urllib2.urlopen('http://localhost:8983/solr/tstptheme/suggest?suggest.build=true&suggest.dictionary=completer')
    urllib2.urlopen('http://localhost:8983/solr/tstptheme/spell?spellcheck.build=true')
    urllib2.urlopen('http://localhost:8983/solr/tstpstory/spell?spellcheck.build=true')
def find_distribution(self, dist_id, profile_id, build_type, search_local = 1, search_cvs = 1):
    """Locate a built distribution for this module, trying the local
    filesystem first, then (optionally) RNA/CVS lookups.

    Returns the location found, or None when the module cannot be
    distributed (.rna modules) or nothing matches.
    """
    ## Profile may be a full path!
    profile_id = os.path.basename(profile_id)

    ## .rna module names are magical, we cannot find them!
    if self.module_name[-4:] == ".rna":
        return None

    setup()

    message = "getting distribution=\"%s\" (cvs tag=\"%s\", root=\"%s\", dist_id=\"%s\")" % (
        self.module_name,
        self.cvs_tag or "HEAD",
        self.cvs_root,
        dist_id)

    log.debug( message )
    outmsg.send( message )

    # Candidate paths: the cross product of all spellings of
    # dist_id x profile_id x build_type under the CVS base.
    locations = []
    for a in self.possible_locations(dist_id):
        for b in self.possible_locations(profile_id):
            for c in self.possible_locations(build_type):
                locations.append( "%s%s%s%s/%s" % (
                    self.cvs_base, a,b,c,self.cvs_path))

    if search_local:
        location = self.find_distribution_filesystem(locations)
        if location:
            return location

    if not search_cvs:
        return None

    # macos dist ids get an extra RNA lookup before falling back to CVS.
    if string.count(dist_id, "macos"):
        location = self.find_distribution_rna(locations)
        if location:
            return location

    return self.find_distribution_cvs(locations)
def unlock(self):
    """Release this file lock.

    Raises when we do not currently own the lock; otherwise reopens the
    lock file descriptor if needed and releases the fcntl lock.
    """
    if not self.owned:
        raise Exception("can't unlock %r - we don't own it" % self.name)
    # Make sure self.lockfile is a valid open descriptor before unlocking.
    self._open_lock()
    fcntl.lockf(self.lockfile, fcntl.LOCK_UN, 0, 0)
    if vars.DEBUG_LOCKS:
        debug("%s unlock\n", self.name)
    self.owned = False
def ingest_many(self, data):
    """ Bulk import a list of data.
        May loop through data and do ingest single, or may do a bulk import
    """
    if not len(data):
        return
    # If the database file doesn't exist, we need to add the flag to
    # create it the first time we run
    args = [FPRINT_PATH, "-dbase", "landmarkdb"]
    if not os.path.exists("landmarkdb.mat"):
        args.extend(["-cleardbase", "1"])
    args.append("-addlist")
    # Write the entries to a temporary UTF-8 list file for the fingerprinter.
    fp,fname = tempfile.mkstemp()
    os.close(fp)
    fp = codecs.open(fname, "w", "utf8")
    for line in data:
        # Lines containing '!' or '&' are skipped — presumably these
        # characters confuse the external tool; TODO confirm.
        if "!" not in line and "&" not in line:
            fp.write("%s\n" % line)
    fp.close()
    log.debug("importing from %s" % fname)
    args.append(fname)
    data, err, retval = self.run_process(args)
    os.unlink(fname)
    log.debug(data)
def _p_resolveConflict(self, old, saved, new):
    """Three-way merge of persistent state for ZODB-style conflict
    resolution.

    old: the common ancestor state; saved: the committed state; new: the
    state we tried to commit.  Starts from `new` and, for every field the
    other transaction changed but we did not (saved differs from old while
    new equals old), takes the saved value.  Returns the merged state.
    """
    #return newState
    resolved = new.copy()
    resolved_names = []

    # Merge model fields
    for name in self.p_attr_seq:
        # Choice fields are handled separately below via p_choice_tokens.
        if not IChoice.providedBy(self.p_fields[name]):
            old_v = old[name]
            saved_v = saved[name]
            new_v = new[name]
            if saved_v != old_v and new_v == old_v:
                # Other transaction changed it, we didn't: keep theirs.
                resolved[name] = saved_v
                resolved_names.append(name)

    # Merge model choice fields
    if 'p_choice_tokens' in old:
        # Copy before mutating so we don't alias the `new` state's dict.
        resolved['p_choice_tokens'] = resolved['p_choice_tokens'].copy()
        for name in old['p_choice_tokens']:
            old_v = old['p_choice_tokens'][name]
            saved_v = saved['p_choice_tokens'][name]
            new_v = new['p_choice_tokens'][name]
            if saved_v != old_v and new_v == old_v:
                resolved['p_choice_tokens'][name] = saved_v
                resolved_names.append(name)

    log.debug('Conflict resolved for %r, values of %s merged' % \
        (self, ', '.join(repr(n) for n in resolved_names)))
    return resolved
def process(self, block=False, timeout=None):
    """ process queued events and request

    If block is set, we wait for incoming events and requests. In this
    case, a timeout in seconds can be specified, as well.
    """
    while True:
        try:
            item = self.queue.get(block=block, timeout=timeout)
        except queue.Empty:
            # Queue drained (or blocking wait timed out): we're done.
            break
        # after having get the first event, we do no longer block
        block = False
        timeout = None
        if isinstance(item, events.event):
            # Events: fan out to every matching subscriber; a handler may
            # abort further delivery by raising TerminateEventProcessing.
            try:
                for subscribedevent, handler in self.subscriptions:
                    if isinstance(item, subscribedevent):
                        handler(item)
            except TerminateEventProcessing:
                pass
        else:
            # Requests: first supplier that accepts the request type wins.
            for suppliedrequest, handler in self.suppliers:
                if isinstance(item.request, suppliedrequest):
                    # compute result and signalise that
                    # request has been processed
                    try:
                        item.result = handler(item.request)
                        log.debug("got result %r for %r" % (item.result, item.request))
                        item.ready.set()
                        break
                    except DenyRequest:
                        # This supplier declined; try the next one.
                        pass
def onClicked(event):
    """Radio-button click handler: activate the clothing item whose radio
    is selected and refresh the texture chooser accordingly."""
    for radio, uuid in self.clothesSelections:
        if not radio.selected:
            continue
        self.activeClothing = uuid
        log.debug( 'Selected clothing "%s" (%s)' % (radio.text(), uuid) )
        self.reloadTextureChooser()
        return
def reloadTextureChooser(self): human = gui3d.app.selectedHuman # TODO this is temporary, until new eye texturing approach if 'data/eyes' in self.filechooser.paths: self.filechooser.setPreviewExtensions(['thumb', 'png']) self.filechooser.extension = 'png' if self.skinRadio.selected: self.textures = [self.systemSkins, self.userSkins] elif self.hairRadio.selected: proxy = human.hairProxy self.textures = [os.path.dirname(proxy.file)] elif self.eyesRadio.selected: self.filechooser.setPreviewExtensions('png') self.filechooser.extension = 'mhstx' self.textures = ['data/eyes'] else: # Clothes if self.activeClothing: uuid = self.activeClothing clo = human.clothesObjs[uuid] filepath = human.clothesProxies[uuid].file self.textures = [os.path.dirname(filepath)] + self.defaultTextures else: # TODO maybe dont show anything? self.textures = self.defaultTextures filec = self.filechooser log.debug("fc %s %s %s added", filec, filec.children.count(), str(filec.files)) # Reload filechooser self.filechooser.setPaths(self.textures) self.filechooser.refresh() self.filechooser.setFocus()
def _sendobject(self, type, obj):
    """Serialize obj with cPickle and send it to the client as a framed
    message: '<type> <payload-length>\\r\\n<payload>\\r\\n'."""
    # we have to switch to blocking mode for send
    # self.request.setblocking(1)
    objstring = cPickle.dumps(obj, 1)
    self.wfile.write("%s %d\r\n%s\r\n" % (type, len(objstring), objstring))
    self.wfile.flush()
    log.debug("server send: type=%s object=%s" % (type, `obj`))
def __init__(self, networklocation):
    """Set up the client channel: connect a TCP or UNIX socket, start the
    receiver thread, and run this channel as a daemon thread."""
    # network location is either a tuple (server adress, port) or a
    # filename pointing to a socket file
    try:
        server, port = networklocation
        family = socket.AF_INET
    except ValueError:
        # Not unpackable as (host, port): treat it as a UNIX socket path.
        filename = networklocation
        family = socket.AF_UNIX
    self.socket = socket.socket(family, socket.SOCK_STREAM)
    if family == socket.AF_INET:
        self.socket.connect((server, port))
    else:
        self.socket.connect(filename)
    self.subscriptions = []
    self.wfile = self.socket.makefile("wb")
    self.queue = hub.PriorityQueue(-1)
    # Incoming messages are read by a separate receiver thread.
    self.clientnetworkreceiver = clientnetworkreceiver(self.socket, self.queue)
    self.clientnetworkreceiver.start()
    # hash for pending requests
    self.pendingrequests = {}
    self.done = False
    threading.Thread.__init__(self)
    self.setDaemon(1)
    log.debug("Network clientchannel initialized")
def __init__(self, executable, cmd):
    """Create a debug engine for the given executable and command line.

    The child process handle (self.proc) stays None until launched.
    """
    super(DebugEngine, self).__init__()
    debug("Initializing DebugEngine")
    self.executable = executable
    self.cmd = cmd
    self.proc = None
def run(self):
    """Receiver loop: dispatch framed messages from the client until an
    unknown message type (or closed connection) ends the session."""
    # process events, request and subscription requests coming from
    # the client
    while not self.done:
        type, obj = self._receiveobject()
        if type == _EVENT:
            log.debug("server: client sends event '%s'" % obj)
            hub.notify(obj, priority=-50)
        elif type == _REQUEST:
            log.debug("server: requesting %s for client" % `obj`)
            # extract id
            rid, obj = obj
            result = hub.request(obj, priority=-50)
            log.debug("server: got answer %s" % `result`)
            # be careful, handler may not exist anymore?
            try:
                self.handler._sendobject(_RESULT, (rid, result))
            except:
                pass
        elif type == _SUBSCRIBE:
            log.debug("server: client requests subscription for '%s'" % `obj`)
            # be careful, maybe handler does not exists anymore?
            try:
                self.handler.subscribe(obj)
            except:
                pass
        else:
            # Any other type terminates both receiver and handler.
            log.debug("server: servernetworkreceiver exits: type=%s" % type)
            self.done = True
            self.handler.done = True
def convertMultiImage(image_path, temp_dir):
    """Split a possibly multi-page TIFF into one file per page in temp_dir.

    Non-TIFF images are returned unchanged as a single-element list.
    Returns the list of page file paths.
    """
    converted_paths = []
    if imghdr.what(image_path) != 'tiff':
        return [image_path]
    debug('Checking for multiple images in TIFF')
    i = 0
    base_name = os.path.basename(image_path)
    name, extension = os.path.splitext(base_name)
    image = Image.open(image_path)
    try:
        # PIL signals the end of a multi-frame image by raising EOFError
        # from seek(), so iterate until that happens.
        while True:
            image.seek(i)
            file_name = os.path.join(temp_dir, name + ' #' + str(i + 1) + \
                                     extension)
            image_name = getNonExistingFileName(file_name)
            image.save(image_name, format='TIFF')
            debug('Saved %s' % image_name)
            converted_paths.append(image_name)
            i += 1
    except EOFError:
        # No more images in the file
        pass
    return converted_paths
def execute(cmd, root_helper=None, process_input=None, addl_env=None,
            check_exit_code=True, return_stderr=False, log_fail_as_error=True,
            extra_ok_codes=None):
    """Run a subprocess and return its stdout (and stderr when
    return_stderr is set).

    extra_ok_codes lists exit codes that should be treated as success.
    When check_exit_code is set, any other non-zero exit raises
    RuntimeError carrying the command, exit code and captured output.
    """
    try:
        obj, cmd = create_process(cmd, root_helper=root_helper,
                                  addl_env=addl_env)
        _stdout, _stderr = (process_input and
                            obj.communicate(process_input) or
                            obj.communicate())
        obj.stdin.close()
        m = "\nCommand: %s\nExit code: %s\nStdout: %r\n, Stderr: %r" % (cmd, obj.returncode, _stdout, _stderr)

        extra_ok_codes = extra_ok_codes or []
        if obj.returncode and obj.returncode in extra_ok_codes:
            # Whitelisted exit code: treat as success from here on.
            obj.returncode = None

        if obj.returncode and log_fail_as_error:
            LOG.error(m)
        else:
            LOG.debug(m)

        if obj.returncode and check_exit_code:
            raise RuntimeError(m)
    finally:
        # NOTE(termie): this appears to be necessary to let the subprocess
        #               call clean something up in between calls, without
        #               it two execute calls in a row hangs the second one
        greenthread.sleep(0)

    return return_stderr and (_stdout, _stderr) or _stdout
def read(path):
    """Parse a CSV flat file into an ordered dictionary.

    The first non-empty row is the header; every following row becomes an
    AttributeDictionary mapping header keys to row values, stored under
    the value of the row's first column.
    """
    parks = collections.OrderedDict()
    log.info("reading %s...", path)
    with open(path, 'r') as csvfile:
        rows = csv.reader(csvfile)
        header = None
        for row in rows:
            if not row:
                # Skip blank lines.
                continue
            if header is None:
                header = row
                log.debug("header: %s", header)
            else:
                log.debug("row: %s", row)
                data = AttributeDictionary()
                name = row[0]
                for index, key in enumerate(header):
                    data[key] = row[index]
                parks[name] = data
    log.info("read %s parks", len(parks))
    return parks
def getTextSizeFromImage(image):
    """Estimate the text line height (in pixels) of an image by measuring
    runs of consecutive non-blank scanlines against the background color."""
    width, height = image.size
    colors = image.getcolors(width * height)
    background_color = 255
    if colors:
        # Most frequent color is assumed to be the background.
        colors.sort()
        background_color = colors[-1][1]
    text_sizes = []
    for i in xrange(1, height):
        blank_line = True
        # Sample every 3rd pixel of the row for speed.
        for j in range(0, width, 3):
            color = image.getpixel((j, i - 1))
            if colorsContrast(color, background_color):
                blank_line = False
                break
        if blank_line:
            # Close the current run (if one is open) with a 0 separator.
            if text_sizes and text_sizes[-1]:
                text_sizes.append(0)
        else:
            # Extend the current run, or start a new one.
            if text_sizes and text_sizes[-1]:
                text_sizes[-1] += 1
            else:
                text_sizes.append(1)
    text_sizes.sort()
    text_sizes = [i for i in text_sizes if i != 0]
    text_size = 0
    if text_sizes:
        text_sizes_avg = sum(text_sizes) / len(text_sizes)
        for i in text_sizes:
            if i > text_sizes_avg:
                text_size = math.floor(i)
                break
        # NOTE(review): this unconditionally overwrites the value chosen by
        # the loop above, making that loop dead code — confirm whether the
        # max() assignment was meant to be a fallback (e.g. in an else).
        text_size = max(text_sizes)
    debug('Text Size: %s' % text_size)
    return text_size
def checkDirty(self):
    """Verify constraint ordering in the bone list.

    Walks the bones in list order, marking each visited bone dirty; if a
    constraint references a target bone that has not been visited yet
    (i.e. appears later in the list), the ordering is invalid and a
    NameError is raised after logging every violation.
    """
    dirty = False
    # Reset: nothing visited yet.
    for bone in self.boneList:
        bone.dirty = False
    for bone in self.boneList:
        bone.dirty = True
        for cns in bone.constraints:
            # Collect whichever target attributes this constraint type has.
            bnames = []
            try:
                bnames.append( cns.subtar )
            except AttributeError:
                pass
            try:
                bnames.append( cns.ptar )
            except AttributeError:
                pass
            for bname in bnames:
                if bname:
                    target = self.bones[bname]
                    if not target.dirty:
                        # Target comes after this bone in the list.
                        log.debug("Dirty %s before %s" % (bone.name, target.name))
                        dirty = True
    if dirty:
        raise NameError("Dirty bones encountered")
def persist_map(map, file):
    """ Writes the given map of strings-to-values into a file, by converting
    all of its values into strings.  Any key value pair that contains the ':'
    character will not be written out.   All other contents that were in the
    given file will be destroyed.   Returns True on success, False on failure.
    """
    try:
        import log
        with StreamWriter(file, False, Encoding.UTF8) as sw:
            # Header comment line recording when the file was generated.
            sw.Write(":: This file was generated on "\
                + strftime(r'%Y.%m.%d %X') + "\n\n")
            # Write entries in sorted key order for stable output.
            # (was: keys = map.keys(); keys.sort() — non-idiomatic and
            # broken on dict views.)
            for key in sorted(map.keys()):
                value = sstr(map[key]).strip()
                key = sstr(key).strip()
                if ':' in key or ':' in value:
                    # ':' is the key/value separator, so such pairs are
                    # skipped rather than written ambiguously.
                    log.debug("WARNING: can't write map entry containing ':'; ",
                        key, " -> ", value)
                else:
                    sw.Write(key + ' : ' + value + "\n")
        return True
    except Exception:
        # was a bare `except:` — keep the best-effort contract but stop
        # swallowing SystemExit/KeyboardInterrupt.
        log.debug_exc("problem persisting map to file: " + sstr(file))
        return False
def run(self):
    """Main thread function.

    Runs in a loop, accepting new client connections on the GPS port
    until self.running is cleared.  Each accepted connection is handed
    to self.got_client_cb.
    """
    log.info('GPS server thread: start, port: %d' % self.gps_port)
    try:
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server.bind(('0.0.0.0', self.gps_port))
        server.listen(1)
        server.settimeout(3)  # timeout: 3s
        while self.running:
            try:
                conn, address = server.accept()
                conn.settimeout(3)
                self.got_client_cb(conn, address)
                log.debug('new client from: %s' % str(address))
            except socket.timeout:
                # Accept timed out: loop again so self.running is re-checked.
                pass
        server.close()
        log.info('GPS server thread: bye')
    except Exception as e:
        log.error('GPS server thread error: %s' % e)
        self.running = False
def loadFootPose(self, filename):
    """Load a special foot pose from a BVH file and apply it to the human.

    Passing a falsy filename unloads the current foot pose instead,
    restoring the unmodified base pose.
    """
    log.debug("Loading special foot pose from %s", filename)
    self.selectedFile = filename

    if not filename:
        # Unload current pose
        self.selectedFile = None
        self.selectedPose = None
        # Remove the special pose from existing pose by restoring the original
        org_pose = self._get_current_unmodified_pose()
        if org_pose is None:
            self.human.setActiveAnimation(None)
        elif self.human.hasAnimation(org_pose.name):
            self.human.setActiveAnimation(org_pose.name)
        else:
            # Original pose is not registered on the human yet: add it first.
            self.human.addAnimation(org_pose)
            self.human.setActiveAnimation(org_pose.name)

        # Remove pose reserved for foot pose library from human
        if self.human.hasAnimation('special-foot-pose'):
            self.human.removeAnimation('special-foot-pose')
        self.human.refreshPose(updateIfInRest=True)
        return

    # Load pose
    bvh_file = bvh.load(filename, convertFromZUp="auto")
    anim = bvh_file.createAnimationTrack(self.human.getBaseSkeleton())
    self.applyFootPose(anim)
def _connection(self):
    """Ensure an adb connection to this device with root and a remounted
    filesystem.  Returns True when connected/rooted, False otherwise.

    USB devices must already be plugged in; network devices are connected
    (and reconnected after a root restart) via `adb connect`.
    """
    devices = self.__adb('devices')
    if not self.__isNetworkConnection():
        # USB transport: the device must already be listed.
        if not str(devices).__contains__(self.id):
            print 'device %s need to plugin' %self.id
            return False
        else:
            self.adb('root')
            self.adb('remount')
            return True
    else:
        # Network transport: connect on demand.
        if not str(devices).__contains__(self.id):
            # self.adb('disconnect')
            r = self.__adb('connect %s' %self.id)
            if str(r).__contains__('unable to connect to'):
                print 'unable to connect to %s' %self.id
                return False
            time.sleep(1)
        r = self.adb('root')
        log.debug('root devices:\n %s' %r)
        if 'adbd is already running as root' not in r:
            # adbd restarted as root, which drops the TCP connection:
            # wait and reconnect before remounting.
            time.sleep(2)
            self.__adb('connect %s' %self.id)
            time.sleep(1)
        self.adb('remount')
        log.debug('remount devices:\n %s' %r)
        return True
def bake(self, skel):
    """
    Bake animation as skinning matrices for the specified skeleton.
    Results in significant performance gain when skinning.
    We do skinning with 3x4 matrixes, as suggested in
    http://graphics.ucsd.edu/courses/cse169_w05/2-Skeleton.htm
    Section 2.3 (We assume the 4th row contains [0 0 0 1])
    """
    if self.disableBaking:
        return

    from progress import Progress

    log.debug('Updating baked animation %s (%s frames)', self.name, self.nFrames)
    progress = Progress(self.nFrames)

    bones = skel.getBones()
    if len(bones) != self.nBones:
        raise RuntimeError("Error baking animation %s: number of bones in animation data differs from bone count of skeleton %s" % (self.name, skel.name))

    # Remember the pose so it can be restored after baking.
    old_pose = skel.getPose()
    self._data_baked = np.zeros((self.dataLen, 3, 4))

    for f_idx in xrange(self.nFrames):
        # Frame data is stored flat: one row per bone, nBones rows per frame.
        i = f_idx * self.nBones
        skel.setPose(self._data[i:i+self.nBones])
        for b_idx in xrange(self.nBones):
            idx = i + b_idx
            # Top 3 rows of the 4x4 pose-verts matrix (3x4 skinning matrix).
            self._data_baked[idx,:,:] = bones[b_idx].matPoseVerts[:3,:4]
        progress.step("Baking animation frame %s", f_idx+1)

    skel.setPose(old_pose)
def selectProxy(self, mhclofile):
    """
    Called when a new proxy has been selected.
    If this library selects only a single proxy, specifying None as
    mhclofile parameter will deselect the current proxy and set the
    selection to "none".
    If this library allows selecting multiple proxies, specifying None as
    mhclofile will have no effect.
    """
    if not mhclofile:
        if self.multiProxy:
            return
        else:
            self.deselectProxy(None)
            return

    log.message('Selecting proxy file "%s" from %s library.', mhclofile, self.proxyName)
    human = self.human

    # Try the cache first; a stale entry (file changed on disk) is reloaded.
    pxy = None
    mhcloId = getpath.canonicalPath(mhclofile)
    if mhcloId in self._proxyCache:
        pxy = self._proxyCache[mhcloId]
        if pxy.mtime < os.path.getmtime(mhclofile):
            pxy = None

    if not pxy:
        pxy = proxy.loadProxy(human, mhclofile, type=self.proxyName.capitalize())
        self._proxyCache[mhcloId] = pxy

    if pxy.uuid in [p.uuid for p in self.getSelection() if p is not None]:
        log.debug("Proxy with UUID %s (%s) already loaded in %s library. Skipping.", pxy.uuid, pxy.file, self.proxyName)
        return

    if not self.multiProxy and self.isProxySelected():
        # Deselect previously selected proxy
        self.deselectProxy(None, suppressSignal = True)

    mesh,obj = pxy.loadMeshAndObject(human)
    # Bail out BEFORE touching the mesh or adding the object.
    # (Bug fix: setPickable/addObject used to run before this check,
    # crashing on a None mesh and leaking the object.)
    if not mesh:
        return
    mesh.setPickable(True)  # Allow mouse picking for proxies attached to human

    gui3d.app.addObject(obj)

    self.adaptProxyToHuman(pxy, obj)
    obj.setSubdivided(human.isSubdivided())  # Copy subdivided state of human

    # Add to selection
    self.selectedProxies.append(pxy)
    # (Bug fix: selectItem was previously called twice; once is enough.)
    self.filechooser.selectItem(mhclofile)

    self.proxySelected(pxy)

    self.signalChange()
def __init__(self):
    """Initialise the empty top-level network ("topnet") container."""
    debug("* Creating topnet")
    self.name = "topnet"
    # Sub-networks, links and traffic definitions start empty.
    self.nets = []
    self.link = []
    self.traffic = []
    self.traffic_pattern = ""
def run(self):
    """Start every crawler slave thread, then block until all of them
    have finished."""
    log.debug('Starting wiki-crawler...')
    for worker in self._slaves:
        worker.start()
    # Wait for every slave to terminate before returning.
    for worker in self._slaves:
        worker.join()
def update_files(
    self,
    force=False,
    force_interactive=False,
    fetch=False,
    clean=True,
    skip_changes=False,
):
    """Ensure the source matches the specified revision.

    force: discard local state (rebuild invalid repos, ignore changes).
    force_interactive: prompt before overwriting uncommitted changes.
    fetch: force a fetch even if the revision appears available.
    clean: include untracked files when checking for changes.
    skip_changes: silently skip the update when changes exist.
    Raises UncommittedChanges (or the invalid-repository error) otherwise.
    """
    log.info("Updating source files...")

    # Clone the repository if needed
    assert self.name
    # Only clone into a directory that is missing or empty.
    valid_checkout_dir = False
    if os.path.isdir(self.name):
        valid_checkout_dir = len(os.listdir(self.name)) == 0
    else:
        valid_checkout_dir = True

    if valid_checkout_dir:
        git.clone(
            self.type,
            self.repo,
            self.name,
            sparse_paths=self.sparse_paths,
            rev=self.rev,
        )

    # Enter the working tree
    shell.cd(self.name)
    if not git.valid():
        if force:
            git.rebuild(self.type, self.repo)
            fetch = True
        else:
            raise self._invalid_repository

    # Check for uncommitted changes
    if not force:
        log.debug("Confirming there are no uncommitted changes...")
        if skip_changes:
            if git.changes(self.type, include_untracked=clean, display_status=False):
                common.show(
                    f'Skipped update due to uncommitted changes in {os.getcwd()}',
                    color='git_changes',
                )
                return
        elif force_interactive:
            if git.changes(self.type, include_untracked=clean, display_status=False):
                common.show(
                    f'Uncommitted changes found in {os.getcwd()}',
                    color='git_changes',
                )
                # Keep prompting until a recognised answer is given;
                # empty input defaults to "yes" (overwrite).
                while True:
                    yn_input = str(
                        input("Do you want to overwrite? (Y/N)[Y]: ")
                    ).rstrip('\r\n')
                    if yn_input.lower() == "y" or not yn_input:
                        break
                    if yn_input.lower() == "n":
                        common.show(f'Skipped update in {os.getcwd()}', color='git_changes')
                        return
        else:
            if git.changes(self.type, include_untracked=clean):
                raise exceptions.UncommittedChanges(
                    f'Uncommitted changes in {os.getcwd()}')

    # Fetch the desired revision
    if fetch or git.is_fetch_required(self.type, self.rev):
        git.fetch(self.type, self.repo, self.name, rev=self.rev)

    # Update the working tree to the desired revision
    git.update(self.type, self.repo, self.name, fetch=fetch, clean=clean, rev=self.rev)
def map_type(cls):
    """Infer the converter type from a dataclass, type, or annotation.

    Handles three cases: dataclasses (build an Object converter from the
    fields), typing containers with __origin__ (List / Dict / Optional),
    and plain types matched against registered Converter subclasses.
    Raises TypeError for anything it cannot map.
    """
    log.debug(f'Mapping {cls} to converter')

    if dataclasses.is_dataclass(cls):
        # Recursively map every field, then build a dedicated Object
        # converter subclass for this dataclass.
        converters = {}
        for field in dataclasses.fields(cls):
            converters[field.name] = map_type(field.type)
        converter = Object.subclass(cls, converters)
        log.debug(f'Mapped {cls} to new converter: {converter}')
        return converter

    if hasattr(cls, '__origin__'):
        # typing generics: List[X], Dict[K, V], Optional[X]/Union[X, None]
        converter = None

        if cls.__origin__ == list:
            try:
                converter = map_type(cls.__args__[0])
            except TypeError as exc:
                # Bare `List` without an item type is not allowed.
                log.debug(exc)
                exc = TypeError(f"Type is required with 'List' annotation")
                raise exc from None
            else:
                converter = List.subclass(converter)

        if cls.__origin__ == dict:
            log.warn("Schema enforcement not possible with 'Dict' annotation")
            key = map_type(cls.__args__[0])
            value = map_type(cls.__args__[1])
            converter = Dictionary.subclass(key, value)

        elif cls.__origin__ == Union:
            # Only Optional[X] (i.e. Union[X, None]) is supported.
            converter = map_type(cls.__args__[0])
            assert len(cls.__args__) == 2
            assert cls.__args__[1] == type(None)
            converter = converter.as_optional()

        if converter:
            log.debug(f'Mapped {cls} to new converter: {converter}')
            return converter

        raise TypeError(f'Unsupported container type: {cls.__origin__}')

    else:
        # Plain type: look for a registered converter with matching TYPE.
        for converter in Converter.__subclasses__():
            if converter.TYPE == cls:
                log.debug(f'Mapped {cls} to existing converter: {converter}')
                return converter

        # A Converter subclass used directly as an annotation maps to itself.
        if issubclass(cls, Converter):
            log.debug(f'Mapped {cls} to existing converter (itself)')
            return cls

    raise TypeError(f'Could not map type: {cls}')
def loadModifiers(filename, human):
    """
    Load modifiers from a modifier definition file.

    :param filename: path of the JSON modifier definition file.
    :param human: human object to attach the modifiers to (may be None).
    :returns: list of constructed modifier objects.
    """
    log.debug("Loading modifiers from %s", filename)
    import json
    import os
    from collections import OrderedDict
    modifiers = []
    lookup = OrderedDict()
    # Use a context manager so the file handle is closed
    # (was: json.load(open(filename, 'rt')) — leaked the handle).
    with open(filename, 'rt') as f:
        data = json.load(f, object_pairs_hook=OrderedDict)
    for modifierGroup in data:
        groupName = modifierGroup['group']
        for mDef in modifierGroup['modifiers']:
            # Construct modifier
            if "modifierType" in mDef:
                modifierClass = globals()[mDef["modifierType"]]
            elif 'macrovar' in mDef:
                modifierClass = MacroModifier
            else:
                modifierClass = UniversalModifier
            if 'macrovar' in mDef:
                modifier = modifierClass(groupName, mDef['macrovar'])
            else:
                modifier = modifierClass(groupName, mDef['target'], mDef.get('min', None), mDef.get('max', None), mDef.get('mid', None))
            if "defaultValue" in mDef:
                modifier._defaultValue = mDef["defaultValue"]
            modifiers.append(modifier)
            lookup[modifier.fullName] = modifier
    if human is not None:
        for modifier in modifiers:
            modifier.setHuman(human)
    log.message('Loaded %s modifiers from file %s', len(modifiers), filename)

    # Attempt to load modifier descriptions
    _tmp = os.path.splitext(filename)
    descFile = _tmp[0] + '_desc' + _tmp[1]
    hasDesc = OrderedDict([(key, False) for key in list(lookup.keys())])
    if os.path.isfile(descFile):
        with open(descFile, 'rt') as f:
            data = json.load(f, object_pairs_hook=OrderedDict)
        dCount = 0
        for mName, mDesc in list(data.items()):
            try:
                mod = lookup[mName]
            except KeyError:
                # was a bare `except:` — only a missing modifier should be
                # tolerated here, not arbitrary errors.
                log.warning("Loaded description for %s but modifier does not exist!", mName)
            else:
                mod.description = mDesc
                dCount += 1
                hasDesc[mName] = True
        log.message("Loaded %s modifier descriptions from file %s", dCount, descFile)
    for mName, mHasDesc in list(hasDesc.items()):
        if not mHasDesc:
            log.warning("No description defined for modifier %s!", mName)

    return modifiers
def debugModifiers():
    """Dump every modifier of the currently selected human to the debug
    log: its macro variable, dependencies, influences and description."""
    human = G.app.selectedHuman
    modifierNames = sorted(human.modifierNames)
    for mName in modifierNames:
        m = human.getModifier(mName)
        log.debug("%s:", m)
        log.debug(" controls: %s", m.macroVariable)
        log.debug(" dependencies (variables): %s", str(m.macroDependencies))
        log.debug(" dependencies (modifier groups): %s", str(list(human.getModifierDependencies(m))))
        log.debug(" influences (modifier groups): %s", str(list(human.getModifiersAffectedBy(m))))
        log.debug(" description: %s\n", m.description)
to copy the targets once built."""

import os
import sys
import string
import re
import time
import types
import posixpath
import macfs
import ascript
import umake_lib
import macpath
import log

log.debug( 'Imported: $Id: umake_ascript.py,v 1.10 2006/06/19 23:11:32 jfinnecy Exp $' )


def condense_mac_path(_path):
    """Remove skipped directories from a Macintosh path, because two path
    separators in a row means to backstep in Macintosh path speak.

    E.g. 'a:b::c' -> 'a:c' (the empty component pops 'b').
    """
    plist = string.split(_path, ":")
    path_list = []
    for pc in plist:
        if pc == "":
            # Empty component == '::' in the input: step back one directory.
            path_list = path_list[:-1]
        else:
            path_list.append(pc)
    return string.join(path_list, ":")
def test(self): debug('test') return "<p>Test - Ok</p>"
def run(self):
    """Log the start request and execute self._run on a new thread."""
    debug('Run')
    worker = threading.Thread(target=self._run)
    worker.start()
def loadBinaryProxy(path, human, type):
    """Load a proxy from its compiled binary (.npz) representation.

    :param path: path of the numpy archive to load.
    :param human: human object the proxy is fitted to.
    :param type: proxy type string (used as-is; the 'proxyType' stored in
        the file is currently ignored, see the commented-out code below).
    :returns: the reconstructed Proxy object.
    """
    log.debug("Loading binary proxy %s.", path)

    npzfile = np.load(path)
    #if type is None:
    #    proxyType = npzfile['proxyType'].tostring()
    #else:
    proxyType = type

    proxy = Proxy(path, proxyType, human)

    # String fields are stored as byte arrays; decode them as UTF-8.
    proxy.name = str(npzfile['name'].tostring(), 'utf8')
    proxy.uuid = str(npzfile['uuid'].tostring(), 'utf8')
    proxy.basemesh = str(npzfile['basemesh'].tostring(), 'utf8')

    if 'description' in npzfile:
        proxy.description = str(npzfile['description'].tostring(), 'utf8')

    if 'version' in npzfile:
        proxy.version = int(npzfile['version'])

    if 'lic_str' in npzfile and 'lic_idx' in npzfile:
        proxy.license.fromNumpyString(npzfile['lic_str'], npzfile['lic_idx'])

    proxy.tags = set(_unpackStringList(npzfile['tags_str'], npzfile['tags_idx']))

    if 'z_depth' in npzfile:
        proxy.z_depth = int(npzfile['z_depth'])

    if 'max_pole' in npzfile:
        proxy.max_pole = int(npzfile['max_pole'])

    if 'special_pose_str' in npzfile:
        # Stored as a flat list of alternating key/value strings.
        special_poses = _unpackStringList(npzfile['special_pose_str'], npzfile['special_pose_idx'])
        for idx in range(0, len(special_poses), 2):
            proxy.special_pose[special_poses[idx]] = special_poses[idx + 1]

    num_refverts = int(npzfile['num_refverts'])

    if num_refverts == 3:
        # Full 3-reference-vertex fitting data stored as-is.
        proxy.ref_vIdxs = npzfile['ref_vIdxs']
        proxy.offsets = npzfile['offsets']
        proxy.weights = npzfile['weights']
    else:
        # Single-reference format: expand into the 3-column layout with
        # all weight on the first column and zero offsets.
        num_refs = npzfile['ref_vIdxs'].shape[0]
        proxy.ref_vIdxs = np.zeros((num_refs, 3), dtype=np.uint32)
        proxy.ref_vIdxs[:, 0] = npzfile['ref_vIdxs']
        proxy.offsets = np.zeros((num_refs, 3), dtype=np.float32)
        proxy.weights = np.zeros((num_refs, 3), dtype=np.float32)
        proxy.weights[:, 0] = npzfile['weights']

    if "deleteVerts" in npzfile:
        proxy.deleteVerts = npzfile['deleteVerts']

    # Reconstruct reverse vertex (and weights) mapping
    proxy._reloadReverseMapping()

    proxy.tmatrix.fromNumpyStruct(npzfile)

    proxy.uvLayers = {}
    for uvIdx, uvName in enumerate(_unpackStringList(npzfile['uvLayers_str'], npzfile['uvLayers_idx'])):
        proxy.uvLayers[uvIdx] = uvName

    proxy.material = material.Material(proxy.name)
    if 'material_file' in npzfile:
        proxy._material_file = str(npzfile['material_file'].tostring(), 'utf8')
    if proxy.material_file:
        proxy.material.fromFile(proxy.material_file)

    proxy._obj_file = str(npzfile['obj_file'].tostring(), 'utf8')

    if 'vertexBoneWeights_file' in npzfile:
        proxy._vertexBoneWeights_file = str(npzfile['vertexBoneWeights_file'].tostring(), 'utf8')
        if proxy.vertexBoneWeights_file:
            from animation import VertexBoneWeights
            proxy.vertexBoneWeights = VertexBoneWeights.fromFile(proxy.vertexBoneWeights_file)

    if proxy.z_depth == -1:
        log.warning('Proxy file %s does not specify a Z depth. Using 50.', path)
        proxy.z_depth = 50

    return proxy
def drawMesh(obj):
    """Render one mesh object, using either the fixed-function pipeline or
    the object's shader (solid, non-shadeless objects only).

    The function is a strictly ordered sequence of OpenGL state changes:
    set up transform/textures/arrays/material, issue the draw calls for the
    applicable mode (wireframe / transparent / depthless / plain solid),
    then restore state defaults. Statement order matters throughout.
    """
    if not obj.visibility:
        return

    if G.args.get('fullloggingopengl', False):
        log.debug("Rendering mesh %s", obj.name)

    glDepthFunc(GL_LEQUAL)

    # Transform the current object
    glPushMatrix()
    transformObject(obj)

    glColor3f(1.0, 1.0, 1.0)

    useShader = obj.shader and obj.solid and not obj.shadeless

    if not useShader:
        if obj.isTextured and obj.texture and obj.solid:
            # Bind texture for fixed function shading
            if have_activeTexture:
                glActiveTexture(GL_TEXTURE0)
            glEnable(GL_TEXTURE_2D)
            tex = getTexture(obj.texture)
            if tex not in (False, None):
                glBindTexture(GL_TEXTURE_2D, tex.textureId)
            else:
                # Fall back to the placeholder texture when loading failed.
                glBindTexture(GL_TEXTURE_2D, TEX_NOT_FOUND.textureId)
            if have_activeTexture:
                # Unbind all remaining texture units.
                for gl_tex_idx in xrange(GL_TEXTURE0 + 1, GL_TEXTURE0 + MAX_TEXTURE_UNITS):
                    glActiveTexture(gl_tex_idx)
                    glBindTexture(GL_TEXTURE_2D, 0)
                    glDisable(GL_TEXTURE_2D)
                    glBindTexture(GL_TEXTURE_1D, 0)
                    glDisable(GL_TEXTURE_1D)
        else:
            # Disable all textures (when in fixed function textureless shading mode)
            for gl_tex_idx in xrange(GL_TEXTURE0, GL_TEXTURE0 + MAX_TEXTURE_UNITS):
                if have_activeTexture:
                    glActiveTexture(gl_tex_idx)
                glBindTexture(GL_TEXTURE_2D, 0)
                glDisable(GL_TEXTURE_2D)
                glBindTexture(GL_TEXTURE_1D, 0)
                glDisable(GL_TEXTURE_1D)

    if obj.nTransparentPrimitives:
        # TODO not needed for alpha-to-coverage rendering (it's face order independent)
        # TODO for other pipelines/older harware better to statically sort faces of hair meshes around BBox center
        #obj.sortFaces()
        pass

    # Fill the array pointers with object mesh data
    if obj.hasUVs:
        glEnableClientState(GL_TEXTURE_COORD_ARRAY)
        glTexCoordPointer(2, GL_FLOAT, 0, obj.UVs)
    glEnableClientState(GL_VERTEX_ARRAY)
    glVertexPointer(3, GL_FLOAT, 0, obj.verts)
    glEnableClientState(GL_NORMAL_ARRAY)
    glNormalPointer(GL_FLOAT, 0, obj.norms)
    glEnableClientState(GL_COLOR_ARRAY)
    if not useShader and obj.solid:
        # Vertex colors should be multiplied with the diffuse material value, also for fixed function
        # (with the exception of wireframe rendering)
        glColorPointer(4, GL_UNSIGNED_BYTE, 0, obj.color_diff)
    else:
        glColorPointer(4, GL_UNSIGNED_BYTE, 0, obj.color)

    # Disable lighting if the object is shadeless
    if obj.shadeless:
        glDisable(GL_LIGHTING)
    else:
        glEnable(GL_LIGHTING)

    if obj.cull:
        glEnable(GL_CULL_FACE)
        # Positive cull value culls back faces, negative culls front faces.
        glCullFace(GL_BACK if obj.cull > 0 else GL_FRONT)
    else:
        glDisable(GL_CULL_FACE)

    if obj.solid:
        # Set material properties
        mat = obj.material
        MatAmb = A(mat.ambientColor.values, 1.0)        # Material - Ambient
        MatDif = A(mat.diffuseColor.values, mat.opacity)  # Material - Diffuse
        MatSpc = A(mat.specularColor.values, 1.0)       # Material - Specular
        MatShn = A(128 * mat.shininess)                 # Material - Shininess
        MatEms = A(mat.emissiveColor.values, 1.0)       # Material - Emission
    else:
        # Wireframe
        # Set some default material properties
        MatAmb = A(0.11, 0.11, 0.11, 1.0)  # Material - Ambient Values
        MatDif = A(1.0, 1.0, 1.0, 1.0)     # Material - Diffuse Values
        MatSpc = A(0.2, 0.2, 0.2, 1.0)     # Material - Specular Values
        MatShn = A(10.0, )                 # Material - Shininess
        MatEms = A(0.0, 0.0, 0.0, 1.0)     # Material - Emission Values

    glMaterialfv(GL_FRONT, GL_AMBIENT, MatAmb)    # Set Material Ambience
    glMaterialfv(GL_FRONT, GL_DIFFUSE, MatDif)    # Set Material Diffuse
    glMaterialfv(GL_FRONT, GL_SPECULAR, MatSpc)   # Set Material Specular
    glMaterialfv(GL_FRONT, GL_SHININESS, MatShn)  # Set Material Shininess
    glMaterialfv(GL_FRONT, GL_EMISSION, MatEms)   # Set Material Emission

    if obj.useVertexColors:
        # Vertex colors affect materials (lighting is enabled)
        glEnable(GL_COLOR_MATERIAL)
        # Vertex colors affect diffuse of material
        glColorMaterial(GL_FRONT, GL_DIFFUSE)
    else:
        glDisable(GL_COLOR_MATERIAL)

    # Enable the shader if the driver supports it and there is a shader assigned
    if useShader:
        glUseProgram(obj.shader)

        # Set custom attributes
        if obj.shaderObj.requiresVertexTangent():
            glVertexAttribPointer(obj.shaderObj.vertexTangentAttrId, 4, GL_FLOAT, GL_FALSE, 0, obj.tangents)
            glEnableVertexAttribArray(obj.shaderObj.vertexTangentAttrId)

        # TODO
        # This should be optimized, since we only need to do it when it's changed
        # Validation should also only be done when it is set
        obj.shaderObj.setUniforms(obj.shaderParameters)
    elif Shader.supported():
        glUseProgram(0)

    # draw the mesh
    if not obj.solid:
        # Wireframe drawing
        glEnable(GL_COLOR_MATERIAL)
        glDisableClientState(GL_COLOR_ARRAY)
        glColor3f(0.0, 0.0, 0.0)
        glDisable(GL_LIGHTING)
        glColorMaterial(GL_FRONT, GL_AMBIENT_AND_DIFFUSE)  # Vertex colors affect ambient and diffuse of material
        glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)

        glDrawElements(g_primitiveMap[obj.vertsPerPrimitive - 1], obj.primitives.size, GL_UNSIGNED_INT, obj.primitives)

        glEnableClientState(GL_COLOR_ARRAY)
        glEnable(GL_LIGHTING)
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        # Second pass with polygon offset fills the faces behind the lines.
        glEnable(GL_POLYGON_OFFSET_FILL)
        glPolygonOffset(1.0, 1.0)

        glDrawElements(g_primitiveMap[obj.vertsPerPrimitive - 1], obj.primitives.size, GL_UNSIGNED_INT, obj.primitives)

        glDisable(GL_POLYGON_OFFSET_FILL)
        glDisable(GL_COLOR_MATERIAL)
    elif obj.nTransparentPrimitives:
        if have_multisample and obj.alphaToCoverage:
            # Enable alpha-to-coverage (also called CSAA)
            # using the multisample buffer for alpha to coverage disables its use for MSAA (anti-aliasing)
            glEnable(GL_SAMPLE_ALPHA_TO_COVERAGE)
            #glEnable(GL_SAMPLE_ALPHA_TO_ONE)   # Enable this if transparent objects are too transparent
            glDisable(GL_BLEND)  # Disable alpha blending
        else:
            glDepthMask(GL_FALSE)
            glEnable(GL_ALPHA_TEST)
            glAlphaFunc(GL_GREATER, 0.0)

        glDrawElements(g_primitiveMap[obj.vertsPerPrimitive - 1], obj.primitives.size, GL_UNSIGNED_INT, obj.primitives)

        glDisable(GL_ALPHA_TEST)
        if have_multisample and obj.alphaToCoverage:
            glDisable(GL_SAMPLE_ALPHA_TO_COVERAGE)
            glEnable(GL_BLEND)
        else:
            glDepthMask(GL_TRUE)
    elif obj.depthless:
        # Draw without writing or testing depth.
        glDepthMask(GL_FALSE)
        glDisable(GL_DEPTH_TEST)

        glDrawElements(g_primitiveMap[obj.vertsPerPrimitive - 1], obj.primitives.size, GL_UNSIGNED_INT, obj.primitives)

        glEnable(GL_DEPTH_TEST)
        glDepthMask(GL_TRUE)
    else:
        glDrawElements(g_primitiveMap[obj.vertsPerPrimitive - 1], obj.primitives.size, GL_UNSIGNED_INT, obj.primitives)

    if obj.solid and not obj.nTransparentPrimitives:
        # Overdraw face groups that carry a highlight color.
        glDisableClientState(GL_COLOR_ARRAY)
        for i, (start, count) in enumerate(obj.groups):
            color = obj.gcolor(i)
            # Skip groups without a color, or colored plain white.
            if color is None or np.all(color[:3] == 255):
                continue
            glColor4ub(*color)
            indices = obj.primitives[start:start + count, :]
            glDrawElements(g_primitiveMap[obj.vertsPerPrimitive - 1], indices.size, GL_UNSIGNED_INT, indices)
        glEnableClientState(GL_COLOR_ARRAY)

    # Disable the shader if the driver supports it and there is a shader assigned
    if useShader:
        glUseProgram(0)

    # Restore state defaults
    if have_activeTexture:
        glActiveTexture(GL_TEXTURE0)
    glDisable(GL_CULL_FACE)
    glColor3f(1.0, 1.0, 1.0)
    glColorMaterial(GL_FRONT, GL_DIFFUSE)

    if obj.useVertexColors:
        glDisable(GL_COLOR_MATERIAL)

    # Disable custom vertex arrays again
    if useShader and obj.shaderObj.requiresVertexTangent():
        glDisableVertexAttribArray(obj.shaderObj.vertexTangentAttrId)

    # Re-enable lighting if it was disabled
    glEnable(GL_LIGHTING)
    glColorMaterial(GL_FRONT, GL_DIFFUSE)

    if obj.isTextured and obj.texture and obj.solid:
        glDisable(GL_TEXTURE_2D)

    if obj.hasUVs:
        glDisableClientState(GL_TEXTURE_COORD_ARRAY)

    glPopMatrix()
def _referrer_to_path(r): log.debug('Referrer was %s' % str(r)) if not r: return '' parsed = urlparse.urlparse(r) return parsed.netloc + parsed.path
def writeShapeKey(fp, name, shape, rmesh, config):
    """Write one COLLADA morph-target <geometry> element for *shape* to *fp*.

    fp     -- open file-like object receiving the XML text
    name   -- shape key name, embedded in the element ids
    shape  -- morph target with .verts (indices) and .data (offsets)
    rmesh  -- mesh wrapper providing getCoord()/getFvert() and .name
    config -- export configuration passed to rotateCoord()
    """
    if len(shape.verts) == 0:
        log.debug("Shapekey %s has zero verts. Ignored" % name)
        return

    progress = Progress()

    # Verts
    progress(0)
    # Apply the morph offsets to a copy of the base coordinates, then
    # rotate into the export coordinate system.
    target = np.array(rmesh.getCoord())
    target[shape.verts] += shape.data[np.s_[...]]
    target = rotateCoord(target, config)
    nVerts = len(target)

    fp.write(
        ' <geometry id="%sMeshMorph_%s" name="%s">\n' % (rmesh.name, name, name) +
        ' <mesh>\n' +
        ' <source id="%sMeshMorph_%s-positions">\n' % (rmesh.name, name) +
        ' <float_array id="%sMeshMorph_%s-positions-array" count="%d">\n' % (rmesh.name, name, 3*nVerts) +
        ' ')
    fp.write( ''.join([("%.4f %.4f %.4f " % tuple(co)) for co in target]) )
    fp.write('\n' +
        ' </float_array>\n' +
        ' <technique_common>\n' +
        ' <accessor source="#%sMeshMorph_%s-positions-array" count="%d" stride="3">\n' % (rmesh.name, name, nVerts) +
        ' <param name="X" type="float"/>\n' +
        ' <param name="Y" type="float"/>\n' +
        ' <param name="Z" type="float"/>\n' +
        ' </accessor>\n' +
        ' </technique_common>\n' +
        ' </source>\n')
    progress(0.3)

    # Normals (disabled: kept as dead code in a string literal)
    """
    fp.write(
        ' <source id="%sMeshMorph_%s-normals">\n' % (rmesh.name, name) +
        ' <float_array id="%sMeshMorph_%s-normals-array" count="18">\n' % (rmesh.name, name))
    -0.9438583 0 0.3303504 0 0.9438583 0.3303504 0.9438583 0 0.3303504 0 -0.9438583 0.3303504 0 0 -1 0 0 1
    fp.write(
        ' </float_array>\n' +
        ' <technique_common>\n' +
        ' <accessor source="#%sMeshMorph_%s-normals-array" count="6" stride="3">\n' % (rmesh.name, name) +
        ' <param name="X" type="float"/>\n' +
        ' <param name="Y" type="float"/>\n' +
        ' <param name="Z" type="float"/>\n' +
        ' </accessor>\n' +
        ' </technique_common>\n' +
        ' </source>\n')
    """
    progress(0.6)

    # Polylist (all faces written as quads)
    fvert = rmesh.getFvert()
    nFaces = len(fvert)
    fp.write(
        ' <vertices id="%sMeshMorph_%s-vertices">\n' % (rmesh.name, name) +
        ' <input semantic="POSITION" source="#%sMeshMorph_%s-positions"/>\n' % (rmesh.name, name) +
        ' </vertices>\n' +
        ' <polylist count="%d">\n' % nFaces +
        ' <input semantic="VERTEX" source="#%sMeshMorph_%s-vertices" offset="0"/>\n' % (rmesh.name, name) +
        #' <input semantic="NORMAL" source="#%sMeshMorph_%s-normals" offset="1"/>\n' % (rmesh.name, name) +
        ' <vcount>')
    fp.write( ''.join(["4 " for fv in fvert]) )
    fp.write('\n' +
        ' </vcount>\n' +
        ' <p>')
    fp.write( ''.join([("%d %d %d %d " % (fv[0], fv[1], fv[2], fv[3])) for fv in fvert]) )
    fp.write('\n' +
        ' </p>\n' +
        ' </polylist>\n' +
        ' </mesh>\n' +
        ' </geometry>\n')
    progress(1)
def getVertexWeights(self, humanWeights, skel=None, allowCache=False):
    """
    Map armature weights mapped to the human to the proxy mesh through the
    proxy mapping.
    humanWeights is expected to be an animation.VertexBoneWeights object.

    Only when this proxy has custom weights:
    Optionally remaps the weights to fit a user-selected skeleton when a
    skel is supplied as argument. If no skel argument is provided, the
    weights for the base skeleton are returned.

    Note: these vertex weights are intended for rigging and are not to be
    confused with getWeights() which returns the weights of the proxy
    mapping to the basemesh.
    """
    # Override proxy weights mapping behaviour if this proxy has its own
    # bone weights defined explicitly.
    # This requires remapping the vertex weights of the proxy, defined on
    # the bones of the reference skeleton, to those of the current skeleton.
    # The current skeleton is retrieved from the human object linked to this
    # proxy.
    import time
    import log

    if self.hasCustomVertexWeights():
        # TODO we could introduce caching of weights here as long as the skeleton is not changed
        if skel is None:
            return self.human.getBaseSkeleton().getVertexWeights(self.vertexBoneWeights, force_remap=True)
        else:
            return skel.getVertexWeights(self.vertexBoneWeights, force_remap=True)

    # Remap weights through proxy mapping
    WEIGHT_THRESHOLD = 1e-4  # Threshold for including bone weight

    # Decide whether the cached result can be reused: only when caching is
    # allowed, a cache exists, and the cached skeleton matches *skel*.
    # NOTE(review): when skel is None the cache is never reused even if one
    # exists — confirm whether that is intentional.
    recalculate = True
    weights = OrderedDict()
    if not allowCache:
        pass
        #print("Caching not allowed")
    else:
        if self.weightsCache is None:
            pass
            #print("There is no cache")
        else:
            if not skel is None:
                if skel == self.cacheSkel:
                    recalculate = False
                else:
                    log.debug("The skeleton is different")

    if recalculate:
        log.debug("remapping weights for proxy " + self.name)
        start = time.perf_counter()
        for bname, (indxs, wghts) in list(humanWeights.data.items()):
            vgroup = []
            empty = True
            for (v, wt) in zip(indxs, wghts):
                try:
                    vlist = self.vertWeights[v]
                except KeyError:
                    # Human vertex not mapped by this proxy.
                    vlist = []
                for (pv, w) in vlist:
                    # Combine the human bone weight with the proxy mapping
                    # weight; drop negligible contributions.
                    pw = w * wt
                    if (pw > WEIGHT_THRESHOLD):
                        vgroup.append((pv, pw))
                        empty = False
            if not empty:
                weights[bname] = vgroup
        stop = time.perf_counter()
        hw = humanWeights.create(weights)
        if allowCache:
            self.weightsCache = hw
            self.cacheSkel = skel
        else:
            # Explicitly invalidate any previous cache.
            self.weightsCache = None
            self.cacheSkel = None
        log.debug("remapping weights for %s took %.5f seconds", self.name, stop - start)
    else:
        hw = self.weightsCache
    return hw
def handleCreateBidRequest(self, data):
    """Handle a client's create-bid request.

    Signs the request, forwards it to the manager for validation, verifies
    the manager's authenticity, then records the bid on the matching ACTIVE
    auction. Always returns a response dict built via buildResponse();
    failure paths carry an "operation-error" entry.
    """
    log.high_debug("Hit handleCreateBidRequest!")
    log.high_debug("Data:\n " + str(data))
    data_dict = data["data"]

    # Ask manager to validate bid
    # request_params = {
    #     "auction-sn": data_dict["auction-sn"],
    #     "client-sn": data["client-sn"],
    #     "bid-value": data_dict["bid-value"]
    # }

    # Sign and ask manager to validate client
    self.sign_data(data)
    validated_response = self.__sendRequestAndWait("manager", data)

    if validated_response["bid-is-valid"] == False:
        log.warning("Bid did not pass Manager's validation process! Dropping it...")
        # Return right away if not valid
        response_params = {
            "operation-error":
            "Bid did not pass the validation process by the Auction Manager!"
        }
        response = self.buildResponse("create-bid", response_params)
        # Sign again since data has been updated
        self.sign_data(response)
        return response

    # Validate manager's authenticity
    if not self.validate_manager_request(validated_response):
        log.warning("Could not validate Manager's authenticity!")
        # Return right away if not valid
        response_params = {
            "operation-error":
            "Failed to verify Manager's authenticity! Aborting."
        }
        response = self.buildResponse("create-bid", response_params)
        # Sign again since data has been updated
        self.sign_data(response)
        return response

    # Sign client's original packet
    # self.sign_data(validated_response)
    # Default success payload: echo the manager-validated packet.
    response_params = validated_response

    log.debug("Auction Manager validated bid...")
    try:
        auction_sn = int(data_dict["auction-sn"])
        matched_auctions = [
            d for d in self.__auctionsList
            if d.serialNumber == auction_sn and d.isActive
        ]
        if len(matched_auctions) == 0:
            log.error("Operation: {} from client-sn: {} => FAILED [{}] ".format(
                data["operation"], data["client-sn"],
                "No ACTIVE auctions were found by SN!"))
            response_params = {
                "operation-error":
                "No ACTIVE auctions were found by that Serial Number!"
            }
        else:
            target_auction = matched_auctions[0]
            log.high_debug(target_auction.getMinBidValue())
            # Check if greater than min
            if target_auction.type_of_auction == "English" and int(
                    data_dict["bid-value"]) <= target_auction.getMinBidValue():
                response_params = {
                    "operation-error":
                    "Bid value is less or equal than the minimum value"
                }
                log.info(
                    "Operation: {} from client-sn: {} => FAILED [Bid of: {} on auction-sn: {} <= MIN value]"
                    .format(data["operation"], data["client-sn"],
                            data_dict["bid-value"], data_dict["auction-sn"]))
            else:
                # Store the bid together with the manager's signed validation
                # (serialized deterministically with sort_keys for stable hashes).
                target_auction.addNewBid(
                    data["client-sn"], data_dict["bid-value"],
                    json.dumps(validated_response, sort_keys=True))
                log.high_debug(target_auction)
                log.info(
                    "Operation: {} from client-sn: {} => OK [Bid of: {} on auction-sn: {}]"
                    .format(data["operation"], data["client-sn"],
                            data_dict["bid-value"], data_dict["auction-sn"]))
    except Exception as e:
        log.error("Operation: {} from client-sn: {} => FAILED [{}] ".format(
            data["operation"], data["client-sn"], str(e)))
        response_params = {
            "operation-error": "A server internal error occured!"
        }
        log.error(str(e))

    response = self.buildResponse("create-bid", response_params)
    # self.sign_data(response_data)
    return response
def main():
    """Parse a C source file, run semantic checks, and emit .mass code.

    Reads the input path from argv[1] (default 'test/test.c'), walks the
    AST maintaining a symbol-table stack, then generates code unless a
    semantic error was recorded.
    """
    filename = None
    if len(sys.argv) > 1:
        filename = sys.argv[1]
    else:
        filename = 'test/test.c'
    with open(filename) as r:
        result = parser.parse(r.read())
    log.info(result)
    if result is None:
        log.error('AST is None - exiting')
        return

    # Iterative pre-order walk over the AST.
    node_stack = [result]
    table_stack = [SymbolTable(SymbolScope.GLOBAL)]
    table_cache = {}
    while node_stack:
        node = node_stack.pop()
        log.debug(node)
        if not node:
            continue
        if node.symbol == 'function_def':
            signal = handler.handle_function_def(node_stack, table_stack, node)
            if signal == handler.Signal.CONTINUE:
                continue
        elif node.symbol == 'function_def_end':
            # Preserve the function's local table for code generation.
            table_cache[node.args[0].symbol] = table_stack[-1]
            handler.handle_function_def_end(node_stack, table_stack, node)
        elif node.symbol == 'function_decl':
            handler.handle_function_decl(node_stack, table_stack, node)
        elif node.symbol == 'decl':
            handler.handle_decl(node_stack, table_stack, node)
        else:
            if node.attrs.get('name') == 'identifier':
                # Look up the identifier: innermost table first, then global.
                if table_stack[-1].get(node.symbol) is None:
                    if table_stack[-1].scope == SymbolScope.LOCAL and table_stack[0].get(node.symbol) is None:
                        log.error(f'{node.symbol} referenced before declaration')
                    else:
                        info(table_stack[0].get(node.symbol), usage=node.attrs.get('line', True))
                else:
                    info(table_stack[-1].get(node.symbol), usage=node.attrs.get('line', True))
                continue
            elif node.symbol == 'function_call':
                func = table_stack[0].get(node.args[0].symbol)
                if func:
                    # Mark the declaration as called so we can later detect
                    # declared-and-called-but-never-defined functions.
                    func.attrs['call'] = True
                checker.check_function_call(node_stack, table_stack, node)
            elif node.symbol in checker.PROPAGATING_UNARY_SYMBOLS:
                if len(node.args) == 1:
                    checker.check_unary(node_stack, table_stack, node)
                else:
                    checker.check_binary(node_stack, table_stack, node)
            elif node.symbol in checker.PROPAGATING_BINARY_SYMBOLS:
                checker.check_binary(node_stack, table_stack, node)
        # Push children in reverse so they are visited left-to-right.
        for child in reversed(node.args):
            if child:
                node_stack.append(child)

    # check if a declared function was called, it was later defined
    for v in table_stack[0].table.values():
        if not v.attrs.get('init') and v.attrs.get('call'):
            log.error(f'Function {v} declared and called but never defined')

    # check if main was defined
    if 'main' not in table_stack[0].table:
        log.error('main is not defined')

    # if there was an error do not generate code
    if error.ERROR:
        log.info('Exiting without any code generation')
    else:
        log.info('Generating code')
        output = generate(result, table_cache, table_stack[0])
        log.info(output)
        flattened_output = utils.flatten_array(output)
        log.info(flattened_output)
        string_output = functools.reduce(lambda x, y: '\n'.join((str(x), str(y))), flattened_output)
        log.info(string_output)
        # BUG FIX: the output file was previously opened without ever being
        # closed; use a context manager so it is flushed and closed reliably.
        with open(f'{filename.split(".")[0]}.mass', 'w+') as f:
            f.write(f'{string_output}\n')
def mapImageSoft(srcImg, mesh, leftTop, rightBottom):
    """Software-project *srcImg* onto the selected human's texture.

    The mesh's quads are projected to screen space, normalized into the
    background rectangle (leftTop..rightBottom), and each quad is rasterized
    as two triangles into a copy of the current texture, alpha-weighted by
    how much each vertex normal faces the camera. Returns the new image.
    """
    # NOTE(review): Progress()(0) relies on Progress.__call__ returning the
    # progress object itself (it is called again below) — confirm.
    progress = Progress()(0)
    dstImg = mh.Image(G.app.selectedHuman.getTexture())

    dstW = dstImg.width
    dstH = dstImg.height

    srcImg = srcImg.convert(dstImg.components)

    camera = getCamera(mesh)
    faces = getFaces(mesh)

    # log.debug('matrix: %s', G.app.modelCamera.getConvertToScreenMatrix())
    progress(0.05)

    # Texture coordinates in pixel space, with the V axis flipped
    # (image origin is top-left).
    texco = np.asarray([0, dstH])[None, None, :] + mesh.texco[mesh.fuvs[faces]] * np.asarray([dstW, -dstH])[None, None, :]
    matrix_ = np.asarray(G.app.modelCamera.getConvertToScreenMatrix(mesh))
    # Homogeneous face-vertex coordinates for the screen projection.
    coord = np.concatenate((mesh.coord[mesh.fvert[faces]], np.ones((len(faces), 4, 1))), axis=-1)
    # log.debug('texco: %s, coord: %s', texco.shape, coord.shape)
    coord = np.sum(matrix_[None, None, :, :] * coord[:, :, None, :], axis=-1)
    # log.debug('coord: %s', coord.shape)
    # Perspective divide, keeping only x and y.
    coord = coord[:, :, :2] / coord[:, :, 3:]
    progress(0.1)
    # log.debug('coord: %s', coord.shape)
    # log.debug('coords: %f-%f, %f-%f',
    #           np.amin(coord[...,0]), np.amax(coord[...,0]),
    #           np.amin(coord[...,1]), np.amax(coord[...,1]))
    # log.debug('rect: %s %s', leftTop, rightBottom)
    # Normalize screen coordinates into the background rectangle.
    coord -= np.asarray([leftTop[0], leftTop[1]])[None, None, :]
    coord /= np.asarray([rightBottom[0] - leftTop[0], rightBottom[1] - leftTop[1]])[None, None, :]
    # Per-vertex facing factor: normal dot view direction, clamped to >= 0.
    alpha = np.sum(mesh.vnorm[mesh.fvert[faces]] * camera[None, None, :], axis=-1)
    alpha = np.maximum(0, alpha)
    # alpha[...] = 1 # debug
    # log.debug('alpha: %s', alpha.shape)
    # log.debug('coords: %f-%f, %f-%f',
    #           np.amin(coord[...,0]), np.amax(coord[...,0]),
    #           np.amin(coord[...,1]), np.amax(coord[...,1]))
    progress(0.15)
    uva = np.concatenate((coord, alpha[..., None]), axis=-1)
    # log.debug('uva: %s', uva.shape)
    # Keep faces with at least one camera-facing vertex.
    valid = np.any(alpha >= 0, axis=1)
    # log.debug('valid: %s', valid.shape)
    texco = texco[valid, :, :]
    uva = uva[valid, :, :]
    # log.debug('%s %s', texco.shape, uva.shape)
    # log.debug('src: %s, dst: %s', srcImg.data.shape, dstImg.data.shape)

    log.debug("mapImage: begin render")
    progress(0.2, 0.6)
    # Rasterize each quad as two triangles: (0,1,2) and (2,3,0).
    RasterizeTriangles(dstImg, texco[:, [0, 1, 2], :], UvAlphaShader(dstImg, srcImg, uva[:, [0, 1, 2], :]))
    progress(0.6, 0.99)
    RasterizeTriangles(dstImg, texco[:, [2, 3, 0], :], UvAlphaShader(dstImg, srcImg, uva[:, [2, 3, 0], :]))
    progress.finish()
    log.debug("mapImage: end render")

    return dstImg
def execute(*command): log.debug("Run: {}", ' '.join(command)) return os.system(' '.join(command))
def update_state_by_current_candle(subject_code, price):
    """Update the subject's trade state from the current candle.

    price is a quote dict; '현재가' is the current price and '시가' the
    candle's opening price. While the state is 'touched trade line'
    (매매선터치) or 'entered trade zone' (매매구간진입), the state and the
    accumulated-candles counter (매매구간누적캔들) are updated depending on
    whether the candle opened on the far side of the trade line (매매선).
    """
    current_price = float(price['현재가'])
    start_price = float(price['시가'])
    '''
    if subject.info[subject_code]['상태'] == '매매선터치' or subject.info[subject_code]['상태'] == '매매구간진입' :
        if calc.data[subject_code]['추세'][ calc.data[subject_code]['idx'] ] == '상승세':
            if current_price >= calc.data[subject_code]['매매선'][ calc.data[subject_code]['idx'] ]:
                log.debug('상태변경 : ' + subject.info[subject_code]['상태'] + ' -> 매매구간진입')
                subject.info[subject_code]['상태'] = '매매구간진입'
                subject.info[subject_code]['매매구간누적캔들'] += 1
                log.debug('매매구간 누적캔들 : ' + str(subject.info[subject_code]['매매구간누적캔들']))
            else:
                log.debug('상태변경 : ' + subject.info[subject_code]['상태'] + ' -> 매매선터치')
                subject.info[subject_code]['상태'] = '매매선터치'
                subject.info[subject_code]['매매구간누적캔들'] = 0
        elif calc.data[subject_code]['추세'][ calc.data[subject_code]['idx'] ] == '하락세':
            if current_price <= calc.data[subject_code]['매매선'][ calc.data[subject_code]['idx'] ]:
                log.debug('상태변경 : ' + subject.info[subject_code]['상태'] + ' -> 매매구간진입')
                subject.info[subject_code]['상태'] = '매매구간진입'
                subject.info[subject_code]['매매구간누적캔들'] += 1
                log.debug('매매구간 누적캔들 : ' + str(subject.info[subject_code]['매매구간누적캔들']))
            else:
                log.debug('상태변경 : ' + subject.info[subject_code]['상태'] + ' -> 매매선터치')
                subject.info[subject_code]['상태'] = '매매선터치'
                subject.info[subject_code]['매매구간누적캔들'] = 0
    '''
    if subject.info[subject_code]['상태'] == '매매선터치' or subject.info[
            subject_code]['상태'] == '매매구간진입':
        # Uptrend (상승세): price at/above the trade line.
        if calc.data[subject_code]['추세'][calc.data[subject_code]
                                         ['idx']] == '상승세':
            if current_price >= calc.data[subject_code]['매매선'][
                    calc.data[subject_code]['idx']]:
                if start_price < calc.data[subject_code]['매매선'][
                        calc.data[subject_code]['idx']]:
                    # Candle opened below the line: (re)touch, reset counter.
                    log.debug('상태변경 : ' + subject.info[subject_code]['상태'] +
                              ' -> 매매선터치')
                    subject.info[subject_code]['상태'] = '매매선터치'
                    subject.info[subject_code]['매매구간누적캔들'] = 0
                else:
                    # Candle opened above the line: stay in the zone,
                    # accumulate one more candle.
                    log.debug('상태변경 : ' + subject.info[subject_code]['상태'] +
                              ' -> 매매구간진입')
                    subject.info[subject_code]['상태'] = '매매구간진입'
                    subject.info[subject_code]['매매구간누적캔들'] += 1
                    log.debug('매매구간 누적캔들 : ' +
                              str(subject.info[subject_code]['매매구간누적캔들']))
        # Downtrend (하락세): mirrored logic, price at/below the trade line.
        elif calc.data[subject_code]['추세'][calc.data[subject_code]
                                           ['idx']] == '하락세':
            if current_price <= calc.data[subject_code]['매매선'][
                    calc.data[subject_code]['idx']]:
                if start_price > calc.data[subject_code]['매매선'][
                        calc.data[subject_code]['idx']]:
                    log.debug('상태변경 : ' + subject.info[subject_code]['상태'] +
                              ' -> 매매선터치')
                    subject.info[subject_code]['상태'] = '매매선터치'
                    subject.info[subject_code]['매매구간누적캔들'] = 0
                else:
                    log.debug('상태변경 : ' + subject.info[subject_code]['상태'] +
                              ' -> 매매구간진입')
                    subject.info[subject_code]['상태'] = '매매구간진입'
                    subject.info[subject_code]['매매구간누적캔들'] += 1
                    log.debug('매매구간 누적캔들 : ' +
                              str(subject.info[subject_code]['매매구간누적캔들']))
def main(): log.debug("START test_formatting") exitcode = lib.tests.run_data_tests(Tests()) if exitcode != 0: log.error("some tests Failed, setting exit code %d" % exitcode) sys.exit(exitcode)
def shutdown(self, func: Callable[[], None]): self._shutdown_handlers.append(func) log.debug(f"registered new shutdown handler {func.__name__}()") return func
def is_it_OK(subject_code, current_price):
    '''
    Decide whether a new order should be placed ("should I buy this?").

    Runs a chain of entry conditions (closing-time guard, moving-average
    alignment, Ichimoku cloud position, trade-zone state, distance to the
    trade line, trend-line slope). Returns {'신규주문': False} as soon as any
    condition fails, otherwise an order dict with direction, profit/stop
    ticks and quantity.
    '''
    # No new purchase when close to the market closing time.
    if get_time(30) >= int(subject.info[subject_code]['마감시간']) and get_time(0) < int(subject.info[subject_code]['마감시간']):
        log.debug('마감시간 임박으로 구매 불가')
        return {'신규주문': False}

    # Check moving-average alignment (consecutive ticks since trend start).
    #if calc.data[subject_code]['정배열연속틱'] < subject.info[subject_code]['최소연속틱']:
    if calc.get_line_range(subject_code) < subject.info[subject_code]['최소연속틱']:
        return {'신규주문': False}
    #log.debug('정배열연속틱 : ' + str(calc.data[subject_code]['정배열연속틱']) + ' >= 최소연속틱 : ' + str(subject.info[subject_code]['최소연속틱']) + ' 구매조건 통과.')
    log.debug('추세시작부터 연속틱 : ' + str(calc.get_line_range(subject_code)) + ' >= 최소연속틱 : ' + str(subject.info[subject_code]['최소연속틱']) + ' 구매조건 통과.')

    # Ichimoku (일목균형표) check: in an uptrend the price must not be below
    # the cloud; in a downtrend it must not be above it.
    if calc.data[subject_code]['추세'][calc.data[subject_code]['idx']] == '상승세':
        if calc.data[subject_code]['일목균형표']['선행스팬1'][calc.data[subject_code]['idx']] > current_price and calc.data[subject_code]['일목균형표']['선행스팬2'][calc.data[subject_code]['idx']] > current_price:
            return {'신규주문': False}
        log.debug('현재가가 일목균형표 구름대보다 위에 있는 구매조건 통과.')
    elif calc.data[subject_code]['추세'][calc.data[subject_code]['idx']] == '하락세':
        if calc.data[subject_code]['일목균형표']['선행스팬1'][calc.data[subject_code]['idx']] < current_price and calc.data[subject_code]['일목균형표']['선행스팬2'][calc.data[subject_code]['idx']] < current_price:
            return {'신규주문': False}
        log.debug('현재가가 일목균형표 구름대보다 아래에 있는 구매조건 통과.')

    # Check whether the trend line has been touched / trade zone entered.
    '''
    if subject.info[subject_code]['상태'] == '매매구간진입':
        log.debug('현재가가 매매구간진입 상태로 구매조건 통과.')
    '''
    if subject.info[subject_code]['상태'] == '매매구간진입' and subject.info[subject_code]['매매구간누적캔들'] >= 1:
        log.debug('현재가가 매매구간진입 상태이며, 매매구간누적캔들 ' + str(subject.info[subject_code]['매매구간누적캔들']) + '개로 구매조건 통과.')
    else:
        log.debug('현재상태 : ' + subject.info[subject_code]['상태'] + ', 매매구간누적캔들 : ' + str(subject.info[subject_code]['매매구간누적캔들']) + '로 구매조건 미달.')
        return {'신규주문': False}

    # Buy only when within min_tick ticks of the trade line.
    # NOTE(review): the original comment said 5 ticks but the code uses 2 —
    # confirm the intended threshold.
    min_tick = 2
    if abs(current_price - calc.data[subject_code]['매매선'][-1]) > min_tick * subject.info[subject_code]['단위']:
        log.debug('현재가 : ' + str(current_price) + ', 매매선가 : ' + str(calc.data[subject_code]['매매선'][-1]) + ' ' + str(min_tick) + '틱 이상 차이로 구매 안함.')
        return {'신규주문': False}
    log.debug('매매선과 현재가가 ' + str(min_tick) + '틱 이내로 구매조건 통과.')

    # Reject trades when the trend-line slope is too shallow.
    if calc.data[subject_code]['추세'][calc.data[subject_code]['idx']] == '상승세':
        if calc.data[subject_code]['추세선기울기'] < 0.002:
            log.debug('추세선 기울기가 ' + str(calc.data[subject_code]['추세선기울기']) + '로 너무 작아 매매 불가.')
            return {'신규주문': False}
        else:
            log.debug('추세선 기울기 : ' + str(calc.data[subject_code]['추세선기울기']) + '로 구매조건 통과.')
    elif calc.data[subject_code]['추세'][calc.data[subject_code]['idx']] == '하락세':
        if calc.data[subject_code]['추세선기울기'] > -0.002:
            log.debug('추세선 기울기가 ' + str(calc.data[subject_code]['추세선기울기']) + '로 너무 작아 매매 불가.')
            return {'신규주문': False}
        else:
            log.debug('추세선 기울기 : ' + str(calc.data[subject_code]['추세선기울기']) + '로 구매조건 통과.')

    # All conditions passed: build the order from the current account state.
    # TODO: derive the affordable contract count from the stored balance.
    contract_cnt = 2

    # Order direction: buy in an uptrend, sell in a downtrend.
    mesu_medo_type = None
    if calc.data[subject_code]['추세'][calc.data[subject_code]['idx']] == '상승세':
        mesu_medo_type = '신규매수'
    elif calc.data[subject_code]['추세'][calc.data[subject_code]['idx']] == '하락세':
        mesu_medo_type = '신규매도'

    # Profit/stop ticks scale with the trend-line slope, capped at 10.
    profit_tick = int(abs(calc.data[subject_code]['추세선기울기'] * 5000))
    if profit_tick > 10:
        profit_tick = 10

    order_contents = {
        '신규주문': True,
        '매도수구분': mesu_medo_type,
        '익절틱': profit_tick,
        '손절틱': profit_tick,
        '수량': contract_cnt
    }
    log.debug('santa.is_it_OK() : 모든 구매조건 통과.')
    log.debug(order_contents)
    return order_contents
def member_remove(self, func: Callable[[], None]): self._member_remove_handlers.append(func) log.debug(f"registered new member_remove handler {func.__name__}()") return func
def write(self, msg, *args): self.debug.write((msg % args) + "\n") log.debug(msg, *args)
def traceReference(self): log.debug("self.refCharPaths:") for key, value in self.refCharPaths.items(): log.debug(" %s: %s" % (key, value))
def ready(self, func: Callable[[], None]): self._ready_handlers.append(func) log.debug(f"registered new ready handler {func.__name__}()") return func
def _debug_print(root, pre=''): if not root: return for (key, vals) in list(root.items()): log.debug(pre + "%s" % key) _debug_print(vals, pre + ' ')
def recv(self): ret = htsmsg.deserialize(self._sock, False) log.debug('htsp rx:') log.debug(ret, pretty=True) return ret
def writeShapeKey(fp, name, shape, mesh, config): if len(shape.verts) == 0: log.debug("Shapekey %s has zero verts. Ignored" % name) return progress = Progress() # Verts progress(0) target = mesh.coord.copy() target[:] += config.offset target[shape.verts] += shape.data[np.s_[...]] target = rotateCoord(config.scale * target, config) nVerts = len(target) fp.write( ' <geometry id="%sMeshMorph_%s" name="%s">\n' % (mesh.name, name, name) + ' <mesh>\n' + ' <source id="%sMeshMorph_%s-positions">\n' % (mesh.name, name) + ' <float_array id="%sMeshMorph_%s-positions-array" count="%d">\n' % (mesh.name, name, 3 * nVerts) + ' ') fp.write(''.join([("%.4f %.4f %.4f " % tuple(co)) for co in target])) fp.write( '\n' + ' </float_array>\n' + ' <technique_common>\n' + ' <accessor source="#%sMeshMorph_%s-positions-array" count="%d" stride="3">\n' % (mesh.name, name, nVerts) + ' <param name="X" type="float"/>\n' + ' <param name="Y" type="float"/>\n' + ' <param name="Z" type="float"/>\n' + ' </accessor>\n' + ' </technique_common>\n' + ' </source>\n') progress(0.3) # Polylist nFaces = len(mesh.fvert) fp.write( ' <vertices id="%sMeshMorph_%s-vertices">\n' % (mesh.name, name) + ' <input semantic="POSITION" source="#%sMeshMorph_%s-positions"/>\n' % (mesh.name, name) + ' </vertices>\n' + ' <polylist count="%d">\n' % nFaces + ' <input semantic="VERTEX" source="#%sMeshMorph_%s-vertices" offset="0"/>\n' % (mesh.name, name) + #' <input semantic="NORMAL" source="#%sMeshMorph_%s-normals" offset="1"/>\n' % (mesh.name, name) + ' <vcount>') fp.write(''.join(["4 " for fv in mesh.fvert])) fp.write('\n' + ' </vcount>\n' + ' <p>') fp.write(''.join([("%d %d %d %d " % tuple(fv)) for fv in mesh.fvert])) fp.write('\n' + ' </p>\n' + ' </polylist>\n' + ' </mesh>\n' + ' </geometry>\n') progress(1)
def debugGroups(self): """ Debug print all group keys for the targets stored in groups. """ log.debug("Targets keys:\n%s", "\n".join(["-".join(k) for k in list(self.groups.keys())]))
def loadMesh(path, locX=0, locY=0, locZ=0, loadColors=1, maxFaces=None): """ This function loads the specified mesh object into internal MakeHuman data structures, and returns it. The loaded file should be in Wavefront OBJ format. Parameters: ----------- path: *String*. The file system path to the file containing the object to load. locX: *float* X location of loaded obj, default = 0 locY: *float* Y location of loaded obj, default = 0 locZ: *float* Z location of loaded obj, default = 0 Note: loadColors is currently unused maxFaces: *uint* Number of faces per vertex (pole), None for default (min 4) """ name = os.path.basename(path) obj = module3d.Object3D(name) if maxFaces: obj.MAX_FACES = maxFaces obj.path = path obj.x = locX obj.y = locY obj.z = locZ try: npzpath = os.path.splitext(path)[0] + '.npz' try: if not os.path.isfile(npzpath): log.message('compiled file missing: %s', npzpath) raise RuntimeError('compiled file missing: %s', npzpath) if os.path.isfile(path) and os.path.getmtime( path) > os.path.getmtime(npzpath): log.message('compiled file out of date: %s', npzpath) raise RuntimeError('compiled file out of date: %s', npzpath) loadBinaryMesh(obj, npzpath) except Exception as e: showTrace = not isinstance(e, RuntimeError) log.warning("Problem loading binary mesh: %s", e, exc_info=showTrace) loadTextMesh(obj, path) if isSubPath(npzpath, getPath('')): # Only write compiled binary meshes to user data path try: saveBinaryMesh(obj, npzpath) except StandardError: log.notice('unable to save compiled mesh: %s', npzpath) else: log.debug('Not writing compiled meshes to system paths (%s).', npzpath) except: log.error('Unable to load obj file: %s', path, exc_info=True) return False return obj
def calculate(self, event=None): log.debug("Event: %s", event) meters = utils.feet_to_meters(self.feet.get()) if meters is not None: self.meters.set(meters)