def cmd_Enable(self, msg):
    """Translate the result of basic_Enable() into a command message code.

    :param msg: ILxMessage COM object; wrapped and stamped with the
        resulting code.
    :raises: CMD_NOT_AVAILABLE when basic_Enable() returned None,
        CMD_DISABLED when it returned a falsy value.
    """
    msg = lx.object.Message(msg)
    res = self.basic_Enable(msg)
    # Identity test (`is None`) rather than `== None`: None specifically
    # means "not available", which is distinct from a False/0 "disabled"
    # result.
    if res is None:
        msg.SetCode(lx.result.CMD_NOT_AVAILABLE)
        lx.throw(lx.result.CMD_NOT_AVAILABLE)
    elif not res:
        msg.SetCode(lx.result.CMD_DISABLED)
        lx.throw(lx.result.CMD_DISABLED)
    else:
        msg.SetCode(lx.result.OK)
def cmd_ArgSetDatatypes(self):
    """Resolve datatypes for the command's variable-type arguments.

    No-op unless the command declared variable arguments. First verifies
    that every argument flagged REQFORVARIABLE has a value, throwing
    CMD_MISSING_ARG otherwise; then marks every argument REQFORVAR_SET
    and assigns each VARIABLE argument the type basic_ArgType() reports.
    """
    if not self._has_varg:
        return

    # All arguments the variable types depend on must be set first.
    for index, flags in enumerate(self._flags):
        if flags & lx.symbol.fCMDARG_REQFORVARIABLE and not self.dyna_IsSet(index):
            lx.throw(lx.result.CMD_MISSING_ARG)

    # Mark every argument as resolved and give variable ones a concrete type.
    for index in range(len(self._flags)):
        self._flags[index] |= lx.symbol.fCMDARG_REQFORVAR_SET
        if self._flags[index] & lx.symbol.fCMDARG_VARIABLE:
            self.dyna_SetType(index, self.basic_ArgType(index))
def load_LoadObject(self, loadInfo, monitor, dest):
    """Build a Modo mesh item from the PLY data recognized by load_Recognize().

    Reads vertex and face data from self.filehandle (positioned just past
    the header), creates a mesh item in the destination scene and fills it
    with points and polygons.

    :param loadInfo: loader info object (state was captured during
        load_Recognize(); not read here).
    :param monitor: ILxMonitor COM object used for progress reporting.
    :param dest: destination scene object.
    :raises: lx.result.FALSE when the mesh or its accessors are invalid.
    :returns: lx.result.OK on success.
    """
    _monitor = lx.object.Monitor(monitor)
    # Set position of file at end of header.
    self.filehandle.seek(self.end_header)

    vertices = []
    faces = []
    if self.format == "ascii":
        for element in self.elements:
            if element.get('name') == "vertex":
                # Collect the expected converter for each declared property.
                types = []
                for prop in element.get("properties"):
                    types.append(ascii_property_types.get(prop.get('type')))
                # Each element is expected to be stored on one line; parse
                # each line with the expected property types.
                for _ in range(element.get('count', 0)):
                    data = tuple(
                        t(value) for t, value in zip(
                            types,
                            self.filehandle.readline().strip().split()))
                    # Keep only the first three values, assuming here that
                    # they are the position xyz.
                    vertices.append(data[:3])
            elif element.get('name') == "face":
                for _ in range(element.get('count', 0)):
                    data = self.filehandle.readline().strip().split()
                    # First value is the index count; the rest are indices.
                    faces.append(tuple(int(d) for d in data[1:]))
    elif self.format == "binary_big_endian":
        chunk_size = 1024
        for element in self.elements:
            if element.get('name') == "vertex":
                fmt = '>'
                for prop in element.get("properties"):
                    fmt += binary_property_types.get(prop.get('type'))
                size = struct.calcsize(fmt)  # byte size of one vertex
                # Total byte size for all vertices.
                total_size = size * element.get('count', 0)
                # Bytes per read, rounded down to whole vertices. Guard
                # against a vertex record larger than chunk_size, which
                # would make read_size zero and divide by zero below.
                read_size = chunk_size - (chunk_size % size)
                if read_size == 0:
                    read_size = size
                num_reads = total_size // read_size  # full reads needed
                num_verts = read_size // size        # vertices per full read
                overflow = total_size % read_size    # trailing bytes
                _monitor.Initialize(num_reads + 1 if overflow else num_reads)
                for _ in range(num_reads):
                    data = self.filehandle.read(read_size)
                    for x in range(num_verts):
                        vertex = struct.unpack_from(fmt, data, offset=x * size)
                        # Keep only xyz, matching the ascii path; extra
                        # properties (normals, colors, ...) must not reach
                        # point.New() below.
                        vertices.append(vertex[:3])
                    _monitor.Increment(1)
                if overflow:
                    # Only tick the monitor for the tail read when there
                    # actually is one; Initialize() above only counted it
                    # in that case.
                    data = self.filehandle.read(overflow)
                    for x in range(overflow // size):
                        vertex = struct.unpack_from(fmt, data, offset=x * size)
                        vertices.append(vertex[:3])
                    _monitor.Increment(1)
            elif element.get('name') == "face":
                face_count = element.get('count', 0)
                _monitor.Initialize(face_count)
                for _ in range(face_count):
                    # List properties have variable length, so each face
                    # needs its own reads.
                    pos = self.filehandle.tell()
                    # First read how many indices to expect; should be an
                    # int/uchar, so always a single value.
                    fmt = '>' + binary_property_types.get(
                        element['properties'][0]['size'])
                    size = struct.calcsize(fmt)
                    data = self.filehandle.read(size)
                    num_indices, = struct.unpack(fmt, data)
                    # Then read the indices themselves.
                    t = binary_property_types.get(
                        element['properties'][0]['type'])
                    fmt = '>' + str(num_indices) + t
                    size = struct.calcsize(fmt)
                    data = self.filehandle.read(size)
                    try:
                        indices = struct.unpack(fmt, data)
                        faces.append(indices)
                    except struct.error as e:
                        print(e)
                        print(
                            "Failed reading {} bytes for format {}".format(
                                size, fmt))
                        print("Failed @ {}".format(pos))
                        # Bare raise preserves the original traceback.
                        raise
                    _monitor.Increment(1)
    elif self.format == "binary_little_endian":
        pass  # TODO: little-endian binary PLY not implemented yet
    else:
        pass

    scene = lx.object.Scene(dest)
    item = scene.ItemAdd(
        self.scene_service.ItemTypeLookup(lx.symbol.sITYPE_MESH))
    chan_write = scene.Channels(lx.symbol.s_ACTIONLAYER_SETUP, 0.0)
    chan_write = lx.object.ChannelWrite(chan_write)
    mesh = chan_write.ValueObj(
        item, item.ChannelLookup(lx.symbol.sICHAN_MESH_MESH))
    mesh = lx.object.Mesh(mesh)
    if not mesh.test():
        lx.throw(lx.result.FALSE)
    point = mesh.PointAccessor()
    polygon = mesh.PolygonAccessor()
    # Both accessors are used below, so fail when EITHER is invalid (the
    # original `and` only failed when both were).
    if not point.test() or not polygon.test():
        lx.throw(lx.result.FALSE)

    # Each vertex is a tuple holding the position.
    points = {}
    _monitor.Initialize(len(vertices))
    for index, position in enumerate(vertices):
        points[index] = point.New(position)
        _monitor.Increment(1)

    _monitor.Initialize(len(faces))
    for face in faces:
        vertIds = tuple(points[i] for i in face)
        storage = lx.object.storage('p', len(vertIds))
        for index, _id in enumerate(vertIds):
            storage[index] = _id
        polygon.New(lx.symbol.iPTYP_FACE, storage, len(vertIds), 0)
        _monitor.Increment(1)

    # Add comments from the header to the item.
    if self.comments:
        tag = lx.object.StringTag(item)
        tag.Set(lx.symbol.iTAG_COMMENT, "\n".join(self.comments))

    mesh.SetMeshEdits(lx.symbol.f_MESHEDIT_POLYGONS)
    return lx.result.OK
def load_Recognize(self, filename, loadInfo):
    """Recognize a PLY file and record its header layout for loading.

    Opens the file, validates the "ply" magic number and format line, and
    parses the header into self.comments / self.elements, remembering the
    header end offset so load_LoadObject() can seek straight to the data.
    If we don't recognize the format we throw lx.result.NOTFOUND.

    :param filename: path to the file
    :param loadInfo: info object
    :returns: lx.result.OK when the file is recognized.
    """
    self.filehandle = open(filename, "rb")
    # Early exit if the PLY magic number is missing.
    magicnumber = self.filehandle.readline().strip()
    if magicnumber != b"ply":
        lx.throw(lx.result.NOTFOUND)
    # The line after the magic number defines the storage format. Guard the
    # unpack so a malformed line reports NOTFOUND instead of leaking a
    # ValueError out of the loader. (Local renamed from `format` to avoid
    # shadowing the builtin.)
    format_fields = self.filehandle.readline().split()
    if len(format_fields) != 3:
        lx.throw(lx.result.NOTFOUND)
    _, fmt, _version = format_fields
    if fmt == b"ascii":
        self.format = "ascii"
    elif fmt == b"binary_big_endian":
        self.format = "binary_big_endian"
    elif fmt == b"binary_little_endian":
        self.format = "binary_little_endian"
    else:
        lx.throw(lx.result.NOTFOUND)
    element = None  # most recently declared element; properties attach to it
    # Read the rest of the header, rejecting the file when a line cannot
    # be parsed.
    while self.filehandle:
        line = self.filehandle.readline().strip()
        if line == b"":
            lx.throw(lx.result.NOTFOUND)
        if line == b"end_header":
            break  # We've reached the end of the header.
        # Collect comments into a list.
        if line.startswith(b"comment"):
            self.comments.append(line[8:].decode('ascii'))
        # Parse element declarations.
        elif line.startswith(b"element"):
            _, name, count = line.split()
            element = {"name": name, "count": int(count), "properties": []}
            self.elements.append(element)
        # Parse properties for the current element.
        elif line.startswith(b"property"):
            # A property before any element declaration is a malformed
            # header (would otherwise raise AttributeError on None).
            if element is None:
                lx.throw(lx.result.NOTFOUND)
            fields = line.split()
            if len(fields) == 3:  # regular 'scalar' property
                _, datatype, name = fields
                element["properties"].append({
                    "name": name,
                    "type": datatype
                })
            elif len(fields) == 5:  # list type property
                _, _, size, datatype, name = fields
                element["properties"].append({
                    "name": name,
                    "type": datatype,
                    "size": size
                })
            else:
                lx.throw(lx.result.NOTFOUND)
        else:
            lx.throw(lx.result.NOTFOUND)
    self.end_header = self.filehandle.tell()
    info = lx.object.LoaderInfo(loadInfo)
    info.SetClass(lx.symbol.u_SCENE)
    self.load_target = lx.object.SceneLoaderTarget()
    self.load_target.set(loadInfo)
    self.load_target.SetRootType(lx.symbol.sITYPE_MESH)
    return lx.result.OK  # Tell Modo we've recognized the file.
def cmd_ArgEnable(self, index):
    """Report whether the argument at *index* is currently enabled.

    Arguments past index 3 are only meaningful when the boolean argument
    at index 3 is on; throw e_CMD_DISABLED for them otherwise.
    """
    # De Morgan form of `not dyna_Bool(3) and index > 3`; dyna_Bool is
    # still evaluated first, as in the original.
    if not (self.dyna_Bool(3) or index <= 3):
        lx.throw(lx.symbol.e_CMD_DISABLED)
    return lx.symbol.e_OK
def cmd_ArgEnable(self, index):
    """Report whether the argument at *index* is currently enabled.

    Disables input into the custom-angle field (index 4) whenever the
    axis argument (index 0) is set but not to "custom" (value 2).
    """
    axis_chosen = self.dyna_IsSet(0)
    if axis_chosen and self.dyna_Int(0) != 2 and index == 4:
        lx.throw(lx.symbol.e_CMD_DISABLED)
    return lx.symbol.e_OK