Code Example #1
File: backend.py Project: stechu/pysnark
def prove():
    # TODO: this is pretty slow, maybe use this to improve performance:
    # https://github.com/google/flatbuffers/issues/4668
    
    f = open('computation.zkif', 'wb')
    
    print("*** zkinterface: writing circuit", file=sys.stderr)
    
    builder = flatbuffers.Builder(1024)

    vars = write_varlist(builder, pubvals, 1)
    
    Circuit.CircuitStartFieldMaximumVector(builder, 32)
    for i in reversed(range(32)):
        builder.PrependByte((modulus>>(i*8))&255)
    maxi = builder.EndVector(32)
    
    Circuit.CircuitStart(builder)
    Circuit.CircuitAddConnections(builder, vars)
    Circuit.CircuitAddFreeVariableId(builder, len(pubvals)+len(privvals)+1)
    Circuit.CircuitAddR1csGeneration(builder, True)
    Circuit.CircuitAddWitnessGeneration(builder, True)
    Circuit.CircuitAddFieldMaximum(builder, maxi)
    circ = Circuit.CircuitEnd(builder)
    
    Root.RootStart(builder)
    Root.RootAddMessageType(builder, Message.Message.Circuit)
    Root.RootAddMessage(builder, circ)
    root = Root.RootEnd(builder)
        
    builder.FinishSizePrefixed(root)
    buf = builder.Output()
    f.write(buf)
    
    print("*** zkinterface: writing witness", file=sys.stderr)
    
    # build witness
    builder = flatbuffers.Builder(1024)
    
    vars = write_varlist(builder, privvals, len(pubvals)+1)
    
    Witness.WitnessStart(builder)
    Witness.WitnessAddAssignedVariables(builder, vars)
    wit = Witness.WitnessEnd(builder)
    
    Root.RootStart(builder)
    Root.RootAddMessageType(builder, Message.Message.Witness)
    Root.RootAddMessage(builder, wit)
    root = Root.RootEnd(builder)    
    
    builder.FinishSizePrefixed(root)
    buf = builder.Output()
    f.write(buf)    
    
    print("*** zkinterface: writing constraints", file=sys.stderr)
    
    builder = flatbuffers.Builder(1024)
    
    def write_lc(lc):
        varls = list(lc.lc.keys())
        
        Variables.VariablesStartVariableIdsVector(builder, len(varls))
        for i in reversed(range(len(varls))):
            varix = varls[i] if varls[i]>=0 else len(pubvals)-varls[i]
            builder.PrependUint64(varix)
        vars = builder.EndVector(len(varls))
        
        Variables.VariablesStartValuesVector(builder, 32*len(varls))
        for i in reversed(range(len(varls))):
            for j in reversed(range(32)):
                val=lc.lc[varls[i]]%modulus
                builder.PrependByte((val>>(j*8))&255)
        vals = builder.EndVector(32*len(varls))
        
        Variables.VariablesStart(builder)
        Variables.VariablesAddVariableIds(builder, vars)
        Variables.VariablesAddValues(builder, vals)
        return Variables.VariablesEnd(builder)
    
    def write_constraint(c):
        la = write_lc(c[0])
        lb = write_lc(c[1])
        lc = write_lc(c[2])
        
        BilinearConstraint.BilinearConstraintStart(builder)
        BilinearConstraint.BilinearConstraintAddLinearCombinationA(builder, la)
        BilinearConstraint.BilinearConstraintAddLinearCombinationB(builder, lb)
        BilinearConstraint.BilinearConstraintAddLinearCombinationC(builder, lc)
        
        return BilinearConstraint.BilinearConstraintEnd(builder)       
        
    cs = [write_constraint(c) for c in constraints]
    
    R1CSConstraints.R1CSConstraintsStartConstraintsVector(builder, len(cs))
    for i in reversed(range(len(cs))):
        builder.PrependUOffsetTRelative(cs[i])
    cvec = builder.EndVector(len(cs))
    
    R1CSConstraints.R1CSConstraintsStart(builder)
    R1CSConstraints.R1CSConstraintsAddConstraints(builder, cvec)
    r1cs = R1CSConstraints.R1CSConstraintsEnd(builder)
    
    Root.RootStart(builder)
    Root.RootAddMessageType(builder, Message.Message.R1CSConstraints)
    Root.RootAddMessage(builder, r1cs)
    root = Root.RootEnd(builder)    
    
    builder.FinishSizePrefixed(root)
    buf = builder.Output()
    f.write(buf)

    f.close() 
    
    print("*** zkinterface circuit, witness, constraints written to 'computation.zkif', size", len(buf))
Code Example #2
def write_flatbuffers_trace(rows,
                            filename,
                            directory="",
                            persistent=True,
                            with_time=True):
    import os
    import flatbuffers
    from serialization.Timescales import Message, NullableInt64, NullableBool

    try:
        os.remove(
            os.path.join(directory,
                         '{filename}.bin'.format(filename=filename)))
    except OSError:
        pass

    with open(
            os.path.join(directory,
                         '{filename}.bin'.format(filename=filename)),
            'w+b') as f:
        previous = dict()
        for row in rows:
            builder = flatbuffers.Builder(512)
            Message.MessageStart(builder)

            if with_time:
                Message.MessageAddTime(
                    builder,
                    NullableInt64.CreateNullableInt64(builder, row.time))

            if not persistent:
                current = row.data._asdict()
                if 'p' in current:
                    Message.MessageAddPropP(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['p']))
                if 'q' in current:
                    Message.MessageAddPropQ(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['q']))
                if 'r' in current:
                    Message.MessageAddPropR(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['r']))
                if 's' in current:
                    Message.MessageAddPropS(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['s']))
            else:
                current = row.data._asdict()
                if 'p' in current and (not 'p' in previous
                                       or current['p'] != previous['p']):
                    Message.MessageAddPropP(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['p']))
                if 'q' in current and (not 'q' in previous
                                       or current['q'] != previous['q']):
                    Message.MessageAddPropQ(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['q']))
                if 'r' in current and (not 'r' in previous
                                       or current['r'] != previous['r']):
                    Message.MessageAddPropR(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['r']))
                if 's' in current and (not 's' in previous
                                       or current['s'] != previous['s']):
                    Message.MessageAddPropS(
                        builder,
                        NullableBool.CreateNullableBool(builder, current['s']))

                previous = current

            msg = Message.MessageEnd(builder)
            builder.FinishSizePrefixed(msg)
            buf = builder.Output()
            f.write(buf)
Code Example #3
  def _create_metadata(self):
    """Creates the metadata for a text classifier."""

    # Creates model info.
    model_meta = _metadata_fb.ModelMetadataT()
    model_meta.name = self.model_info.name
    model_meta.description = self.model_info.description
    model_meta.version = self.model_info.version
    model_meta.author = "TensorFlow"
    model_meta.license = ("Apache License. Version 2.0 "
                          "http://www.apache.org/licenses/LICENSE-2.0.")

    # Creates input info.
    input_meta = _metadata_fb.TensorMetadataT()
    input_meta.name = "input_text"
    input_meta.description = (
        "Embedding vectors representing the input text to be classified. The "
        "input need to be converted from raw text to embedding vectors using "
        "the attached dictionary file.")
    # Create the vocab file.
    vocab_file = _metadata_fb.AssociatedFileT()
    vocab_file.name = os.path.basename(self.associated_files[1])
    vocab_file.description = ("Vocabulary file to convert natural language "
                              "words to embedding vectors.")
    vocab_file.type = _metadata_fb.AssociatedFileType.VOCABULARY

    # Create the RegexTokenizer.
    tokenizer = _metadata_fb.ProcessUnitT()
    tokenizer.optionsType = (
        _metadata_fb.ProcessUnitOptions.RegexTokenizerOptions)
    tokenizer.options = _metadata_fb.RegexTokenizerOptionsT()
    tokenizer.options.delimRegexPattern = self.model_info.delim_regex_pattern
    tokenizer.options.vocabFile = [vocab_file]

    input_meta.content = _metadata_fb.ContentT()
    input_meta.content.contentPropertiesType = (
        _metadata_fb.ContentProperties.FeatureProperties)
    input_meta.content.contentProperties = _metadata_fb.FeaturePropertiesT()
    input_meta.processUnits = [tokenizer]

    # Creates output info.
    output_meta = _metadata_fb.TensorMetadataT()
    output_meta.name = "probability"
    output_meta.description = "Probabilities of the labels respectively."
    output_meta.content = _metadata_fb.ContentT()
    output_meta.content.contentProperties = _metadata_fb.FeaturePropertiesT()
    output_meta.content.contentPropertiesType = (
        _metadata_fb.ContentProperties.FeatureProperties)
    output_stats = _metadata_fb.StatsT()
    output_stats.max = [1.0]
    output_stats.min = [0.0]
    output_meta.stats = output_stats
    label_file = _metadata_fb.AssociatedFileT()
    label_file.name = os.path.basename(self.associated_files[0])
    label_file.description = ("Labels for the categories that the model can "
                              "classify.")
    label_file.type = _metadata_fb.AssociatedFileType.TENSOR_AXIS_LABELS
    output_meta.associatedFiles = [label_file]

    # Creates subgraph info.
    subgraph = _metadata_fb.SubGraphMetadataT()
    subgraph.inputTensorMetadata = [input_meta]
    subgraph.outputTensorMetadata = [output_meta]
    model_meta.subgraphMetadata = [subgraph]

    b = flatbuffers.Builder(0)
    b.Finish(
        model_meta.Pack(b),
        _metadata.MetadataPopulator.METADATA_FILE_IDENTIFIER)
    self.metadata_buf = b.Output()
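The buffer built here is typically attached to the model with the MetadataPopulator from the same _metadata module referenced above. A minimal sketch, assuming a model file path and that self.associated_files holds the label and vocabulary files used earlier:

populator = _metadata.MetadataPopulator.with_model_file('classifier.tflite')  # hypothetical path
populator.load_metadata_buffer(self.metadata_buf)
populator.load_associated_files(self.associated_files)
populator.populate()  # writes the metadata and associated files into the .tflite model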
Code Example #4
File: remote.py Project: YanXiaobin-Ben/pyprob
    def forward(self):
        builder = flatbuffers.Builder(64)

        # construct MessageBody
        ppx_Run.RunStart(builder)
        message_body = ppx_Run.RunEnd(builder)

        # construct Message
        ppx_Message.MessageStart(builder)
        ppx_Message.MessageAddBodyType(builder,
                                       ppx_MessageBody.MessageBody().Run)
        ppx_Message.MessageAddBody(builder, message_body)
        message = ppx_Message.MessageEnd(builder)
        builder.Finish(message)

        message = builder.Output()
        self._requester.send_request(message)

        while True:
            reply = self._requester.receive_reply()
            message_body = self._get_message_body(reply)

            if isinstance(message_body, ppx_RunResult.RunResult):
                result = self._protocol_tensor_to_variable(
                    message_body.Result())
                return result
            elif isinstance(message_body, ppx_Sample.Sample):
                address = message_body.Address().decode('utf-8')
                name = message_body.Name().decode('utf-8')
                if name == '':
                    name = None
                control = bool(message_body.Control())
                replace = bool(message_body.Replace())
                distribution_type = message_body.DistributionType()
                if distribution_type == ppx_Distribution.Distribution(
                ).Uniform:
                    uniform = ppx_Uniform.Uniform()
                    uniform.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    low = self._protocol_tensor_to_variable(uniform.Low())
                    high = self._protocol_tensor_to_variable(uniform.High())
                    dist = Uniform(low, high)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Normal:
                    normal = ppx_Normal.Normal()
                    normal.Init(message_body.Distribution().Bytes,
                                message_body.Distribution().Pos)
                    mean = self._protocol_tensor_to_variable(normal.Mean())
                    stddev = self._protocol_tensor_to_variable(normal.Stddev())
                    dist = Normal(mean, stddev)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Categorical:
                    categorical = ppx_Categorical.Categorical()
                    categorical.Init(message_body.Distribution().Bytes,
                                     message_body.Distribution().Pos)
                    probs = self._protocol_tensor_to_variable(
                        categorical.Probs())
                    dist = Categorical(probs)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Poisson:
                    poisson = ppx_Poisson.Poisson()
                    poisson.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    rate = self._protocol_tensor_to_variable(poisson.Rate())
                    dist = Poisson(rate)
                else:
                    raise RuntimeError(
                        'ppx (Python): Sample from an unexpected distribution requested.'
                    )
                result = state.sample(distribution=dist,
                                      control=control,
                                      replace=replace,
                                      name=name,
                                      address=address)
                builder = flatbuffers.Builder(64)
                result = self._variable_to_protocol_tensor(builder, result)
                ppx_SampleResult.SampleResultStart(builder)
                ppx_SampleResult.SampleResultAddResult(builder, result)
                message_body = ppx_SampleResult.SampleResultEnd(builder)

                # construct Message
                ppx_Message.MessageStart(builder)
                ppx_Message.MessageAddBodyType(
                    builder,
                    ppx_MessageBody.MessageBody().SampleResult)
                ppx_Message.MessageAddBody(builder, message_body)
                message = ppx_Message.MessageEnd(builder)
                builder.Finish(message)

                message = builder.Output()
                self._requester.send_request(message)
            elif isinstance(message_body, ppx_Observe.Observe):
                address = message_body.Address().decode('utf-8')
                name = message_body.Name().decode('utf-8')
                if name == '':
                    name = None
                value = self._protocol_tensor_to_variable(message_body.Value())
                distribution_type = message_body.DistributionType()
                if distribution_type == ppx_Distribution.Distribution().NONE:
                    dist = None
                elif distribution_type == ppx_Distribution.Distribution(
                ).Uniform:
                    uniform = ppx_Uniform.Uniform()
                    uniform.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    low = self._protocol_tensor_to_variable(uniform.Low())
                    high = self._protocol_tensor_to_variable(uniform.High())
                    dist = Uniform(low, high)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Normal:
                    normal = ppx_Normal.Normal()
                    normal.Init(message_body.Distribution().Bytes,
                                message_body.Distribution().Pos)
                    mean = self._protocol_tensor_to_variable(normal.Mean())
                    stddev = self._protocol_tensor_to_variable(normal.Stddev())
                    dist = Normal(mean, stddev)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Categorical:
                    categorical = ppx_Categorical.Categorical()
                    categorical.Init(message_body.Distribution().Bytes,
                                     message_body.Distribution().Pos)
                    probs = self._protocol_tensor_to_variable(
                        categorical.Probs())
                    dist = Categorical(probs)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Poisson:
                    poisson = ppx_Poisson.Poisson()
                    poisson.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    rate = self._protocol_tensor_to_variable(poisson.Rate())
                    dist = Poisson(rate)
                else:
                    raise RuntimeError(
                        'ppx (Python): Sample from an unexpected distribution requested: {}'
                        .format(distribution_type))

                state.observe(distribution=dist,
                              value=value,
                              name=name,
                              address=address)
                builder = flatbuffers.Builder(64)
                ppx_ObserveResult.ObserveResultStart(builder)
                message_body = ppx_ObserveResult.ObserveResultEnd(builder)

                # construct Message
                ppx_Message.MessageStart(builder)
                ppx_Message.MessageAddBodyType(
                    builder,
                    ppx_MessageBody.MessageBody().ObserveResult)
                ppx_Message.MessageAddBody(builder, message_body)
                message = ppx_Message.MessageEnd(builder)
                builder.Finish(message)

                message = builder.Output()
                self._requester.send_request(message)
            elif isinstance(message_body, ppx_Reset.Reset):
                raise RuntimeError(
                    'ppx (Python): Received a reset request. Protocol out of sync.'
                )
            else:
                raise RuntimeError(
                    'ppx (Python): Received unexpected message.')
Code Example #5
File: _types.py Project: Shoobx/zlmdb
    def _serialize_value(self, value):
        builder = flatbuffers.Builder(0)
        obj = self._build(value, builder)
        builder.Finish(obj)
        buf = builder.Output()
        return bytes(buf)
Code Example #6
        # params = input("enter params : ")
        params = ''
        print(my_url + url)
        req = requests.get(my_url + url, params=params)

        print(req.content)

    if selected == '3':
        url = 'user/friend'

        userid = input('enter userid : ')
        targetidx = input('enter targetidx : ')

        state = input('enter state : ')

        builder = flatbuffers.Builder(128)
        proto_headerStart(builder)
        proto_headerAddUseridx(builder, int(userid))
        header_offset = proto_headerEnd(builder)

        proto_responseFriendRequestStart(builder)
        proto_responseFriendRequestAddHeader(builder, header_offset)
        proto_responseFriendRequestAddTargetidx(builder, int(targetidx))
        proto_responseFriendRequestAddState(builder, int(state))
        root_offset = proto_responseFriendRequestEnd(builder)

        builder.Finish(root_offset)

        #res = make_response(builder.Output())
        #res.headers['Content-Type'] = 'application/octet-stream'
        data = builder.Output()
Code Example #7
def convertModelNode(convertContext, data):
	"""
	Converts a ModelNode. The data map is expected to contain the following elements:
	- embeddedResources: optional set of resources to embed with the node. This is a map containing
	  the elements as expected by SceneResourcesConvert.convertSceneResources().
	- modelGeometry: array of model geometry. Each element of the array has the following members:
	  - type: the name of the geometry type, such as "obj" or "gltf".
	  - path: the path to the geometry.
	  - vertexFormat: array of vertex attributes defining the vertex format. Each element of the
	    array has the following members:
	    - attrib: the attribute. This can either be an enum value from dsVertexAttrib, removing
	      the type prefix, or the integer for the attribute.
	    - format: the attribute format. See the dsGfxFormat enum for values, removing the type
	      prefix. Only the "standard" formats may be used.
	    - decoration: the decoration for the format. See the dsGfxFormat enum for values,
	      removing the type prefix. Only the decorator values may be used.
	  - indexSize: the size of the index in bytes. This must be either 2 or 4. If not set, no
	    indices will be produced.
	  - transforms: optional array of transforms to perform on the vertex values. Each element of
	    the array has the following members:
	    - attrib: the attribute, matching one of the attributes in vertexFormat.
	    - transform: transform to apply on the attribute. Valid values are:
	      - Identity: leaves the values un-transformed.
	      - Bounds: normalizes the values based on the original value's bounds
	      - UNormToSNorm: converts UNorm values to SNorm values.
	      - SNormToUNorm: converts SNorm values to UNorm values.
	  - drawInfo: array of definitions for drawing components of the geometry. Each element of the
	    array has the following members:
	    - name: the name of the model component. Note that only model components referenced in the
	      drawInfo array will be included in the final model.
	    - shader: the name of the shader to draw with.
	    - material: the name of the material to draw with.
	    - distanceRange: array of two floats for the minimum and maximum distance to draw at.
	      Defaults to [0, 3.402823466e38].
	    - listName: the name of the item list to draw the model with.
	- models: array of models to draw with manually provided geometry. (i.e. not converted from
	  the modelGeometry array) Each element of the array has the following members:
	  - shader: the name of the shader to draw with.
	  - material: the name of the material to draw with.
	  - geometry: the name of the geometry to draw.
	  - distanceRange: array of two floats for the minimum and maximum distance to draw at. Defaults
	    to [0, 3.402823466e38].
	  - drawRange: the range of the geometry to draw. This is an object with the following members,
	    depending on if the geometry is indexed or not:
	    Indexed geometry:
	    - indexCount: the number of indices to draw.
	    - instanceCount: the number of instances to draw. Defaults to 1.
	    - firstIndex: the first index to draw. Defaults to 0.
	    - vertexOffset: the offset to apply to each index value. Defaults to 0.
	    - firstInstance: the first instance to draw. Defaults to 0.
	    Non-indexed geometry:
	    - vertexCount: the number of vertices to draw.
	    - instanceCount: the number of instances to draw. Defaults to 1.
	    - firstVertex: the first vertex to draw. Defaults to 0.
	    - firstInstance: the first instance to draw. Defaults to 0.
	  - primitiveType: the primitive type to draw with. See the dsPrimitiveType enum for values,
	    removing the type prefix. Defaults to "TriangleList".
	  - listName: the name of the item list to draw the model with.
	- extraItemLists: array of extra item list names to add the node to.
	- bounds: 2x3 array of float values for the minimum and maximum values for the positions. If
	  unset, this will be automatically calculated from the geometry in modelGeometry; if there is
	  no modelGeometry either, the model will have no explicit bounds for culling.
	"""
	try:
		embeddedResources = data.get('embeddedResources', dict())
		if not isinstance(embeddedResources, dict):
			raise Exception('ModelNode "embeddedResources" must be an object.')
		
		modelGeometry = data.get('modelGeometry')
		if modelGeometry:
			models, modelBounds = convertModelNodeGeometry(convertContext, modelGeometry,
				embeddedResources)
		else:
			models = []
			modelBounds = None

		modelInfoList = data.get('models')
		if modelInfoList:
			models.extend(convertModelNodeModels(modelInfoList))

		extraItemLists = data.get('extraItemLists')
		if 'bounds' in data:
			modelBounds = data['bounds']
			try:
				if len(modelBounds) != 2:
					raise Exception()
				for bound in modelBounds:
					if len(bound) != 3:
						raise Exception()
					for val in bound:
						if not isinstance(val, float):
							raise Exception()
			except:
				raise Exception('Invalid model bounds "' + str(modelBounds) + '".')
	except (TypeError, ValueError):
		raise Exception('ModelNode data must be an object.')
	except KeyError as e:
		raise Exception('ModelNode data doesn\'t contain element "' + str(e) + '".')

	builder = flatbuffers.Builder(0)
	if embeddedResources:
		embeddedResourcesData = convertSceneResources(convertContext, embeddedResources)
		embeddedResourcesOffset = builder.CreateByteVector(embeddedResourcesData)
	else:
		embeddedResourcesOffset = 0

	if extraItemLists:
		extraItemListOffsets = []
		try:
			for item in extraItemLists:
				extraItemListOffsets.append(builder.CreateString(str(item)))
		except (TypeError, ValueError):
			raise Exception('ModelNode "extraItemLists" must be an array of strings.')

		ModelNodeStartExtraItemListsVector(builder, len(extraItemListOffsets))
		for offset in reversed(extraItemListOffsets):
			builder.PrependUOffsetTRelative(offset)
		extraItemListsOffset = builder.EndVector(len(extraItemListOffsets))
	else:
		extraItemListsOffset = 0

	modelOffsets = []
	for model in models:
		shaderOffset = builder.CreateString(model.shader)
		materialOffset = builder.CreateString(model.material)
		geometryOffset = builder.CreateString(model.geometry)
		distanceRangeOffset = CreateVector2f(builder, model.distanceRange[0],
			model.distanceRange[1])

		drawRange = model.drawRange
		if drawRange.rangeType == ModelDrawRange.DrawIndexedRange:
			DrawIndexedRangeStart(builder)
			DrawIndexedRangeAddIndexCount(builder, drawRange.indexCount)
			DrawIndexedRangeAddInstanceCount(builder, drawRange.instanceCount)
			DrawIndexedRangeAddFirstIndex(builder, drawRange.firstIndex)
			DrawIndexedRangeAddVertexOffset(builder, drawRange.vertexOffset)
			DrawIndexedRangeAddFirstInstance(builder, drawRange.firstInstance)
			drawRangeOffset = DrawIndexedRangeEnd(builder)
		else:
			DrawRangeStart(builder)
			DrawRangeAddVertexCount(builder, drawRange.vertexCount)
			DrawRangeAddInstanceCount(builder, drawRange.instanceCount)
			DrawRangeAddFirstVertex(builder, drawRange.firstVertex)
			DrawRangeAddFirstInstance(builder, drawRange.firstInstance)
			drawRangeOffset = DrawRangeEnd(builder)

		listNameOffset = builder.CreateString(model.listName)

		ModelInfoStart(builder)
		ModelInfoAddShader(builder, shaderOffset)
		ModelInfoAddMaterial(builder, materialOffset)
		ModelInfoAddGeometry(builder, geometryOffset)
		ModelInfoAddDistanceRange(builder, distanceRangeOffset)
		ModelInfoAddDrawRangeType(builder, drawRange.rangeType)
		ModelInfoAddDrawRange(builder, drawRangeOffset)
		ModelInfoAddPrimitiveType(builder, model.primitiveType)
		ModelInfoAddListName(builder, listNameOffset)
		modelOffsets.append(ModelInfoEnd(builder))

	ModelNodeStartModelsVector(builder, len(modelOffsets))
	for offset in reversed(modelOffsets):
		builder.PrependUOffsetTRelative(offset)
	modelsOffset = builder.EndVector(len(modelOffsets))

	if modelBounds:
		center = []
		halfExtents = []
		for i in range(0, 3):
			center.append((modelBounds[0][i] + modelBounds[1][i])/2)
			halfExtents.append((modelBounds[1][i] - modelBounds[0][i])/2)
		boundsOffset = CreateOrientedBox3f(builder, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0,
			center[0], center[1], center[2], halfExtents[0], halfExtents[1], halfExtents[2])
	else:
		boundsOffset = 0

	ModelNodeStart(builder)
	ModelNodeAddEmbeddedResources(builder, embeddedResourcesOffset)
	ModelNodeAddExtraItemLists(builder, extraItemListsOffset)
	ModelNodeAddModels(builder, modelsOffset)
	ModelNodeAddBounds(builder, boundsOffset)
	builder.Finish(ModelNodeEnd(builder))
	return builder.Output()
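For reference, an input map that exercises the "models" path of the docstring above (manually provided geometry with a non-indexed draw range) might look like the dictionary below; every shader, material, geometry, and list name is a hypothetical placeholder rather than something this converter defines.

example_data = {
	'embeddedResources': {},
	'models': [{
		'shader': 'ForwardLit',            # hypothetical shader name
		'material': 'BoxMaterial',         # hypothetical material name
		'geometry': 'BoxGeometry',         # hypothetical geometry name
		'distanceRange': [0.0, 3.402823466e38],
		'drawRange': {'vertexCount': 36},  # non-indexed; other counts use the documented defaults
		'primitiveType': 'TriangleList',
		'listName': 'OpaqueList'           # hypothetical item list name
	}],
	'extraItemLists': ['CullList'],        # hypothetical item list name
	'bounds': [[-1.0, -1.0, -1.0], [1.0, 1.0, 1.0]]
}
# flatbufferData = convertModelNode(convertContext, example_data)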
Code Example #8
    def make_postprocess_model(self,
                               max_detections=10,
                               detections_per_class=100,
                               max_classes_per_detection=1,
                               use_regular_nms=True,
                               nms_score_threshold=0.3,
                               nms_iou_threshold=0.6,
                               num_classes=90,
                               x_scale=10.0,
                               y_scale=10.0,
                               w_scale=5.0,
                               h_scale=5.0):
        """Returns the bytes of a tflite model containing a single TFLite_Detection_PostProcess op"""

        builder = flatbuffers.Builder(1024)

        # op_code
        custom_code = builder.CreateString("TFLite_Detection_PostProcess")
        OperatorCode.OperatorCodeStart(builder)
        OperatorCode.OperatorCodeAddDeprecatedBuiltinCode(
            builder, BuiltinOperator.CUSTOM)
        OperatorCode.OperatorCodeAddCustomCode(builder, custom_code)
        OperatorCode.OperatorCodeAddBuiltinCode(builder,
                                                BuiltinOperator.CUSTOM)
        op_code = OperatorCode.OperatorCodeEnd(builder)

        # op_codes
        Model.ModelStartOperatorCodesVector(builder, 1)
        builder.PrependUOffsetTRelative(op_code)
        op_codes = builder.EndVector(1)

        # Make tensors
        # [names, shape, type tensors]
        ts = []
        inputs_info = [('box_encodings', [-1, -1, 4]),
                       ('class_predictions', [-1, -1, -1]),
                       ('anchors', [-1, 4])]
        outputs_info = [('detection_boxes', [-1, -1, 4]),
                        ('detection_classes', [-1, -1]),
                        ('detection_scores', [-1, -1]),
                        ('num_detections', [-1])]
        for name_info, shape_info in inputs_info + outputs_info:

            name = builder.CreateString(name_info)
            shape = builder.CreateNumpyVector(
                np.maximum(np.array(shape_info, np.int32), 1))
            shape_signature = builder.CreateNumpyVector(
                np.array(shape_info, np.int32))

            Tensor.TensorStart(builder)
            Tensor.TensorAddShape(builder, shape)
            Tensor.TensorAddType(builder, TensorType.FLOAT32)
            Tensor.TensorAddName(builder, name)
            Tensor.TensorAddShapeSignature(builder, shape_signature)
            ts.append(Tensor.TensorEnd(builder))

        SubGraph.SubGraphStartTensorsVector(builder, len(ts))
        for tensor in reversed(ts):
            builder.PrependUOffsetTRelative(tensor)
        tensors = builder.EndVector(len(ts))

        # inputs
        SubGraph.SubGraphStartInputsVector(builder, 3)
        for inp in reversed([0, 1, 2]):
            builder.PrependInt32(inp)
        inputs = builder.EndVector(3)

        # outputs
        SubGraph.SubGraphStartOutputsVector(builder, 4)
        for out in reversed([3, 4, 5, 6]):
            builder.PrependInt32(out)
        outputs = builder.EndVector(4)

        flexbuffer = \
            b'y_scale\x00nms_score_threshold\x00max_detections\x00x_scale\x00w_scale\x00nms_iou_threshold' \
            b'\x00use_regular_nms\x00h_scale\x00max_classes_per_detection\x00num_classes\x00detections_per_class' \
            b'\x00\x0b\x16E>\x88j\x9e([v\x7f\xab\x0b\x00\x00\x00\x01\x00\x00\x00\x0b\x00\x00\x00*attr4**attr7*' \
            b'*attr10**attr9**attr1**attr2**attr3**attr11*\x00\x00\x00*attr8**attr5**attr6*\x06\x0e\x06\x06\x0e' \
            b'\x0e\x06j\x0e\x0e\x0e7&\x01'
        flexbuffer = flexbuffer.replace(b'*attr1*',
                                        struct.pack('<f', nms_iou_threshold))
        flexbuffer = flexbuffer.replace(b'*attr2*',
                                        struct.pack('<f', nms_score_threshold))
        flexbuffer = flexbuffer.replace(b'*attr3*',
                                        struct.pack('<i', num_classes))
        flexbuffer = flexbuffer.replace(
            b'*attr4*', struct.pack('<i', detections_per_class))
        flexbuffer = flexbuffer.replace(b'*attr5*', struct.pack('<f', x_scale))
        flexbuffer = flexbuffer.replace(b'*attr6*', struct.pack('<f', y_scale))
        flexbuffer = flexbuffer.replace(b'*attr7*', struct.pack('<f', h_scale))
        flexbuffer = flexbuffer.replace(b'*attr8*', struct.pack('<f', w_scale))
        flexbuffer = flexbuffer.replace(b'*attr9*',
                                        struct.pack('<i', max_detections))
        flexbuffer = flexbuffer.replace(
            b'*attr10*', struct.pack('<i', max_classes_per_detection))
        flexbuffer = flexbuffer.replace(b'*attr11*',
                                        struct.pack('<b', use_regular_nms))

        custom_options = builder.CreateNumpyVector(
            np.array(bytearray(flexbuffer)))

        # operator
        Operator.OperatorStart(builder)
        Operator.OperatorAddOpcodeIndex(builder, 0)
        Operator.OperatorAddInputs(builder, inputs)
        Operator.OperatorAddOutputs(builder, outputs)
        Operator.OperatorAddCustomOptions(builder, custom_options)
        Operator.OperatorAddCustomOptionsFormat(
            builder, CustomOptionsFormat.FLEXBUFFERS)
        operator = Operator.OperatorEnd(builder)

        # operators
        SubGraph.SubGraphStartOperatorsVector(builder, 1)
        builder.PrependUOffsetTRelative(operator)
        operators = builder.EndVector(1)

        # subgraph
        SubGraph.SubGraphStart(builder)
        SubGraph.SubGraphAddTensors(builder, tensors)
        SubGraph.SubGraphAddInputs(builder, inputs)
        SubGraph.SubGraphAddOutputs(builder, outputs)
        SubGraph.SubGraphAddOperators(builder, operators)
        subgraph = SubGraph.SubGraphEnd(builder)

        # subgraphs
        Model.ModelStartSubgraphsVector(builder, 1)
        builder.PrependUOffsetTRelative(subgraph)
        subgraphs = builder.EndVector(1)

        description = builder.CreateString("Model for tflite testing")

        Buffer.BufferStartDataVector(builder, 0)
        data = builder.EndVector(0)

        Buffer.BufferStart(builder)
        Buffer.BufferAddData(builder, data)
        buffer = Buffer.BufferEnd(builder)

        Model.ModelStartBuffersVector(builder, 1)
        builder.PrependUOffsetTRelative(buffer)
        buffers = builder.EndVector(1)

        # model
        Model.ModelStart(builder)
        Model.ModelAddVersion(builder, 3)
        Model.ModelAddOperatorCodes(builder, op_codes)
        Model.ModelAddSubgraphs(builder, subgraphs)
        Model.ModelAddDescription(builder, description)
        Model.ModelAddBuffers(builder, buffers)
        model = Model.ModelEnd(builder)

        builder.Finish(model, b"TFL3")
        return builder.Output()
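A quick sanity check on the returned bytes is the "TFL3" file identifier that builder.Finish places at byte offset 4; loading the buffer into tf.lite.Interpreter afterwards is an assumption about the test environment rather than part of this snippet.

model_bytes = bytes(self.make_postprocess_model())  # called from within the test class
assert model_bytes[4:8] == b'TFL3'  # FlatBuffers file identifier written by builder.Finish

# Assuming TensorFlow is installed:
# import tensorflow as tf
# interpreter = tf.lite.Interpreter(model_content=model_bytes)
# print([d['name'] for d in interpreter.get_input_details()])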
Code Example #9
  def create_from_metadata(
      cls,
      model_buffer: bytearray,
      model_metadata: Optional[_metadata_fb.ModelMetadataT] = None,
      input_metadata: Optional[List[_metadata_fb.TensorMetadataT]] = None,
      output_metadata: Optional[List[_metadata_fb.TensorMetadataT]] = None,
      associated_files: Optional[List[str]] = None,
      input_process_units: Optional[List[_metadata_fb.ProcessUnitT]] = None,
      output_process_units: Optional[List[_metadata_fb.ProcessUnitT]] = None):
    """Creates MetadataWriter based on the metadata Flatbuffers Python Objects.

    Args:
      model_buffer: valid buffer of the model file.
      model_metadata: general model metadata [1]. The subgraph_metadata will be
        refreshed with input_metadata and output_metadata.
      input_metadata: a list of metadata of the input tensors [2].
      output_metadata: a list of metadata of the output tensors [3].
      associated_files: path to the associated files to be populated.
      input_process_units: a list of metadata of the input process units [4].
      output_process_units: a list of metadata of the output process units [5].
      [1]:
        https://github.com/tensorflow/tflite-support/blob/b80289c4cd1224d0e1836c7654e82f070f9eefaa/tensorflow_lite_support/metadata/metadata_schema.fbs#L640-L681
      [2]:
        https://github.com/tensorflow/tflite-support/blob/b80289c4cd1224d0e1836c7654e82f070f9eefaa/tensorflow_lite_support/metadata/metadata_schema.fbs#L590
      [3]:
        https://github.com/tensorflow/tflite-support/blob/b80289c4cd1224d0e1836c7654e82f070f9eefaa/tensorflow_lite_support/metadata/metadata_schema.fbs#L599
      [4]:
        https://github.com/tensorflow/tflite-support/blob/b5cc57c74f7990d8bc055795dfe8d50267064a57/tensorflow_lite_support/metadata/metadata_schema.fbs#L646
      [5]:
        https://github.com/tensorflow/tflite-support/blob/b5cc57c74f7990d8bc055795dfe8d50267064a57/tensorflow_lite_support/metadata/metadata_schema.fbs#L650
    Returns:
      A MetadataWriter Object.
    """
    # Create empty tensor metadata when input_metadata/output_metadata are None
    # to bypass MetadataPopulator verification.
    if not input_metadata:
      model = _schema_fb.Model.GetRootAsModel(model_buffer, 0)
      num_input_tensors = model.Subgraphs(0).InputsLength()
      input_metadata = [
          _metadata_fb.TensorMetadataT() for i in range(num_input_tensors)
      ]

    if not output_metadata:
      model = _schema_fb.Model.GetRootAsModel(model_buffer, 0)
      num_output_tensors = model.Subgraphs(0).OutputsLength()
      output_metadata = [
          _metadata_fb.TensorMetadataT() for i in range(num_output_tensors)
      ]

    _fill_default_tensor_names(
        input_metadata, writer_utils.get_input_tensor_names(model_buffer))

    _fill_default_tensor_names(
        output_metadata, writer_utils.get_output_tensor_names(model_buffer))

    subgraph_metadata = _metadata_fb.SubGraphMetadataT()
    subgraph_metadata.inputTensorMetadata = input_metadata
    subgraph_metadata.outputTensorMetadata = output_metadata
    subgraph_metadata.inputProcessUnits = input_process_units
    subgraph_metadata.outputProcessUnits = output_process_units

    if model_metadata is None:
      model_metadata = _metadata_fb.ModelMetadataT()
    model_metadata.subgraphMetadata = [subgraph_metadata]

    b = flatbuffers.Builder(0)
    b.Finish(
        model_metadata.Pack(b),
        _metadata.MetadataPopulator.METADATA_FILE_IDENTIFIER)
    return cls(model_buffer, b.Output(), associated_files)
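As a usage sketch, this classmethod can be fed a raw model buffer plus whichever metadata pieces are available; the file names below are placeholders, and MetadataWriter is assumed to be the class the method belongs to.

with open('model.tflite', 'rb') as f:  # hypothetical model path
  model_buffer = bytearray(f.read())

writer = MetadataWriter.create_from_metadata(
    model_buffer,
    model_metadata=_metadata_fb.ModelMetadataT(),
    associated_files=['labels.txt'])  # hypothetical associated file
# The returned writer can then populate the model with this metadata.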
Code Example #10
def serialize_tensor(ndarray):
    size = ndarray.nbytes + 100
    builder = flatbuffers.Builder(size)
    tensor = make_tensor(builder, ndarray)
    return output(builder, tensor)
Code Example #11
    def reply_proposal(self, proposal=None):
        # allocate buffer
        builder = flatbuffers.Builder(64)

        if proposal is None:
            infcomp.protocol.ProposalReply.ProposalReplyStart(builder)
            infcomp.protocol.ProposalReply.ProposalReplyAddSuccess(
                builder, False)
            message_body = infcomp.protocol.ProposalReply.ProposalReplyEnd(
                builder)
        else:
            if isinstance(proposal, UniformDiscrete):
                # construct probabilities
                proposal_probabilities = Tensor_to_NDArray(
                    builder, proposal.proposal_probabilities)
                # construct UniformDiscrete
                infcomp.protocol.UniformDiscrete.UniformDiscreteStart(builder)
                infcomp.protocol.UniformDiscrete.UniformDiscreteAddPriorMin(
                    builder, proposal.prior_min)
                infcomp.protocol.UniformDiscrete.UniformDiscreteAddPriorSize(
                    builder, proposal.prior_size)
                infcomp.protocol.UniformDiscrete.UniformDiscreteAddProposalProbabilities(
                    builder, proposal_probabilities)
                distribution = infcomp.protocol.UniformDiscrete.UniformDiscreteEnd(
                    builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).UniformDiscrete
            elif isinstance(proposal, Normal):
                # construct Normal
                infcomp.protocol.Normal.NormalStart(builder)
                infcomp.protocol.Normal.NormalAddPriorMean(
                    builder, proposal.prior_mean)
                infcomp.protocol.Normal.NormalAddPriorStd(
                    builder, proposal.prior_std)
                infcomp.protocol.Normal.NormalAddProposalMean(
                    builder, proposal.proposal_mean)
                infcomp.protocol.Normal.NormalAddProposalStd(
                    builder, proposal.proposal_std)
                distribution = infcomp.protocol.Normal.NormalEnd(builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Normal
            elif isinstance(proposal, Flip):
                # construct Flip
                infcomp.protocol.Flip.FlipStart(builder)
                infcomp.protocol.Flip.FlipAddProposalProbability(
                    builder, proposal.proposal_probability)
                distribution = infcomp.protocol.Flip.FlipEnd(builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Flip
            elif isinstance(proposal, Discrete):
                # construct probabilities
                proposal_probabilities = Tensor_to_NDArray(
                    builder, proposal.proposal_probabilities)
                # construct Discrete
                infcomp.protocol.Discrete.DiscreteStart(builder)
                infcomp.protocol.Discrete.DiscreteAddPriorSize(
                    builder, proposal.prior_size)
                infcomp.protocol.Discrete.DiscreteAddProposalProbabilities(
                    builder, proposal_probabilities)
                distribution = infcomp.protocol.Discrete.DiscreteEnd(builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Discrete
            elif isinstance(proposal, Categorical):
                # construct probabilities
                proposal_probabilities = Tensor_to_NDArray(
                    builder, proposal.proposal_probabilities)
                # construct Categorical
                infcomp.protocol.Categorical.CategoricalStart(builder)
                infcomp.protocol.Categorical.CategoricalAddPriorSize(
                    builder, proposal.prior_size)
                infcomp.protocol.Categorical.CategoricalAddProposalProbabilities(
                    builder, proposal_probabilities)
                distribution = infcomp.protocol.Categorical.CategoricalEnd(
                    builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Categorical
            elif isinstance(proposal, UniformContinuous):
                # construct UniformContinuous
                infcomp.protocol.UniformContinuous.UniformContinuousStart(
                    builder)
                infcomp.protocol.UniformContinuous.UniformContinuousAddPriorMin(
                    builder, proposal.prior_min)
                infcomp.protocol.UniformContinuous.UniformContinuousAddPriorMax(
                    builder, proposal.prior_max)
                infcomp.protocol.UniformContinuous.UniformContinuousAddProposalMode(
                    builder, proposal.proposal_mode)
                infcomp.protocol.UniformContinuous.UniformContinuousAddProposalCertainty(
                    builder, proposal.proposal_certainty)
                distribution = infcomp.protocol.UniformContinuous.UniformContinuousEnd(
                    builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).UniformContinuous
            elif isinstance(proposal, UniformContinuousAlt):
                # construct proposal parameters
                # print('means, stds, coeffs')
                # print(proposal.proposal_means)
                # print(proposal.proposal_stds)
                # print(proposal.proposal_coeffs)
                proposal_means = Tensor_to_NDArray(builder,
                                                   proposal.proposal_means)
                proposal_stds = Tensor_to_NDArray(builder,
                                                  proposal.proposal_stds)
                proposal_coeffs = Tensor_to_NDArray(builder,
                                                    proposal.proposal_coeffs)
                # construct UniformContinuousAlt
                infcomp.protocol.UniformContinuousAlt.UniformContinuousAltStart(
                    builder)
                infcomp.protocol.UniformContinuousAlt.UniformContinuousAltAddPriorMin(
                    builder, proposal.prior_min)
                infcomp.protocol.UniformContinuousAlt.UniformContinuousAltAddPriorMax(
                    builder, proposal.prior_max)
                infcomp.protocol.UniformContinuousAlt.UniformContinuousAltAddProposalMeans(
                    builder, proposal_means)
                infcomp.protocol.UniformContinuousAlt.UniformContinuousAltAddProposalStds(
                    builder, proposal_stds)
                infcomp.protocol.UniformContinuousAlt.UniformContinuousAltAddProposalCoeffs(
                    builder, proposal_coeffs)
                distribution = infcomp.protocol.UniformContinuousAlt.UniformContinuousAltEnd(
                    builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).UniformContinuousAlt
            elif isinstance(proposal, Laplace):
                # construct Laplace
                infcomp.protocol.Laplace.LaplaceStart(builder)
                infcomp.protocol.Laplace.LaplaceAddPriorLocation(
                    builder, proposal.prior_location)
                infcomp.protocol.Laplace.LaplaceAddPriorScale(
                    builder, proposal.prior_scale)
                infcomp.protocol.Laplace.LaplaceAddProposalLocation(
                    builder, proposal.proposal_location)
                infcomp.protocol.Laplace.LaplaceAddProposalScale(
                    builder, proposal.proposal_scale)
                distribution = infcomp.protocol.Laplace.LaplaceEnd(builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Laplace
            elif isinstance(proposal, Gamma):
                # construct Gamma
                infcomp.protocol.Gamma.GammaStart(builder)
                infcomp.protocol.Gamma.GammaAddProposalLocation(
                    builder, proposal.proposal_location)
                infcomp.protocol.Gamma.GammaAddProposalScale(
                    builder, proposal.proposal_scale)
                distribution = infcomp.protocol.Gamma.GammaEnd(builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Gamma
            elif isinstance(proposal, Beta):
                # construct Beta
                infcomp.protocol.Beta.BetaStart(builder)
                infcomp.protocol.Beta.BetaAddProposalMode(
                    builder, proposal.proposal_mode)
                infcomp.protocol.Beta.BetaAddProposalCertainty(
                    builder, proposal.proposal_certainty)
                distribution = infcomp.protocol.Beta.BetaEnd(builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).Beta
            elif isinstance(proposal, MultivariateNormal):
                # construct prior_mean, prior_cov, proposal_mean, proposal_vars
                prior_mean = Tensor_to_NDArray(builder, proposal.prior_mean)
                prior_cov = Tensor_to_NDArray(builder, proposal.prior_cov)
                proposal_mean = Tensor_to_NDArray(builder,
                                                  proposal.proposal_mean)
                proposal_vars = Tensor_to_NDArray(builder,
                                                  proposal.proposal_vars)
                # construct MultivariateNormal
                infcomp.protocol.MultivariateNormal.MultivariateNormalStart(
                    builder)
                infcomp.protocol.MultivariateNormal.MultivariateNormalAddPriorMean(
                    builder, prior_mean)
                infcomp.protocol.MultivariateNormal.MultivariateNormalAddPriorCov(
                    builder, prior_cov)
                infcomp.protocol.MultivariateNormal.MultivariateNormalAddProposalMean(
                    builder, proposal_mean)
                infcomp.protocol.MultivariateNormal.MultivariateNormalAddProposalVars(
                    builder, proposal_vars)
                distribution = infcomp.protocol.MultivariateNormal.MultivariateNormalEnd(
                    builder)
                distribution_type = infcomp.protocol.Distribution.Distribution(
                ).MultivariateNormal
            else:
                util.logger.log_error(
                    'reply_proposal: Unsupported proposal distribution: {0}'.
                    format(proposal))

            # construct message body (ProposalReply)
            infcomp.protocol.ProposalReply.ProposalReplyStart(builder)
            infcomp.protocol.ProposalReply.ProposalReplyAddSuccess(
                builder, True)
            infcomp.protocol.ProposalReply.ProposalReplyAddDistributionType(
                builder, distribution_type)
            infcomp.protocol.ProposalReply.ProposalReplyAddDistribution(
                builder, distribution)
            message_body = infcomp.protocol.ProposalReply.ProposalReplyEnd(
                builder)

        # construct message
        infcomp.protocol.Message.MessageStart(builder)
        infcomp.protocol.Message.MessageAddBodyType(
            builder,
            infcomp.protocol.MessageBody.MessageBody().ProposalReply)
        infcomp.protocol.Message.MessageAddBody(builder, message_body)
        message = infcomp.protocol.Message.MessageEnd(builder)

        builder.Finish(message)
        message = builder.Output()
        self._replier.send_reply(message)
Code Example #12
def serialize_metadata_response(metadata):
    builder = flatbuffers.Builder(4096)
    r = make_metadata_response(builder, metadata)
    return output(builder, r)
Code Example #13
def serialize_metadata_request():
    builder = flatbuffers.Builder(1024)
    r = make_metadata_request(builder)
    return output(builder, r)
Code Example #14
import flatbuffers as fb
from python.proto import Order

# encode
b = fb.Builder(0)

Order.OrderStart(b)
Order.OrderAddId(b, 100)
o = Order.OrderEnd(b)

b.Finish(o)
out = bytes(b.Output())

# s = ""
# for i in out:
# s += "0x{:02x}, ".format(i)
# print(s)

# decode self
order = Order.Order.GetRootAsOrder(out, 0)
print(order.Id())

# decode C-encoded Order
c_enc = b'\x08\x00\x00\x00\x76\x30\x30\x31\xf8\xff\xff\xff\x64\x00\x00\x00\x06\x00\x08\x00\x04\x00'

order = Order.Order.GetRootAsOrder(c_enc, 0)
print(order.Id())
Code Example #15
def convertVectorShaders(convertContext, data):
    """
	Converts vector shaders used in a scene. The data map is expected to contain the following
	elements:
	- modules: array of versioned shader modules. The appropriate module based on the graphics API
	  version being used will be chosen at runtime. Each element of the array has the following
	  members:
	  - version: the version of the shader as a standard config. (e.g. glsl-4.1, spirv-1.0)
	  - module: path to the shader module or base64 encoded data prefixed with "base64:". The
	    module is expected to have been compiled with Modular Shader Language (MSL).
	  - output: the path to the location to copy the shader module to. This can be omitted to
	    embed the shader module directly.
	  - outputRelativeDir: the directory relative to output path. This will be removed from the
	    path before adding the reference.
	  - resourceType: the resource type. See the dsFileResourceType for values, removing the type
	    prefix. Defaults to "Embedded".
	- extraElements: list of extra material elements to add for the material description. Each
	  element of the array has the following members:
	  - name: the name of the element.
	  - type: the type of the element. See dsMaterialType enum for values, removing the type prefix.
	  - count: the number of array elements. If 0 or omitted, this is not an array.
	  - binding: the binding type for the element. See the dsMaterialBinding enum for values,
	    removing the type prefix. This is only used for texture, image, buffer, and shader variable
	    group types.
	  - shaderVariableGroupDesc: the name of the shader variable group description when the type
	    is a shader variable group.
	- materialDesc: the name of the material description to register. This can be referenced by
	  other objects, such as creating materials used for drawing vector images.
	- fillColor: the name of the shader for filling with a solid color. Defaults to
	  "dsVectorFillColor".
	- fillLinearGradient: the name of the shader for filling with a linear gradient. Defaults to
	  "dsVectorFillLinearGradient".
	- fillRadialGradient: the name of the shader for filling with a radial gradient. Defaults to
	  "dsVectorFillRadialGradient".
	- line: the name of the shader for a line with a color or gradient. Defaults to
	  "dsVectorLine".
	- image: the name of the shader for a texture applied as an image. Defaults to "dsVectorImage".
	- textColor: name of the shader for standard single-color text. Defaults to "dsVectorTextColor".
	- textColorOutline: name of the shader for standard single-color text with a single-colored
	  outline. Defaults to "dsVectorTextColorOutline".
	- textGradient: name of the shader for text using a gradient. Defaults to
	  "dsVectorTextGradient".
	- textGradientOutline: name of the shader for text with an outline using a gradient. Defaults to
	  "dsVectorTextGradientOutline".
	"""
    def createOptionalString(builder, string):
        if string:
            return builder.CreateString(string)
        else:
            return 0

    builder = flatbuffers.Builder(0)

    try:
        modules = data['modules']
        versionedModules = []
        try:
            for versionedModuleData in modules:
                version = str(versionedModuleData['version'])
                moduleStr = str(versionedModuleData['module'])
                try:
                    modulePath, moduleContents = readDataOrPath(moduleStr)
                except TypeError:
                    raise Exception(
                        'VectorShaders shader module "module" uses incorrect base64 encoding.'
                    )

                dataType, dataOffset = convertFileOrData(
                    builder, modulePath, moduleContents,
                    versionedModuleData.get('output'),
                    versionedModuleData.get('outputRelativeDir'),
                    versionedModuleData.get('resourceType'))
                versionedModules.append((version, dataType, dataOffset))
        except KeyError as e:
            raise Exception(
                'Versioned shader module data doesn\'t contain element ' +
                str(e) + '.')
        except (TypeError, ValueError):
            raise Exception(
                'Versioned shader module list must be an array of objects.')

        extraElementsData = data.get('extraElements', [])
        extraElements = []
        try:
            for elementData in extraElementsData:
                element = Object()
                element.name = str(elementData['name'])

                typeStr = str(elementData['type'])
                try:
                    element.type = getattr(MaterialType, typeStr)
                except AttributeError:
                    raise Exception('Invalid material type "' + typeStr + '".')

                countValue = elementData.get('count', 0)
                try:
                    element.count = int(countValue)
                    if element.count < 0:
                        raise Exception(
                        )  # Common error handling in except block.
                except:
                    raise Exception('Invalid vector shader element count "' +
                                    str(countValue) + '".')

                bindingStr = str(elementData['binding'])
                try:
                    element.binding = getattr(MaterialBinding, bindingStr)
                except AttributeError:
                    raise Exception('Invalid material binding "' + bindingStr +
                                    '".')

                element.shaderVariableGroupDesc = str(
                    elementData.get('shaderVariableGroupDesc', ''))
                extraElements.append(element)
        except KeyError as e:
            raise Exception(
                'VectorShaders extra element data doesn\'t contain element ' +
                str(e) + '.')
        except (TypeError, ValueError):
            raise Exception(
                'VectorShaders extra elements must be an array of objects.')

        materialDescName = str(data['materialDesc'])

        fillColor = str(data.get('fillColor', ''))
        fillLinearGradient = str(data.get('fillLinearGradient', ''))
        fillRadialGradient = str(data.get('fillRadialGradient', ''))
        line = str(data.get('line', ''))
        image = str(data.get('image', ''))
        textColor = str(data.get('textColor', ''))
        textColorOutline = str(data.get('textColorOutline', ''))
        textGradient = str(data.get('textGradient', ''))
        textGradientOutline = str(data.get('textGradientOutline', ''))
    except KeyError as e:
        raise Exception('VectorShaders doesn\'t contain element ' + str(e) +
                        '.')
    except (AttributeError, TypeError, ValueError):
        raise Exception('VectorShaders must be an object.')

    modulesOffsets = []
    for version, dataType, dataOffset in versionedModules:
        versionOffset = builder.CreateString(version)

        VersionedShaderModule.Start(builder)
        VersionedShaderModule.AddVersion(builder, versionOffset)
        VersionedShaderModule.AddDataType(builder, dataType)
        VersionedShaderModule.AddData(builder, dataOffset)
        modulesOffsets.append(VersionedShaderModule.End(builder))

    VectorShaders.StartModulesVector(builder, len(modulesOffsets))
    for offset in reversed(modulesOffsets):
        builder.PrependUOffsetTRelative(offset)
    modulesOffset = builder.EndVector()

    extraElementsOffsets = []
    for element in extraElements:
        elementNameOffset = builder.CreateString(element.name)
        shaderVariableGroupDescOffset = createOptionalString(
            builder, element.shaderVariableGroupDesc)

        MaterialElement.Start(builder)
        MaterialElement.AddName(builder, elementNameOffset)
        MaterialElement.AddType(builder, element.type)
        MaterialElement.AddCount(builder, element.count)
        MaterialElement.AddBinding(builder, element.binding)
        MaterialElement.AddShaderVariableGroupDesc(
            builder, shaderVariableGroupDescOffset)
        extraElementsOffsets.append(MaterialElement.End(builder))

    VectorShaders.StartExtraElementsVector(builder, len(extraElementsOffsets))
    for offset in reversed(extraElementsOffsets):
        builder.PrependUOffsetTRelative(offset)
    extraElementsOffset = builder.EndVector()

    materialDescNameOffset = builder.CreateString(materialDescName)
    fillColorOffset = createOptionalString(builder, fillColor)
    fillLinearGradientOffset = createOptionalString(builder,
                                                    fillLinearGradient)
    fillRadialGradientOffset = createOptionalString(builder,
                                                    fillRadialGradient)
    lineOffset = createOptionalString(builder, line)
    imageOffset = createOptionalString(builder, image)
    textColorOffset = createOptionalString(builder, textColor)
    textColorOutlineOffset = createOptionalString(builder, textColorOutline)
    textGradientOffset = createOptionalString(builder, textGradient)
    textGradientOutlineOffset = createOptionalString(builder,
                                                     textGradientOutline)

    VectorShaders.Start(builder)
    VectorShaders.AddModules(builder, modulesOffset)
    VectorShaders.AddExtraElements(builder, extraElementsOffset)
    VectorShaders.AddMaterialDesc(builder, materialDescNameOffset)
    VectorShaders.AddFillColor(builder, fillColorOffset)
    VectorShaders.AddFillLinearGradient(builder, fillLinearGradientOffset)
    VectorShaders.AddFillRadialGradient(builder, fillRadialGradientOffset)
    VectorShaders.AddLine(builder, lineOffset)
    VectorShaders.AddImage(builder, imageOffset)
    VectorShaders.AddTextColor(builder, textColorOffset)
    VectorShaders.AddTextColorOutline(builder, textColorOutlineOffset)
    VectorShaders.AddTextGradient(builder, textGradientOffset)
    VectorShaders.AddTextGradientOutline(builder, textGradientOutlineOffset)
    builder.Finish(VectorShaders.End(builder))
    return builder.Output()
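A hedged sketch of the data map the VectorShaders converter above appears to expect, assembled from the docstring and the key lookups in the code; the module path, shader names, and enum values below are illustrative placeholders, not values taken from the DeepSea project.

# Hypothetical VectorShaders data map; keys mirror the lookups above.
vector_shaders_data = {
    "modules": [
        {
            "version": "spirv-1.0",                    # version string for this shader module
            "module": "shaders/VectorElements.mslb",   # path, or "base64:" prefixed data
            "output": "VectorElements.mslb",
            "resourceType": "Embedded"
        }
    ],
    "extraElements": [
        {
            "name": "extraUniforms",
            "type": "Vec4",        # looked up on MaterialType, so it must name a real member
            "count": 2,
            "binding": "Material"  # looked up on MaterialBinding
        }
    ],
    "materialDesc": "vectorMaterialDesc",
    # Shader names left out fall back to the defaults listed in the docstring,
    # e.g. "dsVectorFillColor" for fillColor.
    "line": "myVectorLine"
}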
コード例 #16
0
ファイル: VectorImageConvert.py プロジェクト: akb825/DeepSea
def convertVectorImage(convertContext, data):
	"""
	Converts a VectorImage. The data map is expected to contain the following elements:
	- image: path to the vector image or base64 encoded data prefixed with "base64:".
	- output: the path to the output the vector image. This can be omitted if the vector image is
	  embedded.
	- outputRelativeDir: the directory relative to output path. This will be removed from the path
	  before adding the reference.
	- resourceType: the resource type. See the dsFileResourceType for values, removing the type
	  prefix. Defaults to "Embedded".
	- targetSize: the target size of the vector image for the tessellation quality as an array of
	  two floats. Defaults to the original image size.
	- sharedMaterials: the name of the vector material set for shared material data.
	- vectorShaders: the name of the vector shaders used to draw the vector image.
	- vectorResources: list of strings for the names of the vector resources to get textures and
	  fonts from.
	- srgb: bool for whether or not the embedded materials should be treated as sRGB and converted
	  to linear when drawing. Defaults to false.
	"""
	builder = flatbuffers.Builder(0)

	try:
		imageStr = str(data['image'])
		try:
			imagePath, imageContents = readDataOrPath(imageStr)
		except TypeError:
			raise Exception('VectorImage "image" uses incorrect base64 encoding.')
		imageType, imageOffset = convertFileOrData(builder, imagePath, imageContents,
			data.get('output'), data.get('outputRelativeDir'), data.get('resourceType'))

		size = data.get('targetSize')
		if size:
			try:
				if len(size) != 2:
					raise Exception() # Common error handling in except block.
				size[0] = float(size[0])
				size[1] = float(size[1])
			except:
				raise Exception('Invalid vector image target size "' + str(size) + '".')

		sharedMaterials = str(data.get('sharedMaterials', ''))
		shaders = str(data['vectorShaders'])
		resources = data.get('resources', [])
		if not isinstance(resources, list):
			raise Exception('Invalid vector image resources "' + str(resources) + '".')

		srgb = bool(data.get('srgb'))
	except KeyError as e:
		raise Exception('VectorImage doesn\'t contain element ' + str(e) + '.')
	except (AttributeError, TypeError, ValueError):
		raise Exception('VectorImage must be an object.')

	if sharedMaterials:
		sharedMaterialsOffset = builder.CreateString(sharedMaterials)
	else:
		sharedMaterialsOffset = 0

	shadersOffset = builder.CreateString(shaders)

	resourceOffsets = []
	for resource in resources:
		resourceOffsets.append(builder.CreateString(resource))

	VectorImage.StartResourcesVector(builder, len(resourceOffsets))
	for offset in reversed(resourceOffsets):
		builder.PrependUOffsetTRelative(offset)
	resourcesOffset = builder.EndVector()

	VectorImage.Start(builder)
	VectorImage.AddImageType(builder, imageType)
	VectorImage.AddImage(builder, imageOffset)

	if size:
		sizeOffset = CreateVector2f(builder, size[0], size[1])
	else:
		sizeOffset = 0
	VectorImage.AddTargetSize(builder, sizeOffset)

	VectorImage.AddSharedMaterials(builder, sharedMaterialsOffset)
	VectorImage.AddVectorShaders(builder, shadersOffset)
	VectorImage.AddResources(builder, resourcesOffset)
	VectorImage.AddSrgb(builder, srgb)
	builder.Finish(VectorImage.End(builder))
	return builder.Output()
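Similarly, a hedged example of a data map for convertVectorImage(), using the keys the code reads; the paths and names are placeholders. Note that the code reads the resource list from a "resources" key even though the docstring calls it "vectorResources".

# Hypothetical VectorImage data map.
vector_image_data = {
    "image": "images/logo.svg",           # path, or "base64:" prefixed data
    "output": "logo.dsvi",
    "resourceType": "Embedded",
    "targetSize": [256.0, 128.0],         # optional; defaults to the original image size
    "sharedMaterials": "sharedVectorMaterials",
    "vectorShaders": "vectorShaders",
    "resources": ["vectorTextures", "vectorFonts"],  # key used by the code above
    "srgb": False
}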
コード例 #17
0
score.content.range.min = 2

num_of_detection = _metadata_fb.TensorMetadataT()
num_of_detection.name = "number of detections"
num_of_detection.description = "The number of the detected boxes. "
num_of_detection.content = _metadata_fb.ContentT()
num_of_detection.content.contentPropertiesType = (_metadata_fb.ContentProperties.FeatureProperties)
num_of_detection.content.contentProperties = (_metadata_fb.FeaturePropertiesT())



subgraph = _metadata_fb.SubGraphMetadataT()
subgraph.outputTensorGroups = [output_tensorgroups]
subgraph.inputTensorMetadata = [input_meta]
subgraph.outputTensorMetadata = [location, category, score, num_of_detection]
model_meta.subgraphMetadata = [subgraph]

b = flatbuffers.Builder(0)
b.Finish(
    model_meta.Pack(b),
    _metadata.MetadataPopulator.METADATA_FILE_IDENTIFIER)
metadata_buf = b.Output()

populator = _metadata.MetadataPopulator.with_model_file(exported_model_path)
populator.load_metadata_buffer(metadata_buf)
populator.load_associated_files([labelmap_file])
populator.populate()

displayer = _metadata.MetadataDisplayer.with_model_file(exported_model_path)
export_json_file = os.path.join('C:/Users/anywh/Desktop/cetification/android/object_detection/android/app/src/main/assets', "colab_v2.json")
json_file = displayer.get_metadata_json()
コード例 #18
0
ファイル: anim_encoder.py プロジェクト: ca2-chambers/pycozmo
    def to_fb_stream(self, f: BinaryIO):
        builder = flatbuffers.Builder(1024)
        fbclips = self.to_fb(builder)
        builder.Finish(fbclips)
        buf = builder.Output()
        f.write(buf)
コード例 #19
0
    cmd = sys.argv[1]
    filename = sys.argv[2]
    do_output = "-o" in sys.argv or "--output" in sys.argv
    use_flatcc = "-c" in sys.argv or "--flatcc" in sys.argv
    if use_flatcc:
        import irondb.flatcc as irondb_flatbuf
    else:
        import irondb.flatbuf as irondb_flatbuf
    if cmd.startswith("read"):
        eprint("Using Flatbuffer module: " + irondb_flatbuf.__name__)

    if cmd == "create_find_data":
        num_entries = int(sys.argv[3])
        leaf_cutoff = num_entries - (num_entries / 10)

        builder = flatbuffers.Builder(0)
        leaf_arr = []
        for x in range(0, num_entries):
            e = None
            if x > leaf_cutoff:
                uuid = builder.CreateString(
                    "11111111-1111-1111-1111-111111111111")
                metric = builder.CreateString("dummy metric " + str(x))
                check = builder.CreateString("dummy check " + str(x))
                category = builder.CreateString("graphite")
                egress = builder.CreateString("avg")
                LeafData.LeafDataStart(builder)
                LeafData.LeafDataAddUuid(builder, uuid)
                LeafData.LeafDataAddMetricName(builder, metric)
                LeafData.LeafDataAddCheckName(builder, check)
                LeafData.LeafDataAddCategory(builder, category)
コード例 #20
0
ファイル: joiner.py プロジェクト: dblarons/networks-project
def read_join_response(command):
    union_join = registrar.Registrar.Join.Join()
    union_join.Init(command.Message().Bytes, command.Message().Pos)

    room = union_join.Room()
    print('CLIENT: Joined room with name ' + str(room.Name()))

    clients_length = union_join.ClientsLength()
    print('CLIENT: Joined room that has ' + str(clients_length) + ' other clients')
    for i in range(clients_length):
        client = union_join.Clients(i)
        print('CLIENT: Client ' + str(i) + ' in room has ip ' + str(client.Ip()))

############ LIST REQUEST ############

builder = flatbuffers.Builder(1024)
offset = build_command(
    builder,
    registrar.Registrar.Message.Message().List,
    0)

print('CLIENT: Sending a List request')
response = send_request(builder, req_socket, offset)

command = registrar.Registrar.Command.Command.GetRootAsCommand(response, 0)

first_room_guid = None
if command.MessageType() == registrar.Registrar.Message.Message().List:
    print('CLIENT: Received List response from server')
    first_room_guid = read_list_response(command)
else:
コード例 #21
0
def main():
    builder = flatbuffers.Builder(0)

    # Create some weapons for our Monster ('Sword' and 'Axe').
    weapon_one = builder.CreateString('Sword')
    weapon_two = builder.CreateString('Axe')

    MyGame.Sample.Weapon.WeaponStart(builder)
    MyGame.Sample.Weapon.WeaponAddName(builder, weapon_one)
    MyGame.Sample.Weapon.WeaponAddDamage(builder, 3)
    sword = MyGame.Sample.Weapon.WeaponEnd(builder)

    MyGame.Sample.Weapon.WeaponStart(builder)
    MyGame.Sample.Weapon.WeaponAddName(builder, weapon_two)
    MyGame.Sample.Weapon.WeaponAddDamage(builder, 5)
    axe = MyGame.Sample.Weapon.WeaponEnd(builder)

    # Serialize the FlatBuffer data.
    name = builder.CreateString('Orc')

    MyGame.Sample.Monster.MonsterStartInventoryVector(builder, 10)
    # Note: Since we prepend the bytes, this loop iterates in reverse order.
    for i in reversed(range(0, 10)):
        builder.PrependByte(i)
    inv = builder.EndVector(10)

    MyGame.Sample.Monster.MonsterStartWeaponsVector(builder, 2)
    # Note: Since we prepend the data, prepend the weapons in reverse order.
    builder.PrependUOffsetTRelative(axe)
    builder.PrependUOffsetTRelative(sword)
    weapons = builder.EndVector(2)

    pos = MyGame.Sample.Vec3.CreateVec3(builder, 1.0, 2.0, 3.0)

    MyGame.Sample.Monster.MonsterStart(builder)
    MyGame.Sample.Monster.MonsterAddPos(builder, pos)
    MyGame.Sample.Monster.MonsterAddHp(builder, 300)
    MyGame.Sample.Monster.MonsterAddName(builder, name)
    MyGame.Sample.Monster.MonsterAddInventory(builder, inv)
    MyGame.Sample.Monster.MonsterAddColor(builder,
                                          MyGame.Sample.Color.Color().Red)
    MyGame.Sample.Monster.MonsterAddWeapons(builder, weapons)
    MyGame.Sample.Monster.MonsterAddEquippedType(
        builder,
        MyGame.Sample.Equipment.Equipment().Weapon)
    MyGame.Sample.Monster.MonsterAddEquipped(builder, axe)
    orc = MyGame.Sample.Monster.MonsterEnd(builder)

    builder.Finish(orc)

    # We now have a FlatBuffer that we could store on disk or send over a network.

    # ...Saving to file or sending over a network code goes here...

    # Instead, we are going to access this buffer right away (as if we just
    # received it).

    buf = builder.Output()

    # Note: We use `0` for the offset here, since we got the data using the
    # `builder.Output()` method. This simulates the data you would store/receive
    # in your FlatBuffer. If you wanted to read from the `builder.Bytes` directly,
    # you would need to pass in the offset of `builder.Head()`, as the builder
    # actually constructs the buffer backwards.
    monster = MyGame.Sample.Monster.Monster.GetRootAsMonster(buf, 0)

    # Note: We did not set the `Mana` field explicitly, so we get a default value.
    assert monster.Mana() == 150
    assert monster.Hp() == 300
    # Note: string accessors return bytes under Python 3.
    assert monster.Name() == b'Orc'
    assert monster.Color() == MyGame.Sample.Color.Color().Red
    assert monster.Pos().X() == 1.0
    assert monster.Pos().Y() == 2.0
    assert monster.Pos().Z() == 3.0

    # Get and test the `inventory` FlatBuffer `vector`.
    for i in range(monster.InventoryLength()):
        assert monster.Inventory(i) == i

    # Get and test the `weapons` FlatBuffer `vector` of `table`s.
    expected_weapon_names = [b'Sword', b'Axe']
    expected_weapon_damages = [3, 5]
    for i in range(monster.WeaponsLength()):
        assert monster.Weapons(i).Name() == expected_weapon_names[i]
        assert monster.Weapons(i).Damage() == expected_weapon_damages[i]

    # Get and test the `equipped` FlatBuffer `union`.
    assert monster.EquippedType() == MyGame.Sample.Equipment.Equipment().Weapon

    # An example of how you can appropriately convert the table depending on the
    # FlatBuffer `union` type. You could add `elif` and `else` clauses to handle
    # the other FlatBuffer `union` types for this field.
    if monster.EquippedType() == MyGame.Sample.Equipment.Equipment().Weapon:
        # `monster.Equipped()` returns a `flatbuffers.Table`, which can be used
        # to initialize a `MyGame.Sample.Weapon.Weapon()`, in this case.
        union_weapon = MyGame.Sample.Weapon.Weapon()
        union_weapon.Init(monster.Equipped().Bytes, monster.Equipped().Pos)

        assert union_weapon.Name() == "Axe"
        assert union_weapon.Damage() == 5

    print('The FlatBuffer was successfully created and verified!')
コード例 #22
0
ファイル: pocketfft.py プロジェクト: yashk2810/jax
def pocketfft(c, a, *, fft_type: FftType, fft_lengths: List[int]):
    """PocketFFT kernel for CPU."""
    shape = c.get_shape(a)
    n = len(shape.dimensions())
    dtype = shape.element_type()
    builder = flatbuffers.Builder(128)

    fft_lengths = list(fft_lengths)
    assert len(fft_lengths) >= 1
    assert len(fft_lengths) <= n, (fft_lengths, n)

    forward = fft_type in (FftType.FFT, FftType.RFFT)
    if fft_type == FftType.RFFT:
        pocketfft_type = pd.PocketFftType.R2C

        assert dtype in (np.float32, np.float64), dtype
        out_dtype = np.dtype(np.complex64 if dtype ==
                             np.float32 else np.complex128)
        pocketfft_dtype = (pd.PocketFftDtype.COMPLEX64 if dtype == np.float32
                           else pd.PocketFftDtype.COMPLEX128)

        assert list(shape.dimensions())[-len(fft_lengths):] == fft_lengths, (
            shape, fft_lengths)
        out_shape = list(shape.dimensions())
        out_shape[-1] = out_shape[-1] // 2 + 1

    elif fft_type == FftType.IRFFT:
        pocketfft_type = pd.PocketFftType.C2R
        assert np.issubdtype(dtype, np.complexfloating), dtype

        out_dtype = np.dtype(np.float32 if dtype ==
                             np.complex64 else np.float64)
        pocketfft_dtype = (pd.PocketFftDtype.COMPLEX64 if dtype == np.complex64
                           else pd.PocketFftDtype.COMPLEX128)

        assert list(
            shape.dimensions())[-len(fft_lengths):-1] == fft_lengths[:-1]
        out_shape = list(shape.dimensions())
        out_shape[-1] = fft_lengths[-1]
        assert (out_shape[-1] // 2 + 1) == shape.dimensions()[-1]
    else:
        pocketfft_type = pd.PocketFftType.C2C

        assert np.issubdtype(dtype, np.complexfloating), dtype
        out_dtype = dtype
        pocketfft_dtype = (pd.PocketFftDtype.COMPLEX64 if dtype == np.complex64
                           else pd.PocketFftDtype.COMPLEX128)

        assert list(shape.dimensions())[-len(fft_lengths):] == fft_lengths, (
            shape, fft_lengths)
        out_shape = shape.dimensions()

    # PocketFft does not allow size 0 dimensions.
    if 0 in shape.dimensions() or 0 in out_shape:
        return xla_client.ops.Broadcast(
            xla_client.ops.Constant(c, np.array(0, dtype=out_dtype)),
            out_shape)

    # Builds a PocketFftDescriptor flatbuffer. This descriptor is passed to the
    # C++ kernel to describe the FFT to perform.
    pd.PocketFftDescriptorStartShapeVector(builder, n)
    for d in reversed(
            shape.dimensions() if fft_type != FftType.IRFFT else out_shape):
        builder.PrependUint64(d)
    if flatbuffers_version_2:
        pocketfft_shape = builder.EndVector()
    else:
        pocketfft_shape = builder.EndVector(n)

    pd.PocketFftDescriptorStartStridesInVector(builder, n)
    stride = dtype.itemsize
    for d in reversed(shape.dimensions()):
        builder.PrependUint64(stride)
        stride *= d
    if flatbuffers_version_2:
        strides_in = builder.EndVector()
    else:
        strides_in = builder.EndVector(n)
    pd.PocketFftDescriptorStartStridesOutVector(builder, n)
    stride = out_dtype.itemsize
    for d in reversed(out_shape):
        builder.PrependUint64(stride)
        stride *= d
    if flatbuffers_version_2:
        strides_out = builder.EndVector()
    else:
        strides_out = builder.EndVector(n)

    pd.PocketFftDescriptorStartAxesVector(builder, len(fft_lengths))
    for d in range(len(fft_lengths)):
        builder.PrependUint32(n - d - 1)
    if flatbuffers_version_2:
        axes = builder.EndVector()
    else:
        axes = builder.EndVector(len(fft_lengths))

    scale = 1. if forward else (1. / np.prod(fft_lengths))
    pd.PocketFftDescriptorStart(builder)
    pd.PocketFftDescriptorAddDtype(builder, pocketfft_dtype)
    pd.PocketFftDescriptorAddFftType(builder, pocketfft_type)
    pd.PocketFftDescriptorAddShape(builder, pocketfft_shape)
    pd.PocketFftDescriptorAddStridesIn(builder, strides_in)
    pd.PocketFftDescriptorAddStridesOut(builder, strides_out)
    pd.PocketFftDescriptorAddAxes(builder, axes)
    pd.PocketFftDescriptorAddForward(builder, forward)
    pd.PocketFftDescriptorAddScale(builder, scale)
    descriptor = pd.PocketFftDescriptorEnd(builder)
    builder.Finish(descriptor)
    descriptor_bytes = builder.Output()

    return xla_client.ops.CustomCallWithLayout(
        c,
        b"pocketfft",
        operands=(
            xla_client.ops.Constant(
                c, np.frombuffer(descriptor_bytes, dtype=np.uint8)),
            a,
        ),
        shape_with_layout=xla_client.Shape.array_shape(
            out_dtype, out_shape, tuple(range(n - 1, -1, -1))),
        operand_shapes_with_layout=(
            xla_client.Shape.array_shape(np.dtype(np.uint8),
                                         (len(descriptor_bytes), ), (0, )),
            xla_client.Shape.array_shape(dtype, shape.dimensions(),
                                         tuple(range(n - 1, -1, -1))),
        ))
コード例 #23
0
    async def game_loop(self):

        last_tick_game_time = None  # What the tick time of the last observed tick was
        last_call_real_time = datetime.now()  # When we last called the Agent

        packet = GameTickPacket()

        # Run until main process tells to stop
        while not self.quit_event.is_set():
            before = datetime.now()

            self.game_interface.update_live_data_packet(packet)

            # Run the Agent only if the gameInfo has updated.
            tick_game_time = packet.game_info.seconds_elapsed
            worth_communicating = tick_game_time != last_tick_game_time or \
                                  datetime.now() - last_call_real_time >= MAX_AGENT_CALL_PERIOD

            ball = packet.game_ball
            if ball is not None and worth_communicating and max(
                    self.running_indices) < packet.num_cars:
                last_tick_game_time = tick_game_time
                last_call_real_time = datetime.now()

                tiny_player_offsets = []
                builder = flatbuffers.Builder(0)

                for i in range(packet.num_cars):
                    tiny_player_offsets.append(
                        self.copy_player(packet.game_cars[i], builder))

                TinyPacket.TinyPacketStartPlayersVector(
                    builder, packet.num_cars)
                for i in reversed(range(0, len(tiny_player_offsets))):
                    rlbot_index = self.get_rlbot_index(i)
                    builder.PrependUOffsetTRelative(
                        tiny_player_offsets[rlbot_index])
                players_offset = builder.EndVector(len(tiny_player_offsets))

                ballOffset = self.copy_ball(ball, builder)

                TinyPacket.TinyPacketStart(builder)
                TinyPacket.TinyPacketAddPlayers(builder, players_offset)
                TinyPacket.TinyPacketAddBall(builder, ballOffset)
                packet_offset = TinyPacket.TinyPacketEnd(builder)

                builder.Finish(packet_offset)
                buffer = bytes(builder.Output())

                filtered_sockets = {s for s in self.current_sockets if s.open}
                for socket in filtered_sockets:
                    await socket.send(buffer)

                self.current_sockets = filtered_sockets

            after = datetime.now()
            duration = (after - before).total_seconds()

            sleep_secs = 1 / 60 - duration
            if sleep_secs > 0:
                await asyncio.sleep(sleep_secs)
コード例 #24
0
    def _create_metadata(self):
        """Creates the metadata for an image classifier."""

        # Creates model info.
        model_meta = _metadata_fb.ModelMetadataT()
        model_meta.name = self.model_info.name
        model_meta.description = ("Identify the most prominent object in the "
                                  "image from a set of %d categories." %
                                  self.model_info.num_classes)
        model_meta.version = self.model_info.version
        model_meta.author = "TensorFlow"
        model_meta.license = ("Apache License. Version 2.0 "
                              "http://www.apache.org/licenses/LICENSE-2.0.")

        # Creates input info.
        input_meta = _metadata_fb.TensorMetadataT()
        input_meta.name = "image"
        input_meta.description = (
            "Input image to be classified. The expected image is {0} x {1}, with "
            "three channels (red, blue, and green) per pixel. Each value in the "
            "tensor is a single byte between {2} and {3}.".format(
                self.model_info.image_width, self.model_info.image_height,
                self.model_info.image_min, self.model_info.image_max))
        input_meta.content = _metadata_fb.ContentT()
        input_meta.content.contentProperties = _metadata_fb.ImagePropertiesT()
        input_meta.content.contentProperties.colorSpace = (
            _metadata_fb.ColorSpaceType.RGB)
        input_meta.content.contentPropertiesType = (
            _metadata_fb.ContentProperties.ImageProperties)
        input_normalization = _metadata_fb.ProcessUnitT()
        input_normalization.optionsType = (
            _metadata_fb.ProcessUnitOptions.NormalizationOptions)
        input_normalization.options = _metadata_fb.NormalizationOptionsT()
        input_normalization.options.mean = self.model_info.mean
        input_normalization.options.std = self.model_info.std
        input_meta.processUnits = [input_normalization]
        input_stats = _metadata_fb.StatsT()
        input_stats.max = [self.model_info.image_max]
        input_stats.min = [self.model_info.image_min]
        input_meta.stats = input_stats

        # Creates output info.
        output_meta = _metadata_fb.TensorMetadataT()
        output_meta.name = "probability"
        output_meta.description = "Probabilities of the %d labels respectively." % self.model_info.num_classes
        output_meta.content = _metadata_fb.ContentT()
        output_meta.content.contentProperties = _metadata_fb.FeaturePropertiesT()
        output_meta.content.contentPropertiesType = (
            _metadata_fb.ContentProperties.FeatureProperties)
        output_stats = _metadata_fb.StatsT()
        output_stats.max = [1.0]
        output_stats.min = [0.0]
        output_meta.stats = output_stats
        label_file = _metadata_fb.AssociatedFileT()
        label_file.name = os.path.basename(self.label_file_path)
        label_file.description = "Labels for objects that the model can recognize."
        label_file.type = _metadata_fb.AssociatedFileType.TENSOR_AXIS_LABELS
        output_meta.associatedFiles = [label_file]

        # Creates subgraph info.
        subgraph = _metadata_fb.SubGraphMetadataT()
        subgraph.inputTensorMetadata = [input_meta]
        subgraph.outputTensorMetadata = [output_meta]
        model_meta.subgraphMetadata = [subgraph]

        b = flatbuffers.Builder(0)
        b.Finish(model_meta.Pack(b),
                 _metadata.MetadataPopulator.METADATA_FILE_IDENTIFIER)
        self.metadata_buf = b.Output()
コード例 #25
0
def write_calibration_table(calibration_cache):
    '''
    Helper function to write calibration table to files.   
    '''
    import json
    import flatbuffers
    import onnxruntime.quantization.CalTableFlatBuffers.TrtTable as TrtTable
    import onnxruntime.quantization.CalTableFlatBuffers.KeyValue as KeyValue

    logging.info("calibration cache: {}".format(calibration_cache))

    with open("calibration.json", 'w') as file:
        file.write(json.dumps(
            calibration_cache))  # use `json.loads` to do the reverse

    # Serialize data using FlatBuffers
    builder = flatbuffers.Builder(1024)
    key_value_list = []
    for key in sorted(calibration_cache.keys()):
        values = calibration_cache[key]
        value = str(max(abs(values[0]), abs(values[1])))

        flat_key = builder.CreateString(key)
        flat_value = builder.CreateString(value)

        KeyValue.KeyValueStart(builder)
        KeyValue.KeyValueAddKey(builder, flat_key)
        KeyValue.KeyValueAddValue(builder, flat_value)
        key_value = KeyValue.KeyValueEnd(builder)

        key_value_list.append(key_value)

    TrtTable.TrtTableStartDictVector(builder, len(key_value_list))
    for key_value in key_value_list:
        builder.PrependUOffsetTRelative(key_value)
    main_dict = builder.EndVector(len(key_value_list))

    TrtTable.TrtTableStart(builder)
    TrtTable.TrtTableAddDict(builder, main_dict)
    cal_table = TrtTable.TrtTableEnd(builder)

    builder.Finish(cal_table)
    buf = builder.Output()

    with open("calibration.flatbuffers", 'wb') as file:
        file.write(buf)

    # Deserialize data (for validation)
    if False:
        cal_table = TrtTable.TrtTable.GetRootAsTrtTable(buf, 0)
        dict_len = cal_table.DictLength()
        for i in range(dict_len):
            key_value = cal_table.Dict(i)
            logging.info(key_value.Key())
            logging.info(key_value.Value())

    # write plain text
    with open("calibration.cache", 'w') as file:
        for key in sorted(calibration_cache.keys()):
            value = calibration_cache[key]
            s = key + ' ' + str(max(abs(value[0]), abs(value[1])))
            file.write(s)
            file.write('\n')
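A small usage sketch for write_calibration_table(); the tensor names and ranges below are made up, but they match the (min, max) pairs the function indexes as values[0] and values[1].

# Hypothetical calibration cache mapping tensor names to (min, max) ranges.
example_cache = {
    "conv1_output": (-1.25, 1.5),
    "relu2_output": (0.0, 6.0),
}
# Writes calibration.json, calibration.flatbuffers and calibration.cache
# to the current directory, as in the function above.
write_calibration_table(example_cache)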
コード例 #26
0
def convertText(convertContext, data):
    """
	Converts text for use in a scene. The data map is expected to contain the following elements:
	- resources: the name of the vector resources to get the font from.
	- font: the name of the font to use if not provided by the text XML element. Defaults to
	  'serif'.
	- text: the text. This can either be a path to a .xml file or embedded XML string. The XML
	  contents should be a single <text> SVG element with any number of <tspan> embedded elements.
	  (see https://www.w3.org/TR/SVG2/text.html#TextElement for details) Only solid colors are
	  allowed for stroke and fill. When a position is provided, only a relative offset for the
	  vertical position is supported.
	"""
    builder = flatbuffers.Builder(0)

    try:
        vectorResources = str(data['vectorResources'])
        defaultFont = str(data.get('font', 'serif'))
        textData = str(data['text']).strip()
    except KeyError as e:
        raise Exception('SceneText doesn\'t contain element ' + str(e) + '.')
    except (AttributeError, TypeError, ValueError):
        raise Exception('SceneText must be an object.')

    if textData.startswith('<'):
        if sys.version_info < (3, 0):
            textData = unicode(textData)
        textXml = minidom.parse(io.StringIO(textData))
    else:
        textXml = minidom.parse(textData)

    materials = Materials('text')
    font, text, ranges = readText(textXml.firstChild, defaultFont, [0.0, 0.0],
                                  0.0, materials)
    if not text:
        raise Exception('Invalid SVG text data.')

    colorMaterials = dict()
    for i in range(0, len(materials.colors)):
        colorMaterials[materials.getColorName(i)] = materials.colors[i]

    vectorResourcesOffset = builder.CreateString(vectorResources)
    fontOffset = builder.CreateString(font.font)

    VectorResourceRef.Start(builder)
    VectorResourceRef.AddResources(builder, vectorResourcesOffset)
    VectorResourceRef.AddName(builder, fontOffset)
    vectorResourceRefOffset = VectorResourceRef.End(builder)

    textOffset = builder.CreateString(text)

    rangeOffsets = []
    for textRange in ranges:
        style = textRange.style

        SceneTextStyle.Start(builder)
        SceneTextStyle.AddStart(builder, textRange.start)
        SceneTextStyle.AddCount(builder, textRange.count)
        SceneTextStyle.AddSize(builder, style.font.size)
        SceneTextStyle.AddEmbolden(builder, style.font.embolden)
        SceneTextStyle.AddSlant(builder, style.font.slant)
        SceneTextStyle.AddOutlineWidth(
            builder, style.stroke.width if style.stroke else 0.0)
        SceneTextStyle.AddFuziness(builder, 1.0)
        SceneTextStyle.AddVerticalOffset(
            builder, textRange.position[1]
            if textRange.positionType == TextPosition.Offset else 0.0)

        if style.fill:
            colorOffset = createColor(colorMaterials, style.fill, builder)
        else:
            colorOffset = 0

        SceneTextStyle.AddColor(builder, colorOffset)

        if style.stroke:
            colorOffset = createColor(colorMaterials, style.stroke, builder)
        else:
            colorOffset = 0

        SceneTextStyle.AddOutlineColor(builder, colorOffset)
        rangeOffsets.append(SceneTextStyle.End(builder))

    SceneText.StartStylesVector(builder, len(rangeOffsets))
    for offset in reversed(rangeOffsets):
        builder.PrependUOffsetTRelative(offset)
    stylesOffset = builder.EndVector()

    SceneText.Start(builder)
    SceneText.AddFont(builder, vectorResourceRefOffset)
    SceneText.AddText(builder, textOffset)
    SceneText.AddStyles(builder, stylesOffset)
    builder.Finish(SceneText.End(builder))
    return builder.Output()
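A hedged example of the data map convertText() reads, using the embedded-XML form of the "text" element; the resource and font names are placeholders. Note that the code looks up "vectorResources" even though the docstring calls the element "resources".

# Hypothetical SceneText data map.
scene_text_data = {
    "vectorResources": "vectorResources",
    "font": "serif",
    "text": '<text x="0" y="0" font-size="24" fill="#ff0000">'
            '<tspan>Hello</tspan><tspan fill="#0000ff"> world</tspan></text>'
}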
コード例 #27
0
def convertShadowManager(convertContext, data):
    """
	Converts a shadow manager for a scene. The data map is expected to contain the following
	elements:
	- lightSet: the name of the light set to query the light from. If set, this will be the default
	  for elements in the shadows array.
	- shadows: array of objects for the shadows the shadow manager will manage. Each element is
	  expected to have the following members:
	  - name: name of the shadows.
	  - lightSet: name of the light set to query the light from.
	  - lightType: type of the light to shadow. See dsSceneLightType enum for values, removing the
	    type prefix.
	  - light: name of the light to shadow. May be unset to disable initially until set at runtime.
	  - transformGroupDesc: name of the shader variable group description for the transform group.
	  - transformGroupName: name of the transform group to set as view global data. This may be
	    omitted if not used as global data on a view.
	  - maxCascades: the maximum number of cascades for cascaded directional light shadows. Defaults
	    to 4.
	  - maxFirstSplitDistance: maximum distance for the first split for cascaded shadows. Defaults
	    to 100.
	  - cascadeExpFactor: exponential factor for cascaded shadows in the range [0, 1], where 0 uses
	    linear distances between the splits and 1 is fully exponential. Defaults to 0.5.
	  - minDepthRanges: minimum distance between the near and far planes for each cascade. Spot and
	    point light shadows only use the first value. Can either be an array to set the cascade
	    values or a float to set all 4 possible cascade values.
	  - fadeStartDistance: the distance to start fading out shadows. Defaults to 100000000, which is
	    a large distance less likely to break GPUs that use limited precision floats.
	  - maxDistance: the maximum distance to display shadows. Defaults to 100000000, which is a
	    large distance less likely to break GPUs that use limited precision floats.
	"""
    largeDistance = 100000000.0

    def readFloat(value, name, minVal=None, maxVal=None):
        try:
            floatVal = float(value)
            if (minVal is not None and floatVal < minVal) or \
              (maxVal is not None and floatVal > maxVal):
                raise Exception()  # Common error handling in except block.
            return floatVal
        except:
            raise Exception('Invalid ' + name + ' value "' + str(value) + '".')

    try:
        shadowsData = data['shadows']
        defaultLightSet = data.get('lightSet', '')
        shadows = []
        try:
            for shadowData in shadowsData:
                try:
                    shadow = Object()
                    shadow.name = str(shadowData['name'])
                    shadow.lightSet = str(
                        shadowData.get('lightSet', defaultLightSet))
                    if not shadow.lightSet:
                        raise KeyError('lightSet')

                    lightTypeStr = str(shadowData['lightType'])
                    try:
                        shadow.lightType = getattr(LightType, lightTypeStr)
                    except AttributeError:
                        raise Exception('Invalid light type "' + lightTypeStr +
                                        '".')

                    shadow.light = str(shadowData.get('light', ''))
                    shadow.transformGroupDesc = str(
                        shadowData['transformGroupDesc'])
                    shadow.transformGroupName = str(
                        shadowData.get('transformGroupName', ''))

                    maxCascadesVal = shadowData.get('maxCascades', 4)
                    try:
                        shadow.maxCascades = int(maxCascadesVal)
                        if shadow.maxCascades < 1 or shadow.maxCascades > 4:
                            raise Exception()  # Common error handling in except block.
                    except:
                        raise Exception('Invalid max cascade count "' +
                                        str(maxCascadesVal) + '".')

                    minDepthRangesVal = shadowData.get('minDepthRanges')
                    shadow.minDepthRanges = []
                    if isinstance(minDepthRangesVal, list):
                        for val in minDepthRangesVal:
                            shadow.minDepthRanges.append(
                                readFloat(val, 'minDepthRanges'))
                    elif minDepthRangesVal:
                        floatVal = readFloat(minDepthRangesVal,
                                             'minDepthRanges')
                        shadow.minDepthRanges = [
                            floatVal, floatVal, floatVal, floatVal
                        ]

                    shadow.maxFirstSplitDistance = readFloat(
                        shadowData.get('maxFirstSplitDistance', 100.0),
                        'max first split distance', 0.1)
                    shadow.cascadeExpFactor = readFloat(
                        shadowData.get('cascadeExpFactor', 0.5),
                        'cascade exp factor', 0.0, 1.0)
                    shadow.fadeStartDistance = readFloat(
                        shadowData.get('fadeStartDistance', largeDistance),
                        'fade start distance', 0.0)
                    shadow.maxDistance = readFloat(
                        shadowData.get('maxDistance', largeDistance),
                        'max distance', 0.1)
                    shadows.append(shadow)
                except KeyError as e:
                    raise Exception(
                        'ShadowManager shadows doesn\'t contain element ' +
                        str(e) + '.')
        except (AttributeError, TypeError, ValueError):
            raise Exception(
                'ShadowManager shadows must be an array of objects.')
    except KeyError as e:
        raise Exception('ShadowManager doesn\'t contain element ' + str(e) +
                        '.')
    except (AttributeError, TypeError, ValueError):
        raise Exception('ShadowManager must be an object.')

    builder = flatbuffers.Builder(0)

    shadowOffsets = []
    for shadow in shadows:
        nameOffset = builder.CreateString(shadow.name)
        lightSetOffset = builder.CreateString(shadow.lightSet)
        if shadow.light:
            lightOffset = builder.CreateString(shadow.light)
        else:
            lightOffset = 0

        transformGroupDescOffset = builder.CreateString(
            shadow.transformGroupDesc)
        if shadow.transformGroupName:
            transformGroupNameOffset = builder.CreateString(
                shadow.transformGroupName)
        else:
            transformGroupNameOffset = 0

        if shadow.minDepthRanges:
            SceneLightShadows.StartMinDepthRangesVector(
                builder, len(shadow.minDepthRanges))
            for val in reversed(shadow.minDepthRanges):
                builder.PrependFloat32(val)
            minDepthRangesOffset = builder.EndVector()
        else:
            minDepthRangesOffset = 0

        SceneLightShadows.Start(builder)
        SceneLightShadows.AddName(builder, nameOffset)
        SceneLightShadows.AddLightSet(builder, lightSetOffset)
        SceneLightShadows.AddLightType(builder, shadow.lightType)
        SceneLightShadows.AddLight(builder, lightOffset)
        SceneLightShadows.AddTransformGroupDesc(builder,
                                                transformGroupDescOffset)
        SceneLightShadows.AddTransformGroupName(builder,
                                                transformGroupNameOffset)
        SceneLightShadows.AddMaxCascades(builder, shadow.maxCascades)
        SceneLightShadows.AddMaxFirstSplitDistance(
            builder, shadow.maxFirstSplitDistance)
        SceneLightShadows.AddCascadeExpFactor(builder, shadow.cascadeExpFactor)
        SceneLightShadows.AddMinDepthRanges(builder, minDepthRangesOffset)
        SceneLightShadows.AddFadeStartDistance(builder,
                                               shadow.fadeStartDistance)
        SceneLightShadows.AddMaxDistance(builder, shadow.maxDistance)
        shadowOffsets.append(SceneLightShadows.End(builder))

    SceneShadowManager.StartShadowsVector(builder, len(shadowOffsets))
    for offset in reversed(shadowOffsets):
        builder.PrependUOffsetTRelative(offset)
    shadowsOffset = builder.EndVector()

    SceneShadowManager.Start(builder)
    SceneShadowManager.AddShadows(builder, shadowsOffset)
    builder.Finish(SceneShadowManager.End(builder))
    return builder.Output()
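A hedged sketch of a ShadowManager data map covering the elements convertShadowManager() reads; the names and the light type are placeholders, and omitted values fall back to the defaults described in the docstring.

# Hypothetical ShadowManager data map.
shadow_manager_data = {
    "lightSet": "mainLightSet",
    "shadows": [
        {
            "name": "sunShadows",
            "lightType": "Directional",   # looked up on LightType, so it must name a real member
            "light": "sun",
            "transformGroupDesc": "shadowTransformDesc",
            "transformGroupName": "sunShadowTransform",
            "maxCascades": 4,
            "minDepthRanges": 1.0         # a single float applies to all four cascades
        }
    ]
}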
コード例 #28
0
ファイル: server.py プロジェクト: dblarons/networks-project
def build_command(builder, message_type, message):
    registrar.Registrar.Command.CommandStart(builder)
    registrar.Registrar.Command.CommandAddMessageType(builder, message_type)
    registrar.Registrar.Command.CommandAddMessage(builder, message)
    return registrar.Registrar.Command.CommandEnd(builder)


while True:
    #  Wait for next request from client
    request = rep_socket.recv()

    Command = registrar.Registrar.Command.Command.GetRootAsCommand(request, 0)

    # Switch on the message type to send a response.
    req_builder = flatbuffers.Builder(1024)
    pub_builder = flatbuffers.Builder(1024)
    message_offset = 0
    message_type = Command.MessageType()
    publish_offset = None
    channel = None
    if message_type == registrar.Registrar.Message.Message().List:
        print('SERVER: Received a List command')
        rooms = Room.select()
        message_offset = list_api.response(req_builder, rooms)
    elif message_type == registrar.Registrar.Message.Message().Create:
        print('SERVER: Received a Create command')
        union_create = registrar.Registrar.Create.Create()
        union_create.Init(Command.Message().Bytes, Command.Message().Pos)
        client = union_create.Client()
        room = None
コード例 #29
0
def builder():
    _builder = flatbuffers.Builder(0)
    return _builder
コード例 #30
0
    def forward(self):
        builder = flatbuffers.Builder(64)

        # construct MessageBody
        ppx_Run.RunStart(builder)
        message_body = ppx_Run.RunEnd(builder)

        # construct Message
        ppx_Message.MessageStart(builder)
        ppx_Message.MessageAddBodyType(builder,
                                       ppx_MessageBody.MessageBody().Run)
        ppx_Message.MessageAddBody(builder, message_body)
        message = ppx_Message.MessageEnd(builder)
        builder.Finish(message)

        message = builder.Output()
        self._requester.send_request(message)
        # Bradley: This is very inefficient. Should replace
        # with a hash map. But I don't think this will be used
        # once everything is connected.

        while True:
            reply = self._requester.receive_reply()
            message_body = self._get_message_body(reply)

            if isinstance(message_body, ppx_RunResult.RunResult):
                result = self._protocol_tensor_to_variable(
                    message_body.Result())
                return result
            elif isinstance(message_body, ppx_Sample.Sample):
                address = message_body.Address().decode('utf-8')
                name = message_body.Name().decode('utf-8')
                if name == '':
                    name = None
                control = bool(message_body.Control())
                replace = bool(message_body.Replace())
                distribution_type = message_body.DistributionType()
                if distribution_type == ppx_Distribution.Distribution(
                ).Uniform:
                    uniform = ppx_Uniform.Uniform()
                    uniform.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    low = self._protocol_tensor_to_variable(uniform.Low())
                    high = self._protocol_tensor_to_variable(uniform.High())
                    dist = Uniform(low, high)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Normal:
                    normal = ppx_Normal.Normal()
                    normal.Init(message_body.Distribution().Bytes,
                                message_body.Distribution().Pos)
                    mean = self._protocol_tensor_to_variable(normal.Mean())
                    stddev = self._protocol_tensor_to_variable(normal.Stddev())
                    dist = Normal(mean, stddev)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Categorical:
                    categorical = ppx_Categorical.Categorical()
                    categorical.Init(message_body.Distribution().Bytes,
                                     message_body.Distribution().Pos)
                    probs = self._protocol_tensor_to_variable(
                        categorical.Probs())
                    dist = Categorical(probs)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Poisson:
                    poisson = ppx_Poisson.Poisson()
                    poisson.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    rate = self._protocol_tensor_to_variable(poisson.Rate())
                    dist = Poisson(rate)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Gamma:
                    gamma = ppx_Gamma.Gamma()
                    gamma.Init(message_body.Distribution().Bytes,
                               message_body.Distribution().Pos)
                    concentration = self._protocol_tensor_to_variable(
                        gamma.Concentration())
                    rate = self._protocol_tensor_to_variable(gamma.Rate())
                    dist = Gamma(concentration, rate)
                elif distribution_type == ppx_Distribution.Distribution(
                ).LogNormal:
                    log_normal = ppx_LogNormal.LogNormal()
                    log_normal.Init(message_body.Distribution().Bytes,
                                    message_body.Distribution().Pos)
                    mean = self._protocol_tensor_to_variable(log_normal.Mean())
                    stddev = self._protocol_tensor_to_variable(
                        log_normal.Stddev())
                    dist = LogNormal(mean, stddev)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Exponential:
                    exponential = ppx_Exponential.Exponential()
                    exponential.Init(message_body.Distribution().Bytes,
                                     message_body.Distribution().Pos)
                    rate = self._protocol_tensor_to_variable(
                        exponential.Rate())
                    dist = Exponential(rate)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Weibull:
                    weibull = ppx_Weibull.Weibull()
                    weibull.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    scale = self._protocol_tensor_to_variable(weibull.Scale())
                    concentration = self._protocol_tensor_to_variable(
                        weibull.Concentration())
                    dist = Weibull(scale, concentration)
                else:
                    raise RuntimeError(
                        'ppx (Python): Sample from an unexpected distribution requested.'
                    )
                result = state.sample(distribution=dist,
                                      control=control,
                                      replace=replace,
                                      name=name,
                                      address=address)
                # print(" Debug statement in pyprob.Remote.forward(). \n Printing the sampling statement \n \
                # Result :  {0} \n \
                # Distribution : {1}".format(result,dist))
                builder = flatbuffers.Builder(64)
                result = self._variable_to_protocol_tensor(builder, result)
                ppx_SampleResult.SampleResultStart(builder)
                ppx_SampleResult.SampleResultAddResult(builder, result)
                message_body = ppx_SampleResult.SampleResultEnd(builder)

                # construct Message
                ppx_Message.MessageStart(builder)
                ppx_Message.MessageAddBodyType(
                    builder,
                    ppx_MessageBody.MessageBody().SampleResult)
                ppx_Message.MessageAddBody(builder, message_body)
                message = ppx_Message.MessageEnd(builder)
                builder.Finish(message)

                message = builder.Output()
                self._requester.send_request(message)
            elif isinstance(message_body, ppx_Observe.Observe):
                address = message_body.Address().decode('utf-8')
                name = message_body.Name().decode('utf-8')
                if name == '':
                    name = None
                value = self._protocol_tensor_to_variable(message_body.Value())
                distribution_type = message_body.DistributionType()
                if distribution_type == ppx_Distribution.Distribution().NONE:
                    dist = None
                elif distribution_type == ppx_Distribution.Distribution(
                ).Uniform:
                    uniform = ppx_Uniform.Uniform()
                    uniform.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    low = self._protocol_tensor_to_variable(uniform.Low())
                    high = self._protocol_tensor_to_variable(uniform.High())
                    dist = Uniform(low, high)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Normal:
                    normal = ppx_Normal.Normal()
                    normal.Init(message_body.Distribution().Bytes,
                                message_body.Distribution().Pos)
                    mean = self._protocol_tensor_to_variable(normal.Mean())
                    stddev = self._protocol_tensor_to_variable(normal.Stddev())
                    dist = Normal(mean, stddev)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Categorical:
                    categorical = ppx_Categorical.Categorical()
                    categorical.Init(message_body.Distribution().Bytes,
                                     message_body.Distribution().Pos)
                    probs = self._protocol_tensor_to_variable(
                        categorical.Probs())
                    dist = Categorical(probs)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Poisson:
                    poisson = ppx_Poisson.Poisson()
                    poisson.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    rate = self._protocol_tensor_to_variable(poisson.Rate())
                    dist = Poisson(rate)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Gamma:
                    gamma = ppx_Gamma.Gamma()
                    gamma.Init(message_body.Distribution().Bytes,
                               message_body.Distribution().Pos)
                    concentration = self._protocol_tensor_to_variable(
                        gamma.Concentration())
                    rate = self._protocol_tensor_to_variable(gamma.Rate())
                    dist = Gamma(concentration, rate)
                elif distribution_type == ppx_Distribution.Distribution(
                ).LogNormal:
                    log_normal = ppx_LogNormal.LogNormal()
                    log_normal.Init(message_body.Distribution().Bytes,
                                    message_body.Distribution().Pos)
                    mean = self._protocol_tensor_to_variable(log_normal.Mean())
                    stddev = self._protocol_tensor_to_variable(
                        log_normal.Stddev())
                    dist = LogNormal(mean, stddev)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Exponential:
                    exponential = ppx_Exponential.Exponential()
                    exponential.Init(message_body.Distribution().Bytes,
                                     message_body.Distribution().Pos)
                    rate = self._protocol_tensor_to_variable(
                        exponential.Rate())
                    dist = Exponential(rate)
                elif distribution_type == ppx_Distribution.Distribution(
                ).Weibull:
                    weibull = ppx_Weibull.Weibull()
                    weibull.Init(message_body.Distribution().Bytes,
                                 message_body.Distribution().Pos)
                    scale = self._protocol_tensor_to_variable(weibull.Scale())
                    concentration = self._protocol_tensor_to_variable(
                        weibull.Concentration())
                    dist = Weibull(scale, concentration)
                else:
                    raise RuntimeError(
                        'ppx (Python): Sample from an unexpected distribution requested: {}'
                        .format(distribution_type))

                state.observe(distribution=dist,
                              value=value,
                              name=name,
                              address=address)
                builder = flatbuffers.Builder(64)
                ppx_ObserveResult.ObserveResultStart(builder)
                message_body = ppx_ObserveResult.ObserveResultEnd(builder)

                # construct Message
                ppx_Message.MessageStart(builder)
                ppx_Message.MessageAddBodyType(
                    builder,
                    ppx_MessageBody.MessageBody().ObserveResult)
                ppx_Message.MessageAddBody(builder, message_body)
                message = ppx_Message.MessageEnd(builder)
                builder.Finish(message)

                message = builder.Output()
                self._requester.send_request(message)
            elif isinstance(message_body, ppx_Tag.Tag):
                address = message_body.Address().decode('utf-8')
                name = message_body.Name().decode('utf-8')
                if name == '':
                    name = None
                value = self._protocol_tensor_to_variable(message_body.Value())
                state.tag(value=value, name=name, address=address)
                builder = flatbuffers.Builder(64)
                ppx_TagResult.TagResultStart(builder)
                message_body = ppx_TagResult.TagResultEnd(builder)

                # construct Message
                ppx_Message.MessageStart(builder)
                ppx_Message.MessageAddBodyType(
                    builder,
                    ppx_MessageBody.MessageBody().TagResult)
                ppx_Message.MessageAddBody(builder, message_body)
                message = ppx_Message.MessageEnd(builder)
                builder.Finish(message)

                message = builder.Output()
                self._requester.send_request(message)
            elif isinstance(message_body, ppx_Reset.Reset):
                raise RuntimeError(
                    'ppx (Python): Received a reset request. Protocol out of sync.'
                )
            else:
                raise RuntimeError(
                    'ppx (Python): Received unexpected message.')