Example no. 1
0
	def ConcatV2(graph : Graph.Graph, curNode : Graph.Node, dictNodeNameToOutVarStr : dict, extraNodeInfoDict : dict):
		inputsRef = curNode.getInputsRef()
		N = curNode.getAttrMapRef()["\"N\""].getI()
		assert(len(inputsRef) == N+1) #One extra for axis
		#TODO: Since the concat axis is constant, it is known here - the inputs' sizes
		#		along that dimension should be passed as input to the function below.
		#		For now this is hardcoded.
		retAST = AST.UninterpFuncCall(extraNodeInfoDict[curNode.getName()][0],
									TFNodesAST.UninterpFuncCallNames.Concat.name + str(N) + 'T', 
									list(map(lambda x : AST.ID(dictNodeNameToOutVarStr[x]), inputsRef)),
									outputDiffInpDims=1
									) 
		return (None, retAST)
Example no. 2
0
	def Mean(graph : Graph.Graph, curNode : Graph.Node, dictNodeNameToOutVarStr : dict, extraNodeInfoDict : dict):
		inputsRef = curNode.getInputsRef()
		attrMapRef = curNode.getAttrMapRef()
		assert(len(inputsRef) == 2)
		keepdims = False
		if ("\"keep_dims\"" in attrMapRef):
			keepdims = attrMapRef["\"keep_dims\""].getB()
		curNodeShapeLi = extraNodeInfoDict[curNode.getName()][0]
		return (None, AST.Reduce(AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]), 
								AST.ID(dictNodeNameToOutVarStr[inputsRef[1]]), 
								AST.Int(int(keepdims), 32, isSecret=False),
								curNodeShapeLi,
								TFNodesAST.getOperatorsIdx('mean')))
Example no. 3
0
    def GlobalAveragePool(
        node,
        value_info,
        node_name_to_out_var_dict,
        innermost_let_ast_node,
        out_var_count,
        mtdAST,
    ):
        node = OnnxNode(node)
        if DEBUG:
            print(node)
        inputsRef = node.inputs
        assert len(inputsRef) == 1

        reshaped_input_name = get_new_var_name(out_var_count)
        reshaped_input = get_reshaped_input_ast(inputsRef[0], value_info,
                                                node_name_to_out_var_dict)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, reshaped_input, reshaped_input_name,
            mtdAST)
        out_var_count += 1

        seedot_output_ast = AST.Pool(
            AST.Pool.PoolType.AvgPool,
            AST.ID(reshaped_input_name),
            {
                AST.PaddingKeysDict.FH: value_info[inputsRef[0]][1][2],
                AST.PaddingKeysDict.FW: value_info[inputsRef[0]][1][3],
                AST.PaddingKeysDict.zPadHLeft: 0,
                AST.PaddingKeysDict.zPadHRight: 0,
                AST.PaddingKeysDict.zPadWLeft: 0,
                AST.PaddingKeysDict.zPadWRight: 0,
                AST.PaddingKeysDict.strideH: 1,
                AST.PaddingKeysDict.strideW: 1,
            },
        )
        output_name = get_new_var_name(out_var_count)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, seedot_output_ast, output_name, mtdAST)
        out_var_count += 1

        reshaped_output_name = get_new_var_name(out_var_count)
        onnx_output_ast = get_reshaped_output_ast(node.outputs[0], value_info,
                                                  output_name)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, onnx_output_ast, reshaped_output_name,
            mtdAST)
        out_var_count += 1
        node_name_to_out_var_dict[node.outputs[0]] = reshaped_output_name

        return (innermost_let_ast_node, out_var_count)
Example no. 4
0
 def Transpose(graph: Graph.Graph, curNode: Graph.Node,
               dictNodeNameToOutVarStr: dict, extraNodeInfoDict: dict):
     inputsRef = curNode.getInputsRef()
     assert (len(inputsRef) == 2)
     permNodeName = inputsRef[1]
     # We need to fetch the tensor value of the perm Node
     permNode = graph.__getitem__(permNodeName)
     permTensor = permNode.getAttrVal("value").getTensor()
     permList = permTensor.getContentAsValArr()
     assert (permTensor.getDType().kind == "i")
     assert (permTensor.getShapeRef().getRank() == 1)
     return (None,
             AST.Transpose(AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]),
                           permList))
Example no. 5
0
 def Cast(graph: Graph.Graph, curNode: Graph.Node,
          dictNodeNameToOutVarStr: dict, extraNodeInfoDict: dict):
     inputsRef = curNode.getInputsRef()
     assert (len(inputsRef) == 1)
     sourceType = curNode.getAttrMapRef()["\"SrcT\""].getDataType()
     destType = curNode.getAttrMapRef()["\"DstT\""].getDataType()
     return (None,
             AST.UninterpFuncCall(
                 extraNodeInfoDict[curNode.getName()][0],
                 TFNodesAST.UninterpFuncCallNames.Cast.name, [
                     AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]),
                     AST.ID(sourceType.name),
                     AST.ID(destType.name)
                 ]))
Example no. 6
0
	def Split(node, value_info, node_name_to_out_var_dict, innermost_let_ast_node, out_var_count, mtdAST):
		node = OnnxNode(node)
		inputsRef = node.inputs
		output_count = len(node.outputs)

		for cur_count in range(output_count):
			seedot_output_ast = AST.UninterpFuncCall(list(value_info[node.outputs[cur_count]][1]), 'Split',
				 [AST.ID(node_name_to_out_var_dict[inputsRef[0]]), AST.Int(node.attrs['axis'], 32, False), AST.Int(cur_count, 32, False), AST.Int(output_count, 32, False)])
			output_name = get_new_var_name(out_var_count) 
			innermost_let_ast_node = update_program_with_new_node(innermost_let_ast_node, seedot_output_ast, output_name, mtdAST)
			out_var_count += 1
			node_name_to_out_var_dict[node.outputs[cur_count]] = output_name

		return (innermost_let_ast_node, out_var_count)		
Example no. 7
0
 def Slice(graph: Graph.Graph, curNode: Graph.Node,
           dictNodeNameToOutVarStr: dict, extraNodeInfoDict: dict):
     inputsRef = curNode.getInputsRef()
     assert (len(inputsRef) == 3)
     curNodeDataType = curNode.getAttrMapRef()["\"T\""].getDataType()
     retAST = AST.UninterpFuncCall(
         extraNodeInfoDict[curNode.getName()][0],
         TFNodesAST.UninterpFuncCallNames.CreateCopy.name,
         [
             AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]),  # tensor to slice
             AST.ID(dictNodeNameToOutVarStr[inputsRef[1]]),  # begin indices
             AST.ID(dictNodeNameToOutVarStr[inputsRef[2]])  # sizes along each dimension
         ])
     return (None, retAST)
Example no. 8
0
    def fillSymbolTable(self):

        if self.functions is not None:
            for function in self.functions:
                entry = SymbolTableEntry(function.id, 'FUNCTION', {'args': function.args, 'assignments': function.assignments})
                AST.addToSymbolTable(entry)

        for assignment in self.assignments:
            sym_table_entry = assignment.fillSymbolTable()

            if sym_table_entry is not None:
                AST.addToSymbolTable(sym_table_entry)

        return self.symbolTable
Example no. 9
0
 def ExpandDims(
     graph: Graph.Graph,
     curNode: Graph.Node,
     dictNodeNameToOutVarStr: dict,
     extraNodeInfoDict: dict,
 ):
     inputsRef = curNode.getInputsRef()
     assert len(inputsRef) == 2
     retAST = AST.UninterpFuncCall(
         extraNodeInfoDict[curNode.getName()][0],
         TFNodesAST.UninterpFuncCallNames.ExpandDims.name,
         list(map(lambda x: AST.ID(dictNodeNameToOutVarStr[x]), inputsRef)),
     )
     return (None, {curNode.getName(): retAST})
Example no. 10
0
 def enterIterationStatement(self, ctx: CParser.IterationStatementContext):
     node = None
     if ctx.For():
         node = AST.ASTForStmtNode("For", ctx=ctx)
     elif ctx.While():
         node = AST.ASTWhileStmtNode("While", ctx=ctx)
     node.parent = self.current_node
     outer_for_scope = STT.STTNode()
     outer_for_scope.parent = self.current_node.scope
     outer_for_scope.depth = outer_for_scope.parent.depth + 1
     node.scope = outer_for_scope
     self.current_node.children.append(node)
     self.current_node.scope.children.append(outer_for_scope)
     self.current_node = node
Example no. 11
0
    def Conv2D(
        graph: Graph.Graph,
        curNode: Graph.Node,
        dictNodeNameToOutVarStr: dict,
        extraNodeInfoDict: dict,
    ):
        inputsRef = curNode.getInputsRef()
        assert len(inputsRef) == 2

        stridesUsed = curNode.getAttrMapRef()["strides"].getList().getILi()
        assert stridesUsed[0] == 1 and stridesUsed[3] == 1
        strideH = stridesUsed[1]
        strideW = stridesUsed[2]

        inputShape = extraNodeInfoDict[inputsRef[0]][0]
        imgH = inputShape[1]
        imgW = inputShape[2]

        filterShape = extraNodeInfoDict[inputsRef[1]][0]
        FH = filterShape[0]
        FW = filterShape[1]

        paddingUsedStr = curNode.getAttrMapRef()["padding"].getS()

        [zPadHLeft, zPadHRight, zPadWLeft,
         zPadWRight] = TFNodesAST.helper_findPadding(imgH, imgW, FH, FW,
                                                     strideH, strideW,
                                                     paddingUsedStr)

        options = {}
        options[AST.PaddingKeysDict.FH] = FH
        options[AST.PaddingKeysDict.FW] = FW
        options[AST.PaddingKeysDict.zPadHLeft] = zPadHLeft
        options[AST.PaddingKeysDict.zPadHRight] = zPadHRight
        options[AST.PaddingKeysDict.zPadWLeft] = zPadWLeft
        options[AST.PaddingKeysDict.zPadWRight] = zPadWRight
        options[AST.PaddingKeysDict.strideH] = strideH
        options[AST.PaddingKeysDict.strideW] = strideW
        return (
            None,
            {
                curNode.getName():
                AST.BOp(
                    AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]),
                    TFNodesAST.getOperatorsIdx("#"),
                    AST.ID(dictNodeNameToOutVarStr[inputsRef[1]]),
                    options,
                )
            },
        )
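The zero-padding amounts in Conv2D come from TFNodesAST.helper_findPadding, which is not shown among these examples. Below is a minimal sketch of what such a helper computes, assuming TensorFlow's usual SAME/VALID semantics for 2-D convolutions; the name find_padding_2d and its handling of the padding string are illustrative, not the real Athos implementation.

import math

def find_padding_2d(imgH, imgW, FH, FW, strideH, strideW, paddingStr):
    # TF "VALID": no padding. TF "SAME": pad so that out = ceil(in / stride),
    # putting the odd extra pixel (if any) on the right/bottom side.
    if "SAME" not in paddingStr:
        return [0, 0, 0, 0]
    outH, outW = math.ceil(imgH / strideH), math.ceil(imgW / strideW)
    padH = max((outH - 1) * strideH + FH - imgH, 0)
    padW = max((outW - 1) * strideW + FW - imgW, 0)
    return [padH // 2, padH - padH // 2, padW // 2, padW - padW // 2]  # [HLeft, HRight, WLeft, WRight]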
Example no. 12
0
def get_reshaped_bias_ast(bias_name, value_info, node_name_to_out_var_dict,
                          dim):
    if dim == 2:
        return AST.Reshape(
            AST.ID(node_name_to_out_var_dict[bias_name]),
            [1, 1, 1, value_info[bias_name][1][0]],
            None,
        )
    else:
        return AST.Reshape(
            AST.ID(node_name_to_out_var_dict[bias_name]),
            [1, 1, 1, 1, value_info[bias_name][1][0]],
            None,
        )
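The reshape in get_reshaped_bias_ast gives the 1-D bias explicit singleton N/H/W (and D) dimensions so it broadcast-adds over an NHWC (or NDHWC) activation. A small numpy illustration of the shape rule this encodes; numpy is only a stand-in for the SeeDot semantics.

import numpy as np

act = np.zeros((1, 5, 5, 8))             # NHWC activation, C = 8
bias = np.arange(8, dtype=np.float64)    # 1-D bias of length C
out = act + bias.reshape(1, 1, 1, 8)     # [1, 1, 1, C] broadcasts over N, H and W
print(out.shape)                         # (1, 5, 5, 8)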
Example no. 13
0
	def Fill(graph : Graph.Graph, curNode : Graph.Node, dictNodeNameToOutVarStr : dict, extraNodeInfoDict : dict):
		inputsRef = curNode.getInputsRef()
		assert(len(inputsRef) == 2)
		curNodeOutputShape = extraNodeInfoDict[inputsRef[0]][0]
		assert(len(curNodeOutputShape) == 1) #inputsRef[0] denotes a shape and should have a rank of 1
		
		curNodeOutputType = curNode.getAttrMapRef()["T"].getDataType()
		assert(curNodeOutputType is not Graph.DataTypeEnum.DT_INVALID)

		retAST = AST.UninterpFuncCall(extraNodeInfoDict[curNode.getName()][0],
									TFNodesAST.UninterpFuncCallNames.CreateTensor.name, 
									[AST.ID(dictNodeNameToOutVarStr[inputsRef[1]]) ],
									isSecret=False)
		return (None, { curNode.getName() : retAST})
Example no. 14
0
    def Conv3D(graph: Graph.Graph, curNode: Graph.Node,
               dictNodeNameToOutVarStr: dict, extraNodeInfoDict: dict):
        inputsRef = curNode.getInputsRef()
        assert (len(inputsRef) == 2)

        stridesUsed = curNode.getAttrMapRef()["strides"].getList().getILi()
        assert (stridesUsed[0] == 1 and stridesUsed[4] == 1)
        strideD = stridesUsed[1]
        strideH = stridesUsed[2]
        strideW = stridesUsed[3]

        inputShape = extraNodeInfoDict[inputsRef[0]][0]
        imgD = inputShape[1]
        imgH = inputShape[2]
        imgW = inputShape[3]

        filterShape = extraNodeInfoDict[inputsRef[1]][0]
        FD = filterShape[0]
        FH = filterShape[1]
        FW = filterShape[2]

        paddingUsedStr = curNode.getAttrMapRef()["padding"].getS()

        [zPadDLeft, zPadDRight, zPadHLeft, zPadHRight, zPadWLeft,
         zPadWRight] = TFNodesAST.helper_findPadding(imgH, imgW, FH, FW,
                                                     strideH, strideW,
                                                     paddingUsedStr, imgD, FD,
                                                     strideD)

        options = {}
        options[AST.PaddingKeysDict.FD] = FD
        options[AST.PaddingKeysDict.FH] = FH
        options[AST.PaddingKeysDict.FW] = FW
        options[AST.PaddingKeysDict.zPadDLeft] = zPadDLeft
        options[AST.PaddingKeysDict.zPadDRight] = zPadDRight
        options[AST.PaddingKeysDict.zPadHLeft] = zPadHLeft
        options[AST.PaddingKeysDict.zPadHRight] = zPadHRight
        options[AST.PaddingKeysDict.zPadWLeft] = zPadWLeft
        options[AST.PaddingKeysDict.zPadWRight] = zPadWRight
        options[AST.PaddingKeysDict.strideD] = strideD
        options[AST.PaddingKeysDict.strideH] = strideH
        options[AST.PaddingKeysDict.strideW] = strideW
        options[AST.PaddingKeysDict.ConvDim] = 3
        return (None, {
            curNode.getName():
            AST.BOp(AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]),
                    TFNodesAST.getOperatorsIdx('#'),
                    AST.ID(dictNodeNameToOutVarStr[inputsRef[1]]), options)
        })
Example no. 15
0
def update_program_with_new_node(innermost_let_ast_node, new_node,
                                 new_node_name, mtdAST):
    cur_out_var_ast_node = AST.ID(new_node_name)
    new_let_node = AST.Let(cur_out_var_ast_node, new_node,
                           cur_out_var_ast_node)
    mtdAST.visit(new_let_node, {
        AST.ASTNode.mtdKeyTFOpName: 'no',
        AST.ASTNode.mtdKeyTFNodeName: 'no'
    })
    # Update the expr of the previous innermost Let node and make the new Let node the innermost one
    innermost_let_ast_node.expr = new_let_node
    innermost_let_ast_node = new_let_node

    # node_name_to_out_var_dict[node.outputs[0]] = new_node_name
    return innermost_let_ast_node
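A self-contained toy that mimics the threading logic above with minimal stand-ins (the Let and ID classes and the names here are hypothetical, not the project's AST module), just to show how each call rewires the previous innermost node's expr and returns the new innermost Let:

class ID:
    def __init__(self, name): self.name = name

class Let:
    def __init__(self, name, decl, expr): self.name, self.decl, self.expr = name, decl, expr

def add_binding(innermost, new_node, new_name):
    var = ID(new_name)
    new_let = Let(var, new_node, var)   # expr temporarily points at the bound variable
    innermost.expr = new_let            # splice under the previous innermost Let
    return new_let                      # the new Let becomes the innermost one

root = Let(ID("x0"), "input", ID("x0"))
inner = add_binding(root, "e1", "x1")
inner = add_binding(inner, "e2", "x2")
# root now encodes:  let x0 = input in (let x1 = e1 in (let x2 = e2 in x2))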
Example no. 16
0
	def Transpose(node, value_info, node_name_to_out_var_dict, innermost_let_ast_node, out_var_count, mtdAST):
		node = OnnxNode(node) 
		if(DEBUG):
			print(node)

		inputsRef = node.inputs
		assert(len(inputsRef)==1)

		seedot_output_ast = AST.Transpose(AST.ID(node_name_to_out_var_dict[inputsRef[0]]), node.attrs['perm'])
		output_name = get_new_var_name(out_var_count)
		innermost_let_ast_node = update_program_with_new_node(innermost_let_ast_node, seedot_output_ast, output_name, mtdAST)
		out_var_count += 1
		node_name_to_out_var_dict[node.outputs[0]] = output_name

		return (innermost_let_ast_node, out_var_count)	
Example no. 17
0
	def Flatten(node, value_info, node_name_to_out_var_dict, innermost_let_ast_node, out_var_count, mtdAST):
		node = OnnxNode(node) 
		if(DEBUG):
			print(node)

		inputsRef = node.inputs
		assert(len(inputsRef)==1)

		seedot_output_ast = AST.Reshape(AST.ID(node_name_to_out_var_dict[inputsRef[0]]), list(value_info[node.outputs[0]][1]), None)
		output_name = get_new_var_name(out_var_count)
		innermost_let_ast_node = update_program_with_new_node(innermost_let_ast_node, seedot_output_ast, output_name, mtdAST)
		out_var_count += 1
		node_name_to_out_var_dict[node.outputs[0]] = output_name

		return (innermost_let_ast_node, out_var_count)		
Example no. 18
0
	def Identity(graph : Graph.Graph, curNode : Graph.Node, dictNodeNameToOutVarStr : dict, extraNodeInfoDict : dict):
		# In SeeDot, J2 = J1 creates a new reference to J1, so the corresponding
		#	code cannot simply be J2 = J1. Instead, create a new tensor first and
		#	then copy the old one into it.
		inputsRef = curNode.getInputsRef()
		assert(len(inputsRef)==1)

		curNodeDataType = curNode.getAttrMapRef()["T"].getDataType()
		assert(curNodeDataType is not Graph.DataTypeEnum.DT_INVALID)
		
		curNodeShape = extraNodeInfoDict[curNode.getName()][0]
		retAST = AST.UninterpFuncCall(curNodeShape,
									TFNodesAST.UninterpFuncCallNames.CreateIdentity.name, 
									[AST.ID(dictNodeNameToOutVarStr[inputsRef[0]])])
		return (None, { curNode.getName() : retAST})
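The comment at the top of Identity describes an aliasing concern: a plain assignment makes both names point at the same tensor, so an update through one is visible through the other. A small numpy demonstration of that concern (numpy is only used to illustrate; SeeDot tensors are not numpy arrays).

import numpy as np

J1 = np.zeros(3)
J2 = J1                   # plain assignment: J2 aliases J1's buffer
J2[0] = 5.0
print(J1[0])              # 5.0 - the write through J2 changed J1

J3 = np.empty_like(J1)    # create a fresh tensor first ...
J3[:] = J1                # ... then copy the old values into it
J3[1] = 7.0
print(J1[1])              # 0.0 - J1 is unaffected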
Example no. 19
0
	def Squeeze(graph : Graph.Graph, curNode : Graph.Node, dictNodeNameToOutVarStr : dict, extraNodeInfoDict : dict):
		inputsRef = curNode.getInputsRef()
		inputTensorShape = extraNodeInfoDict[inputsRef[0]][0]
		inputTensorRank = len(inputTensorShape)

		squeezeDims = curNode.getAttrMapRef()["squeeze_dims"].getList().getILi()
		squeezeDimsRank = len(squeezeDims)

		return (None, { curNode.getName() : AST.UninterpFuncCall(extraNodeInfoDict[curNode.getName()][0],
											TFNodesAST.UninterpFuncCallNames.Squeeze.name,
											list(map(lambda x : AST.Int(x, 32, isSecret=False), squeezeDims)) + 
											[
											AST.ID(dictNodeNameToOutVarStr[inputsRef[0]])
											]
											)})
Example no. 20
0
    def Add(node, value_info, node_name_to_out_var_dict,
            innermost_let_ast_node, out_var_count, mtdAST):
        node = OnnxNode(node)
        if (DEBUG):
            print(node)
        inputsRef = node.inputs
        assert (len(inputsRef) == 2)

        reshaped_input_name = get_new_var_name(out_var_count)
        reshaped_input = get_reshaped_input_ast(inputsRef[0], value_info,
                                                node_name_to_out_var_dict)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, reshaped_input, reshaped_input_name,
            mtdAST)
        out_var_count += 1

        reshaped_input_name1 = get_new_var_name(out_var_count)
        reshaped_input1 = get_reshaped_input_ast(inputsRef[1], value_info,
                                                 node_name_to_out_var_dict)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, reshaped_input1, reshaped_input_name1,
            mtdAST)
        out_var_count += 1

        seedot_output_ast = AST.BOp(AST.ID(reshaped_input_name),
                                    getOperatorsIdx('+'),
                                    AST.ID(reshaped_input_name1))
        output_name = get_new_var_name(out_var_count)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, seedot_output_ast, output_name, mtdAST)
        out_var_count += 1

        reshaped_output_name = get_new_var_name(out_var_count)
        onnx_output_ast = get_reshaped_output_ast(node.outputs[0], value_info,
                                                  output_name)
        innermost_let_ast_node = update_program_with_new_node(
            innermost_let_ast_node, onnx_output_ast, reshaped_output_name,
            mtdAST)
        out_var_count += 1
        node_name_to_out_var_dict[node.outputs[0]] = reshaped_output_name

        if (DEBUG):
            print(node.outputs[0])
            # Print the ONNX input/output shapes recorded in value_info
            print(list(value_info[inputsRef[0]][1]),
                  list(value_info[inputsRef[1]][1]), '->',
                  list(value_info[node.outputs[0]][1]))

        return (innermost_let_ast_node, out_var_count)
Example no. 21
0
    def helper_processPool(graph: Graph.Graph, curNode: Graph.Node,
                           dictNodeNameToOutVarStr: dict,
                           extraNodeInfoDict: dict, typeOfPool: str):
        inputsRef = curNode.getInputsRef()
        assert (len(inputsRef) == 1)

        options = {}

        stridesUsed = curNode.getAttrMapRef()["strides"].getList().getILi()
        assert ((stridesUsed[0] == 1) and (stridesUsed[3] == 1))
        strideH = stridesUsed[1]
        strideW = stridesUsed[2]

        kSizeUsed = curNode.getAttrMapRef()["ksize"].getList().getILi()
        assert ((kSizeUsed[0] == 1) and (kSizeUsed[3] == 1))
        kSizeH = kSizeUsed[1]
        kSizeW = kSizeUsed[2]

        inputShape = extraNodeInfoDict[inputsRef[0]][0]
        imgH = inputShape[1]
        imgW = inputShape[2]

        paddingUsedStr = curNode.getAttrMapRef()["padding"].getS()
        [zPadHLeft, zPadHRight, zPadWLeft,
         zPadWRight] = TFNodesAST.helper_findPadding(imgH, imgW, kSizeH,
                                                     kSizeW, strideH, strideW,
                                                     paddingUsedStr)

        poolType = None
        if typeOfPool == 'MAXPOOL': poolType = AST.Pool.PoolType.MaxPool
        elif typeOfPool == 'AVGPOOL': poolType = AST.Pool.PoolType.AvgPool
        else:
            print("Unknown type of pooling layer.", file=sys.stderr)
            assert (False)
        return (None, {
            curNode.getName():
            AST.Pool(
                poolType, AST.ID(dictNodeNameToOutVarStr[inputsRef[0]]), {
                    AST.PaddingKeysDict.FH: kSizeH,
                    AST.PaddingKeysDict.FW: kSizeW,
                    AST.PaddingKeysDict.zPadHLeft: zPadHLeft,
                    AST.PaddingKeysDict.zPadHRight: zPadHRight,
                    AST.PaddingKeysDict.zPadWLeft: zPadWLeft,
                    AST.PaddingKeysDict.zPadWRight: zPadWRight,
                    AST.PaddingKeysDict.strideH: strideH,
                    AST.PaddingKeysDict.strideW: strideW
                })
        })
Example no. 22
0
	def Input(node, value_info, node_name_to_out_var_dict):
		if(DEBUG):
			print(node.outputs[0])
		# There are two kinds of inputs: initializers (TensorProto, which expose dims and
		# data_type directly) and graph inputs (ValueInfoProto, which expose them under type.tensor_type).
		dims = list(node.dims if hasattr(node, 'dims') else [val.dim_value for val in node.type.tensor_type.shape.dim])
		data_type = node.data_type if hasattr(node, 'data_type') else node.type.tensor_type.elem_type
		return AST.Input(dims, onnx2seedot(data_type))
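A short illustration of the two input kinds the hasattr checks above distinguish, using the onnx helper API (the tensor names and shapes are made up): initializers are TensorProto messages carrying dims and data_type directly, while graph inputs are ValueInfoProto messages carrying them under type.tensor_type.

from onnx import helper, TensorProto

init = helper.make_tensor("w", TensorProto.FLOAT, dims=[2, 3], vals=[0.0] * 6)
print(list(init.dims), init.data_type)                    # dims / data_type directly

graph_in = helper.make_tensor_value_info("x", TensorProto.FLOAT, [1, 3, 224, 224])
print([d.dim_value for d in graph_in.type.tensor_type.shape.dim],
      graph_in.type.tensor_type.elem_type)                # nested under type.tensor_type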
Example no. 23
0
 def enterDeclaration(self, ctx: CParser.DeclarationContext):
     node = AST.ASTDeclarationNode(c_idx=len(self.current_node.children),
                                   ctx=ctx)
     node.parent = self.current_node
     node.scope = self.current_node.scope
     self.current_node.children.append(node)
     self.current_node = node
Example no. 24
0
 def enterReturn(self, ctx: CParser.ReturnContext):
     node = AST.ASTReturnNode(c_idx=len(self.current_node.children),
                              ctx=ctx)
     node.parent = self.current_node
     node.scope = self.current_node.scope
     self.current_node.children.append(node)
     self.current_node = node
Example no. 25
0
    def Placeholder(
        graph: Graph.Graph,
        curNode: Graph.Node,
        dictNodeNameToOutVarStr: dict,
        extraNodeInfoDict: dict,
    ):
        curNodeShapeLi = extraNodeInfoDict[curNode.getName()][0]
        curNodeInputType = curNode.getAttrMapRef()["dtype"].getDataType()
        assert curNodeInputType is not Graph.DataTypeEnum.DT_INVALID

        # NOTE: There has to be some way for Athos to differentiate model from image, since in the compiled code
        # 	(in the scenario of secure inference), model is input by server and image by client.
        # 	We assume in the following that the PlaceHolder op node represents the image and
        # 	all model parameters are represented using Variable op nodes.
        # 	Hence, in the call to AST.Input, we pass inputByParty as CLIENT.

        return (
            None,
            {
                curNode.getName():
                AST.Input(
                    curNodeShapeLi,
                    curNodeInputType.name,
                    isSecret=True,
                    inputByParty=AST.Party.CLIENT,
                )
            },
        )
Example no. 26
0
 def enterLabeledStatement(self, ctx: CParser.LabeledStatementContext):
     label = ctx.Identifier()
     node = AST.ASTLabelStmtNode(label, ctx=ctx)
     node.parent = self.current_node
     node.scope = self.current_node.scope
     self.current_node.children.append(node)
     self.current_node = node
Example no. 27
0
    def VariableV2(
        graph: Graph.Graph,
        curNode: Graph.Node,
        dictNodeNameToOutVarStr: dict,
        extraNodeInfoDict: dict,
    ):
        curNodeShapeLi = curNode.getAttrMapRef()["shape"].getShape().getDimRef()[:]
        curNodeInputType = curNode.getAttrMapRef()["dtype"].getDataType()

        # NOTE: since this becomes an input node right now, it is also prefixed at the top in ProcessTFGraph::prefixAllPlaceHolderNodes()
        # NOTE: There has to be some way for Athos to differentiate model from image, since in the compiled code
        # 	(in the scenario of secure inference), model is input by server and image by client.
        # 	We assume in the following that the PlaceHolder op node represents the image and
        # 	all model parameters are represented using Variable op nodes.
        # 	Hence, in the call to AST.Input, we pass inputByParty as SERVER.
        return (
            None,
            {
                curNode.getName():
                AST.Input(
                    curNodeShapeLi,
                    curNodeInputType.name,
                    isSecret=True,
                    inputByParty=AST.Party.SERVER,
                )
            },
        )
Example no. 28
0
 def StopGradient(graph: Graph.Graph, curNode: Graph.Node,
                  dictNodeNameToOutVarStr: dict, extraNodeInfoDict: dict):
     inputsRef = curNode.getInputsRef()
     return (None, {
         curNode.getName():
         AST.ID(dictNodeNameToOutVarStr[inputsRef[0]])
     })
Example no. 29
0
    def searchUpHandleNode(self, mergeList, AST):
        '''
        Search upward in ASTBefore.
        '''
        structureNode = set()
        for i in mergeList:
            tempNode = AST.getNodeByID(i)
            index = self.getIndexInParent(tempNode.id, AST)
            tempNodeO = tempNode
            tempNode = tempNode.parent
            while True:
                if tempNode is None:
                    break
                elif tempNode.typeLabel == "MethodDeclaration":
                    # print(tempNodeO.id)
                    structureNode.add((tempNode.id, index))
                    break
                # print(tempNode.typeLabel,"----------")
                elif tempNode.typeLabel in self.structureHandle:
                    # print(tempNodeO.id)
                    structureNode.add((tempNode.id, index))
                    # print(tempNode.typeLabel, '123123123123123')
                # print(index)
                index = self.getIndexInParent(tempNode.id, AST)
                tempNode = tempNode.parent

        # print(structureNode)
        return structureNode
Example no. 30
0
    def fillSymbolTable(self):
        if self.expr:
            expr_type, expr_value = self.expr.fillSymbolTable()

            if AST.getEntry(self.id) is not None:
                if AST.getEntry(self.id).type != expr_type:
                    raise Exception('Error when trying to assign type {0} to {1}'.format(expr_type, AST.getEntry(self.id).type))
            return SymbolTableEntry(self.id, expr_type, expr_value)
        else:
            # check if self.id is of type group
            if AST.getEntry(self.id) is None:
                raise Exception('Undefined variable {}'.format(self.id))
            elif AST.getEntry(self.id).type != 'Group' :
                raise Exception('Expected variable type Group but got {}'.format(AST.getEntry(self.id).type))
            
            self.cmd.generateInstruction()
Example no. 31
0
def doeval(ele, env):
    if type(ele) == str:
        ele = AST([ele])

    # print env, ele
    cmd = ele.get_cmd()
    args = ele.get_args()
    if isinstance(cmd, AST):
        cur_env = Env(env.name + '_0', env)
        opt = doeval(cmd, cur_env)
    else:
        opt = env.search_symbol(cmd)
        if opt is None:
            print('symbol %s not found!' % cmd)
            return None
    rs = opt.apply(env, args)
    # print 'eval result: %s' % str(rs)
    return rs