def parseReductionFile(cm, reduction_file, verbose, benchmarkOpt, estimateOpt):
    """Parse a security-reduction SDL file and collect analysis data for the
    symmetric-to-asymmetric group conversion.

    Parses `reduction_file` with the global `sdl` parser, extracts per-algorithm
    statements/types/dependency information for the reduction's setup, query
    (and, for PKENC, challenge) routines, locates pairing inputs and traces them
    back to the scheme's generators, and optionally builds DOT dependency graphs.

    Parameters:
        cm             -- config module/object (reducSetupFuncName, reducQueryFuncName,
                          reducChallengeFuncName, schemeType, reducMasterPubVars,
                          reducMasterSecVars, optional reductionMap/graphit/short/...)
        reduction_file -- path to the reduction SDL file to parse
        verbose        -- enables diagnostic printing throughout
        benchmarkOpt   -- NOTE(review): accepted but never used in this function
        estimateOpt    -- stored as options['computeSize'] for downstream size estimation

    Returns:
        reductionData (dict) -- aggregated analysis results: parsed names/types,
        dependency maps, pairing-variable traceback info, options, and (optionally)
        the constructed reduction dependency graph.

    Side effects: mutates global `sdl` parser state (masterPubVars/masterSecVars,
    parseFile), sets `cm.<functionOrder>`, and prints diagnostics.
    """
    # setup sdl parser configs: tell the parser which variables are the
    # reduction's master public/secret variables before parsing.
    sdl.masterPubVars = cm.reducMasterPubVars
    sdl.masterSecVars = cm.reducMasterSecVars
    if not hasattr(cm, "schemeType"):
        sys.exit("configAutoGroup: need to set 'schemeType' in config.")
    # Record the ordered list of reduction algorithms on the config object.
    # NOTE(review): `functionOrder` here must be a module-level string constant
    # (the attribute name, read back below as cm.functionOrder) — confirm it is
    # defined at file scope, otherwise this is a NameError.
    if cm.schemeType == PKENC:
        funcOrder = [
            cm.reducSetupFuncName, cm.reducQueryFuncName,
            cm.reducChallengeFuncName
        ]
        setattr(cm, functionOrder, funcOrder)
    elif cm.schemeType == PKSIG:
        funcOrder = [cm.reducSetupFuncName, cm.reducQueryFuncName]
        setattr(cm, functionOrder, funcOrder)
    else:
        sys.exit("configAutoGroup: unrecognized 'schemeType' in config.")

    #TODO: create something like this for assumption?
    #for i in encConfigParams:
    #    if not hasattr(cm, i):
    #        errorOut(i)

    # Security parameter / pairing curve selection.
    if not hasattr(cm, "secparam"):
        secparam = "BN256"  # default pairing curve for now
    else:
        secparam = cm.secparam

    #do we need this for the assumption?
    dropFirst = None
    if hasattr(cm, "dropFirst"):
        dropFirst = cm.dropFirst

    # Options consumed by downstream code-generation/estimation stages.
    # NOTE(review): `dest_path` is a free name — presumably a module-level
    # output directory; confirm it is defined at file scope.
    options = {
        'secparam': secparam,
        'userFuncList': [],
        'computeSize': estimateOpt,
        'dropFirst': dropFirst,
        'path': dest_path
    }

    # Parse the reduction SDL; this populates global sdl parser state
    # (sdl.assignInfo, var types, function statements, ...).
    sdl.parseFile(reduction_file, verbose, ignoreCloudSourcing=True)
    assignInfo_reduction = sdl.getAssignInfo()

    # Aggregate the core parse results. 'userCodeBlocks' are functions present
    # in the SDL but not part of the declared reduction algorithm order.
    reductionData = {
        'sdl_name':
        sdl.assignInfo[sdl.NONE_FUNC_NAME][BV_NAME].getAssignNode().getRight(
        ).getAttribute(),
        'setting':
        sdl.assignInfo[sdl.NONE_FUNC_NAME][ALGEBRAIC_SETTING].getAssignNode(
        ).getRight().getAttribute(),
        'assignInfo': assignInfo_reduction,
        'typesBlock': sdl.getFuncStmts(TYPES_HEADER),
        'userCodeBlocks':
        list(
            set(list(assignInfo_reduction.keys())).difference(
                cm.functionOrder + [TYPES_HEADER, NONE_FUNC_NAME]))
    }

    # Optional variable-renaming map (reduction var -> scheme var).
    # NOTE(review): 'varmap' is only set when cm.reductionMap exists, but it is
    # indexed unconditionally further below (reductionData['varmap'][key]) —
    # configs without reductionMap would raise KeyError there; confirm intended.
    if hasattr(cm, "reductionMap"):
        reductionData['varmap'] = cm.reductionMap

    # this consists of the type of the input scheme (e.g., symmetric)
    setting = sdl.assignInfo[sdl.NONE_FUNC_NAME][
        ALGEBRAIC_SETTING].getAssignNode().getRight().getAttribute()
    # name of the scheme
    sdl_name = sdl.assignInfo[
        sdl.NONE_FUNC_NAME][BV_NAME].getAssignNode().getRight().getAttribute()
    typesBlock = sdl.getFuncStmts(TYPES_HEADER)
    info = {'verbose': verbose}

    # we want to ignore user defined functions from our analysis
    # (unless certain variables that we care about are manipulated there)
    userCodeBlocks = list(
        set(list(assignInfo_reduction.keys())).difference(
            cm.functionOrder + [TYPES_HEADER, NONE_FUNC_NAME]))
    options['userFuncList'] += userCodeBlocks

    # Reconstruct the TYPES block source lines in original line order.
    lines = list(typesBlock[0].keys())
    lines.sort()
    typesBlockLines = [
        i.rstrip() for i in sdl.getLinesOfCodeFromLineNos(lines)
    ]
    begin = ["BEGIN :: " + TYPES_HEADER]
    end = ["END :: " + TYPES_HEADER]

    # start constructing the preamble for the Asymmetric SDL output
    # NOTE(review): newLines0/newLines1 are built but never used in this
    # function — possibly consumed by removed/later code; confirm.
    newLines0 = [
        BV_NAME + " := " + sdl_name,
        SETTING + " := " + sdl.ASYMMETRIC_SETTING
    ]
    newLines1 = begin + typesBlockLines + end

    # this fact is already verified by the parser
    # but if scheme claims symmetric
    # and really an asymmetric scheme then parser will
    # complain.
    assert setting == sdl.SYMMETRIC_SETTING, "No need to convert to asymmetric setting."

    # determine user preference in terms of keygen or encrypt
    short = SHORT_DEFAULT  # default option
    if hasattr(cm, 'short'):
        if cm.short in SHORT_OPTIONS:
            short = cm.short
    print("reducing size of '%s'" % short)

    # Types declared in the TYPES header; typesH keeps an unmodified copy
    # before per-algorithm types are merged into varTypes.
    varTypes = dict(sdl.getVarTypes().get(TYPES_HEADER))
    typesH = dict(varTypes)
    reductionData['typesH'] = typesH
    # NOTE(review): redundant — schemeType presence was already checked above.
    if not hasattr(cm, 'schemeType'):
        sys.exit("'schemeType' option missing in specified config file.")

    pairingSearch = []
    # extract the statements, types, dependency list, influence list and exponents of influence list
    # for each algorithm in the SDL scheme
    if cm.schemeType == PKENC:
        # PKENC reductions have three algorithms: setup, query, challenge.
        (stmtS, typesS, depListS, depListNoExpS, infListS,
         infListNoExpS) = sdl.getVarInfoFuncStmts(cm.reducSetupFuncName)
        (stmtQ, typesQ, depListQ, depListNoExpQ, infListQ,
         infListNoExpQ) = sdl.getVarInfoFuncStmts(cm.reducQueryFuncName)
        (stmtC, typesC, depListC, depListNoExpC, infListC,
         infListNoExpC) = sdl.getVarInfoFuncStmts(cm.reducChallengeFuncName)
        depListData = {
            cm.reducChallengeFuncName: depListNoExpC,
            cm.reducQueryFuncName: depListNoExpQ,
            cm.reducSetupFuncName: depListNoExpS
        }
        varTypes.update(typesS)
        varTypes.update(typesQ)
        varTypes.update(typesC)
        # Optional DOT-graph construction (single-reduction mode only;
        # the multi-reduction PKENC graph is built at the end of this function).
        if hasattr(cm, 'graphit') and cm.graphit and cm.single_reduction:
            # setup: forward influence graph; query/challenge: backward
            # dependency graphs rooted at G1 variables.
            dg_reduc_setup = generateGraphForward(
                cm.reducSetupFuncName, (stmtS, typesS, infListNoExpS))
            dg_reduc_setup.adjustByMap(reductionData.get('varmap'))
            dg_reduc_query = generateGraph(cm.reducQueryFuncName,
                                           (typesQ, depListNoExpQ), types.G1,
                                           varTypes)
            dg_reduc_query.adjustByMap(reductionData.get('varmap'))
            dg_reduc_chall = generateGraph(cm.reducChallengeFuncName,
                                           (typesC, depListNoExpC), types.G1,
                                           varTypes)
            dg_reduc_chall.adjustByMap(reductionData.get('varmap'))
            if verbose:
                print("<=== Reduction Setup Graph ===>")
                print(dg_reduc_setup)
                print("<=== Reduction Setup Graph ===>")
                print("<=== Reduction Query Graph ===>")
                print(dg_reduc_query)
                print("<=== Reduction Query Graph ===>")
                print("<=== Reduction Challenge Graph ===>")
                print(dg_reduc_chall)
                print("<=== Reduction Challenge Graph ===>")
            # Combine the three per-algorithm graphs into one reduction graph.
            dg_reduction = DotGraph("reduction")
            dg_reduction += dg_reduc_setup + dg_reduc_query + dg_reduc_chall
            if verbose:
                print("<=== Reduction Graph ===>")
                print(dg_reduction)
                print("<=== Reduction Graph ===>")
            reductionData['reductionGraph'] = dg_reduction

        # TODO: expand search to encrypt and potentially setup
        pairingSearch += [stmtS, stmtQ, stmtC]  # aka start with decrypt.

        # NOTE(review): curveID / dropFirstKeyword are free names — presumably
        # string constants defined at file/module scope; confirm.
        info[curveID] = options['secparam']
        info[dropFirstKeyword] = options[dropFirstKeyword]
        gen = Generators(info)
        # JAA: commented out for benchmarking
        #print("List of generators for scheme")
        # retrieve the generators selected by the scheme
        # typically found in the setup routine in most cases.
        # extract the generators from the setup and keygen routine for later use
        if hasattr(cm, 'reducSetupFuncName'):
            gen.extractGens(stmtS, typesS)
            if hasattr(cm, 'reducQueryFuncName'):
                gen.extractGens(stmtQ, typesQ)
            if hasattr(cm, 'reducChallengeFuncName'):
                gen.extractGens(stmtC, typesC)
        else:
            sys.exit(
                "Assumption failed: setup not defined for this function. Where to extract generators?"
            )
        generators = gen.getGens()
        # JAA: commented out for benchmarking
        #print("Generators extracted: ", generators)
    elif cm.schemeType == PKSIG:
        # PKSIG reductions have only setup and query algorithms.
        (stmtS, typesS, depListS, depListNoExpS, infListS,
         infListNoExpS) = sdl.getVarInfoFuncStmts(cm.reducSetupFuncName)
        (stmtQ, typesQ, depListQ, depListNoExpQ, infListQ,
         infListNoExpQ) = sdl.getVarInfoFuncStmts(cm.reducQueryFuncName)
        depListData = {
            cm.reducQueryFuncName: depListNoExpQ,
            cm.reducSetupFuncName: depListNoExpS
        }
        varTypes.update(typesS)
        varTypes.update(typesQ)
        if hasattr(cm, 'graphit') and cm.graphit:
            dg_reduc_setup = generateGraphForward(
                cm.reducSetupFuncName, (stmtS, typesS, infListNoExpS))
            dg_reduc_setup.adjustByMap(reductionData.get('varmap'))
            #dg_reduc_query = generateGraphForward(cm.reducQueryFuncName, (stmtQ, typesQ, infListNoExpQ))
            #dg_reduc_query.adjustByMap(reductionData.get('varmap'))
            # Simplify the query dependency map before the backward graph.
            new_depListNoExpQ = simplifyDepMap(stmtQ, typesQ, infListNoExpQ,
                                               depListNoExpQ)
            dg_reduc_query = generateGraph(cm.reducQueryFuncName,
                                           (typesQ, new_depListNoExpQ),
                                           types.G1, varTypes)
            dg_reduc_query.adjustByMap(reductionData.get('varmap'))
            if verbose:
                print("<=== Reduction Setup Graph ===>")
                print(dg_reduc_setup)
                print("<=== Reduction Setup Graph ===>")
                print("<=== Reduction Query Graph (backward) ===>")
                print(dg_reduc_query)
                print("<=== Reduction Query Graph (backward) ===>")
            dg_reduction = DotGraph("reduction")
            dg_reduction += dg_reduc_setup + dg_reduc_query
            if verbose:
                print("<=== Reduction Graph ===>")
                print(dg_reduction)
                print("<=== Reduction Graph ===>")
            reductionData['reductionGraph'] = dg_reduction

        # TODO: expand search to encrypt and potentially setup
        pairingSearch += [stmtS, stmtQ]  # aka start with decrypt.

        info[curveID] = options['secparam']
        info[dropFirstKeyword] = options[dropFirstKeyword]
        gen = Generators(info)
        # JAA: commented out for benchmarking
        #print("List of generators for scheme")
        # retrieve the generators selected by the scheme
        # typically found in the setup routine in most cases.
        # extract the generators from the setup and keygen routine for later use
        if hasattr(cm, 'reducSetupFuncName'):
            gen.extractGens(stmtS, typesS)
            if hasattr(cm, 'reducQueryFuncName'):
                gen.extractGens(stmtQ, typesQ)
        else:
            sys.exit(
                "Assumption failed: setup not defined for this function. Where to extract generators?"
            )
        generators = gen.getGens()
        # JAA: commented out for benchmarking
        #print("Generators extracted: ", generators)

    # need a Visitor class to build these variables
    # TODO: expand to other parts of algorithm including setup, keygen, encrypt
    # Visits each pairing computation in the SDL and
    # extracts the inputs. This is the beginning of the
    # analysis of these variables as the SDL is converted into
    # an asymmetric scheme.
    hashVarList = []
    pair_vars_G1_lhs = []
    pair_vars_G1_rhs = []
    gpv = GetPairingVariables(pair_vars_G1_lhs, pair_vars_G1_rhs)
    gpv.setDepListData(depListData)
    for eachStmt in pairingSearch:  # loop through each pairing statement
        lines = eachStmt.keys()
        # for each line, do the following
        for i in lines:
            if type(eachStmt[i]
                    ) == sdl.VarInfo:  # make sure we have the Var Object
                # assert that the statement contains a pairing computation
                gpv.setFuncName(eachStmt[i].getFuncName())
                if HasPairings(eachStmt[i].getAssignNode()):
                    path_applied = []
                    # split pairings if necessary so that we don't influence
                    # the solve in anyway. We can later recombine these during
                    # post processing of the SDL
                    eachStmt[i].assignNode = SplitPairings(
                        eachStmt[i].getAssignNode(), path_applied)
                    # JAA: commented out for benchmarking
                    #if len(path_applied) > 0: print("Split Pairings: ", eachStmt[i].getAssignNode())
                    if info['verbose']:
                        print("Each: ", eachStmt[i].getAssignNode())
                    #print(eachStmt[i].assignNode)
                    # collect the pairing's input variables via the visitor
                    sdl.ASTVisitor(gpv).preorder(eachStmt[i].getAssignNode())
                elif eachStmt[i].getHashArgsInAssignNode():
                    # in case there's a hashed value...build up list and check later to see if it appears
                    # in pairing variable list
                    hashVarList.append(str(eachStmt[i].getAssignVar()))
                else:
                    continue  # not interested

    # constraint list narrows the solutions that
    # we care about
    constraintList = []
    # for example, include any hashed values that show up in a pairing by default
    for i in hashVarList:
        if i in pair_vars_G1_lhs or i in pair_vars_G1_rhs:
            constraintList.append(i)
    # JAA: commented out for benchmarking

    # for each pairing variable, we construct a dependency graph all the way back to
    # the generators used. The input of assignTraceback consists of the list of SDL statements,
    # generators from setup, type info, and the pairing variables.
    # We do this analysis for both sides
    info['G1_lhs'] = (pair_vars_G1_lhs,
                      assignTraceback(assignInfo_reduction, generators,
                                      varTypes, pair_vars_G1_lhs,
                                      constraintList))
    info['G1_rhs'] = (pair_vars_G1_rhs,
                      assignTraceback(assignInfo_reduction, generators,
                                      varTypes, pair_vars_G1_rhs,
                                      constraintList))

    # Filter the per-algorithm dependency lists: drop empty entries, I/O
    # pseudo-variables, the ciphertext/queried-secret variables, and master
    # public/secret variables. depList uses varmap-renamed keys,
    # depListUnaltered keeps the original names.
    depList = {}
    depListUnaltered = {}
    if cm.schemeType == PKENC:
        for i in [depListS, depListQ, depListC]:
            for (key, val) in i.items():
                if (not (len(val) == 0) and not (key == 'input')
                        and not (key == 'output')
                        and not (key == cm.reducCiphertextVar)
                        and not (key == cm.reducQueriesSecVar)
                        and not (key in cm.reducMasterPubVars)
                        and not (key in cm.reducMasterSecVars)):
                    if (key in reductionData['varmap']):
                        depList[reductionData['varmap'][key]] = val
                        depListUnaltered[key] = val
                    else:
                        depList[key] = val
                        depListUnaltered[key] = val
    elif cm.schemeType == PKSIG:
        for i in [depListS, depListQ]:
            for (key, val) in i.items():
                if (not (len(val) == 0) and not (key == 'input')
                        and not (key == 'output')
                        and not (key == cm.reducCiphertextVar)
                        and not (key == cm.reducQueriesSecVar)
                        and not (key in cm.reducMasterPubVars)
                        and not (key in cm.reducMasterSecVars)):
                    if (key in reductionData['varmap']):
                        depList[reductionData['varmap'][key]] = val
                        depListUnaltered[key] = val
                    else:
                        depList[key] = val
                        depListUnaltered[key] = val

    # Trace the filtered dependencies back to generators as well.
    info['deps'] = (depListUnaltered,
                    assignTraceback(assignInfo_reduction, generators, varTypes,
                                    depListUnaltered, constraintList))
    prunedDeps = {}
    for (key, val) in info['deps'][1].items():
        if (not (len(val) == 0)):
            prunedDeps[key] = val
    the_map = gpv.pairing_map
    # Stash everything collected so far in the returned structure.
    reductionData['info'] = info
    reductionData['depList'] = depList
    reductionData['deps'] = info['deps']
    reductionData['prunedMap'] = prunedDeps
    reductionData['G1_lhs'] = info['G1_lhs']
    reductionData['G1_rhs'] = info['G1_rhs']
    reductionData['the_map'] = the_map
    reductionData['options'] = options
    reductionData['varTypes'] = varTypes

    #prune varTypes to remove ZR that we don't care about
    # Keep only dependency entries whose members live in G1 or G2.
    additionalDeps = dict(list(reductionData['info']['deps'][0].items()))
    items = []  # NOTE(review): unused local
    newlist = []
    newDeps = {}
    for (key, val) in additionalDeps.items():
        #items = list(additionalDeps[key])
        newlist = []
        for j in val:
            if ((sdl.getVarTypeFromVarName(j, None, True) == types.G1)
                    or (sdl.getVarTypeFromVarName(j, None, True) == types.G2)):
                newlist.append(j)
        if (not (len(set(newlist)) == 0)):
            if (key in reductionData['varmap']):
                newDeps[reductionData['varmap'][key]] = set(newlist)
            else:
                newDeps[key] = set(newlist)
            #newDeps[key] = set(newlist)
    reductionData['newDeps'] = newDeps
    reductionData['options']['type'] = "reduction"
    reductionData['reductionFile'] = reduction_file

    # Multi-reduction PKENC graph construction (the single-reduction case was
    # handled earlier). Combines backward dependency graphs with best-effort
    # forward influence graphs for query and challenge.
    if cm.schemeType == PKENC and not cm.single_reduction:
        if hasattr(cm, 'graphit') and cm.graphit:
            # special variables that we don't want in the graph
            exclude_list = [cm.reducQueriesSecVar
                            ] + cm.reducMasterPubVars + cm.reducMasterSecVars
            dg_reduc_setup = generateGraphForward(
                cm.reducSetupFuncName, (stmtS, typesS, infListNoExpS))
            dg_reduc_setup.adjustByMap(reductionData.get('varmap'))
            # process the query
            dg_reduc_query = generateGraph(
                cm.reducQueryFuncName, (typesQ, depListNoExpQ), types.G1,
                varTypes)  #, stmts=stmtQ, infListNoExp=infListNoExpQ)
            dg_reduc_query.adjustByMap(reductionData.get('varmap'))
            try:
                newVarType = dict(typesS)
                newVarType.update(typesQ)
                # special variables that we don't want in the graph
                dg_reduc_query_forward = generateGraphForward(
                    cm.reducQueryFuncName, (stmtQ, newVarType, infListNoExpQ),
                    exclude=exclude_list)
                dg_reduc_query_forward.adjustByMap(reductionData.get('varmap'))
                # combine with backward analysis
                dg_reduc_query += dg_reduc_query_forward
            except Exception as e:
                print("EXCEPTION: ", cm.reducQueryFuncName,
                      " forward tracing failed!")
                # NOTE(review): Exception objects have no .traceback() method —
                # if this handler fires it will raise AttributeError; likely
                # traceback.format_exc() was intended. Confirm.
                print(e.traceback())
            dg_reduc_chall = generateGraph(cm.reducChallengeFuncName,
                                           (typesC, depListNoExpC), types.G1,
                                           varTypes)
            dg_reduc_chall.adjustByMap(reductionData.get('varmap'))
            try:
                newVarType.update(typesC)
                dg_reduc_chall_forward = generateGraphForward(
                    cm.reducChallengeFuncName,
                    (stmtC, newVarType, infListNoExpC),
                    exclude=exclude_list)
                dg_reduc_chall_forward.adjustByMap(reductionData.get('varmap'))
                # combine with backward analysis
                dg_reduc_chall += dg_reduc_chall_forward
            except Exception as e:
                print("EXCEPTION: ", cm.reducChallengeFuncName,
                      " forward tracing failed!")
                # NOTE(review): same .traceback() concern as above.
                print(e.traceback())
            if verbose:
                print("<=== Reduction Setup Graph ===>")
                print(dg_reduc_setup)
                print("<=== Reduction Setup Graph ===>")
                print("<=== Reduction Query Graph ===>")
                print(dg_reduc_query)
                print("<=== Reduction Query Graph ===>")
                print("<=== Reduction Challenge Graph ===>")
                print(dg_reduc_chall)
                print("<=== Reduction Challenge Graph ===>")
            dg_reduction = DotGraph("reduction")
            dg_reduction += dg_reduc_setup + dg_reduc_query + dg_reduc_chall
            if verbose:
                print("<=== Reduction Graph ===>")
                print(dg_reduction)
                print("<=== Reduction Graph ===>")
            reductionData['reductionGraph'] = dg_reduction

    #if hasattr(cm, "assumption_reduction_map"):
    #    reductionData['assumption'] = cm.assumption_reduction_map[reduction_name]
    #else:
    #    reductionData['assumption'] = ""
    return reductionData