Code Example #1
File: ops_def.py Project: tanmaytirpankar/Satire
def _solve_(node, errList1, errList2, herr):

    errList2.append(herr * pow(2, -53))
    expr1 = sum([seng.Abs(erri) for erri in errList1])
    expr2 = sum([seng.Abs(erri) for erri in errList2])

    if (seng.count_ops(expr1) >= opMax):
        #	print("Solving Ferror @depth: ", node.depth)
        print("\nSolving f@depth :", node.depth)
        errList1 = [solve_remaining_error(expr1)]

    expr2_ops = seng.count_ops(expr2)
    #print("Solving h@depth :", node.depth)
    #errList2 = [solve_remaining_error(expr2)]

    if expr2_ops >= SopMax:
        print("\nSolving", expr2_ops, " h@depth :", node.depth)
        #errList2 = [solve_remaining_error(errList2)]
        errList2 = [_partial_solve_(errList2)]
    else:
        print("bwahahaha!", expr2_ops, expr2, SopMax)

    #errList2 = [solve_remaining_error(errList2+[herr*pow(2,-53)])]
    #if(seng.count_ops(expr2) > opMax):
    #	print("Solving Herror @depth: ", node.depth, expr2_ops)
    #	errList2 = [_partial_solve_(errList2+[herr*pow(2,-53)])]
    #else:
    #	print("Else:", seng.count_ops(expr2_ops))
    #	errList2.append(herr*pow(2,-53))

    return [errList1, errList2]
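
The pattern in _solve_ above recurs throughout these examples: keep the accumulated error terms symbolic until count_ops of their summed magnitude crosses a threshold, then collapse the whole list into a single concrete bound. A minimal sketch of that gate, assuming symengine is imported as seng (as in Satire) and using a hypothetical bound_abs callback in place of solve_remaining_error / _partial_solve_:

import symengine as seng

OP_MAX = 1000  # hypothetical threshold, analogous to opMax / SopMax above

def collapse_if_large(err_terms, bound_abs):
    # Sum |e_i| symbolically; once the expression gets too large to keep
    # growing, replace the whole list with one concrete bound.
    expr = sum(seng.Abs(e) for e in err_terms)
    if seng.count_ops(expr) >= OP_MAX:
        return [bound_abs(expr)]
    return err_terms
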
Code Example #2
File: utils_bak.py Project: sljiaa/Satire
def generate_signature(sym_expr):
	try:
		if(seng.count_ops(sym_expr)==0):
			const_intv = float(str(sym_expr))
			return [const_intv, const_intv]
	except ValueError:
	    pass

	#d = OrderedDict()
	#freeSyms = [str(i) for i in sym_expr.free_symbols]
	#freeSyms.sort()
	#for i in range(0,len(freeSyms)):
	#	inp = freeSyms[i]
	#	#print(inp, type(inp), Globals.inptbl[inp])
	#	d[inp] = str(i)+"_"+"{intv}".format(intv=Globals.inputVars[inp]["INTV"])

	#regex = re.compile("(%s)" % "|".join(map(re.escape, d.keys())))

	#strSig = regex.sub(lambda mo: d[mo.string[mo.start():mo.end()]], str(sym_expr))
	#sig = hashSig(strSig, "md5")
	#print("STRSIG->", strSig, sig)
	#Globals.hashBank[sig] = Globals.hashBank.get(sig, utils.invoke_gelpia(sym_expr, self._inputStr))
	#s1 = time.time()
	hbs = len(Globals.hashBank.keys())
	#s2 = time.time()
	#print("\nTime for hashing sig = ", s2 - s1)
	#print("************ HBS : ", hbs, " ******************")
	if(hbs > 100):
		list(map(lambda x : Globals.hashBank.pop(x) , list(Globals.hashBank.keys())[0:int(hbs/2)]))
	sig = genSig(sym_expr)
	check = Globals.hashBank.get(sig, None)
	if check is None:
		inputStr = extract_input_dep(list(sym_expr.free_symbols))
		#print("Gelpia input expr ops ->", seng.count_ops(sym_expr))
		g1 = time.time()
		val = invoke_gelpia(sym_expr, inputStr)
		#print("Actual return :", val, Globals.gelpiaID)
		Globals.hashBank[sig] = [val, Globals.gelpiaID] #invoke_gelpia(sym_expr, inputStr)
		g2 = time.time()
		print("Gelpia solve = ", g2 - g1, "opCount =", seng.count_ops(sym_expr))
	else:
		#inputStr = extract_input_dep(list(sym_expr.free_symbols))
		#orig_query = invoke_gelpia(sym_expr, inputStr)
		#hashed_query = Globals.hashBank[sig][0]
		#match_queryid = Globals.hashBank[sig][1]
		#print("MATCH FOUND")
		#if orig_query != hashed_query:
		#	print(orig_query, hashed_query,  match_queryid, Globals.gelpiaID)
		##Globals.hashBank[sig] = check
		pass

	return Globals.hashBank[sig][0]
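
generate_signature above caches Gelpia results in Globals.hashBank keyed by an expression signature and evicts half the cache once it holds more than 100 entries. A self-contained sketch of that bounded-cache idea (here evicting only on a miss), with hypothetical sig_of and solve callables standing in for genSig and invoke_gelpia:

CACHE_LIMIT = 100  # same threshold as the hbs > 100 check above

hash_bank = {}     # signature -> cached solver result

def cached_solve(expr, sig_of, solve):
    sig = sig_of(expr)
    if sig not in hash_bank:
        # Evict the oldest half of the cache before inserting a new entry
        # (plain dicts preserve insertion order in Python 3.7+).
        if len(hash_bank) > CACHE_LIMIT:
            for key in list(hash_bank)[:len(hash_bank) // 2]:
                hash_bank.pop(key)
        hash_bank[sig] = solve(expr)
    return hash_bank[sig]
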
Code Example #3
File: AnalyzeNode_Serial.py Project: sljiaa/Satire
    def start(self):

        local_hashbank = {}
        mappedList = {}
        #print("Reached here\n")
        self.trimList = self.probeList
        maxOpCount = max(
            [seng.count_ops(n.f_expression) for n in self.trimList])
        abs_depth = max([n.depth for n in self.trimList])
        #print(maxOpCount, self.maxdepth, abs_depth)
        if self.force:
            pass
        elif maxOpCount > 1000 and self.maxdepth > 10 and abs_depth > 5:
            if self.argList.mindepth == self.argList.maxdepth:
                pass
            else:
                return {"maxOpCount": maxOpCount, "flag": False}

        if self.argList.compress:
            if (len(self.trimList) > 1):
                for node in self.probeList:
                    sig = utils.genSig(node.f_expression)
                    enode = local_hashbank.get(sig, None)
                    if enode is None:
                        local_hashbank[sig] = node
                        mappedList[node] = []
                    else:
                        #print("Ever")
                        mappedList[local_hashbank[sig]].append(node)
                self.trimList = mappedList.keys()

            print(
                "Primary cand list count={l1}, Compressed cand list count={l2}"
                .format(l1=len(self.probeList), l2=len(self.trimList)))
            logger.info(
                "Primary cand list count={l1}, Compressed cand list count={l2}"
                .format(l1=len(self.probeList), l2=len(self.trimList)))
            #print("const:", [(n.f_expression,id(n)) for n in Globals.depthTable[0]])
            self.parent_dict = helper.expression_builder(self.trimList,
                                                         build=False)

        self.__init_workStack__()
        self.__setup_outputs__()

        #print("Begin Derivatives\n", time.time())
        self.traverse_ast()
        #print("Finished Derivatives\n", time.time())
        self.completed.clear()
        results = self.first_order_error()

        del local_hashbank

        for node, depList in mappedList.items():
            for dnode in depList:
                results[dnode] = copy.deepcopy(results[node])
                assert (dnode in self.probeList)
        return results
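
start() compresses the candidate list by hashing each node's expression with utils.genSig, analyzing only one representative per signature, and copying that representative's result to every duplicate afterwards. A minimal sketch of that compression step, with hypothetical sig_of and analyze callables:

import copy

def analyze_compressed(nodes, sig_of, analyze):
    reps = {}        # signature -> representative node
    duplicates = {}  # representative -> other nodes sharing its signature
    for node in nodes:
        rep = reps.setdefault(sig_of(node), node)
        if rep is node:
            duplicates[node] = []
        else:
            duplicates[rep].append(node)

    results = {rep: analyze(rep) for rep in duplicates}
    for rep, dupes in duplicates.items():
        for d in dupes:
            results[d] = copy.deepcopy(results[rep])  # reuse the representative's result
    return results
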
Code Example #4
File: AnalyzeNode_Serial.py Project: sljiaa/Satire
    def propagate_symbolic(self, node):
        for outVar in self.bwdDeriv[node].keys():
            expr_solve = (
                self.bwdDeriv[node][outVar]
                * node.get_noise(node)
                * (max(node.get_rounding(), outVar.get_rounding())
                   if node.rnd != 0.0 else node.get_rounding())
            ).__abs__()

            if seng.count_ops(self.Accumulator[outVar]) > 4000:
                self.QworkList[outVar].append(self.Accumulator[outVar])
                self.Accumulator[outVar] = seng.expand(expr_solve)
            elif seng.count_ops(expr_solve) > 1000:
                self.QworkList[outVar].append(expr_solve)
            else:
                self.Accumulator[outVar] += seng.expand(expr_solve)
            if len(self.QworkList[outVar]) >= self.Qthreshold:
                self.Accumulator[outVar] += utils.error_query_reduction(
                    self.QworkList[outVar])
                self.QworkList[outVar].clear()
Code Example #5
File: utils.py Project: sljiaa/Satire
def generate_signature(sym_expr):
    try:
        if (seng.count_ops(sym_expr) == 0):
            const_intv = float(str(sym_expr))
            return [const_intv, const_intv]
    except ValueError:
        pass

    return invoke_gelpia_serial(sym_expr)
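
The count_ops(sym_expr) == 0 check is a fast path: an expression with zero operations is either a bare constant, whose interval is trivially [c, c], or a lone symbol, in which case float(str(...)) raises ValueError and the solver is invoked as usual. A tiny illustration, assuming symengine is imported as seng:

import symengine as seng

c = seng.sympify("3.5")
assert seng.count_ops(c) == 0          # a bare constant has no operations
print([float(str(c)), float(str(c))])  # so its interval is just [3.5, 3.5]

x, y = seng.symbols("x y")
assert seng.count_ops(x + y) == 1      # anything with operations goes to the solver
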
Code Example #6
def build_functions(sympy_graph,
                    variables,
                    parameters=None,
                    wrt=None,
                    include_obj=True,
                    include_grad=False,
                    include_hess=False,
                    cse=True):
    if wrt is None:
        wrt = sympify(tuple(variables))
    if parameters is None:
        parameters = []
    else:
        parameters = [wrap_symbol_symengine(p) for p in parameters]
    variables = tuple(variables)
    parameters = tuple(parameters)
    func, grad, hess = None, None, None
    inp = sympify(variables + parameters)
    graph = sympify(sympy_graph)
    if count_ops(graph) > BACKEND_OPS_THRESHOLD:
        backend = 'lambda'
    else:
        backend = 'llvm'
    # TODO: did not replace zoo with oo
    if include_obj:
        func = lambdify(inp, [graph], backend=backend, cse=cse)
    if include_grad or include_hess:
        grad_graphs = list(graph.diff(w) for w in wrt)
        grad_ops = sum(count_ops(x) for x in grad_graphs)
        if grad_ops > BACKEND_OPS_THRESHOLD:
            grad_backend = 'lambda'
        else:
            grad_backend = 'llvm'
        if include_grad:
            grad = lambdify(inp, grad_graphs, backend=grad_backend, cse=cse)
        if include_hess:
            hess_graphs = list(
                list(g.diff(w) for w in wrt) for g in grad_graphs)
            # Hessians are hard-coded to always use the lambda backend, for performance
            hess = lambdify(inp, hess_graphs, backend='lambda', cse=cse)
    return BuildFunctionsResult(func=func, grad=grad, hess=hess)
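
build_functions switches from the 'llvm' backend to the interpreted 'lambda' backend once the graph's operation count crosses BACKEND_OPS_THRESHOLD, presumably because LLVM compilation time grows quickly with expression size. A minimal sketch of the same decision using symengine's Lambdify directly (assumes symengine was built with LLVM support and NumPy is installed; OPS_THRESHOLD is a hypothetical cutoff):

import symengine as se

OPS_THRESHOLD = 5000  # hypothetical stand-in for BACKEND_OPS_THRESHOLD

def compile_callable(args, expr):
    # Small graphs: pay the LLVM compile cost for faster evaluation.
    # Large graphs: fall back to the interpreted "lambda" backend.
    backend = "llvm" if se.count_ops(expr) <= OPS_THRESHOLD else "lambda"
    return se.Lambdify(args, [expr], backend=backend)

x, y = se.symbols("x y")
f = compile_callable([x, y], x**2 + y)
print(f([2.0, 3.0]))  # evaluates x**2 + y at (2, 3); prints a length-1 array containing 7.0
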
Code Example #7
	def propagate_symbolic(self, node):
		#print("@node depth = ", node.depth, type(node).__name__, node.f_expression)
		#print([n.f_expression for n in node.parents])
		#print(node.parents)
		#print(self.parent_dict[node])
		#print("--------------------------------------")
		#print("Rounding: node-expr:", node.f_expression)
		#print("Rounding:", node.get_rounding())
		for outVar in self.bwdDeriv[node].keys():
			expr_solve = (
				self.bwdDeriv[node][outVar]
				* node.get_noise(node)
				* (max(node.get_rounding(), outVar.get_rounding())
				   if node.rnd != 0.0 else node.get_rounding())
			).__abs__()

			if seng.count_ops(self.Accumulator[outVar]) > 4000:
				intv = max(utils.generate_signature(self.Accumulator[outVar]))
				self.Accumulator[outVar] = seng.expand(expr_solve)
				expr_solve = intv
			elif seng.count_ops( expr_solve ) > 1000:
				expr_solve = max(utils.generate_signature(expr_solve))
			self.Accumulator[outVar] += seng.expand(expr_solve)
Code Example #8
File: ops_def.py Project: tanmaytirpankar/Satire
def _solve1_(node, errList1, errList2, herr):

    expr1 = sum([seng.Abs(erri) for erri in errList1])
    expr2 = sum([seng.Abs(erri) for erri in errList2]) + herr * pow(2, -53)

    #if(seng.count_ops(expr1) > opMax):
    #	print("Solving Ferror @depth: ", node.depth)
    print("\nSolving f@depth :", node.depth)
    errList1 = [solve_remaining_error(expr1)]

    expr2_ops = seng.count_ops(expr2)
    print("Solving h@depth :", expr2, node.depth)
    errList2 = [solve_remaining_error(expr2)]

    return [errList1, errList2]
Code Example #9
File: ops_def.py Project: tanmaytirpankar/Satire
def _partial_solve_(errList):

    expr = sum([seng.Abs(erri) for erri in errList])
    expr_ops = seng.count_ops(expr)
    print("New partial:", expr_ops)
    size = len(errList)
    if size == 1 or expr_ops < SopMax:
        print("Unit level calls", size)
        #print(expr)
        val = max(utils.generate_signature_herror(expr))
        print("VAL : ", val)
        return val
    else:
        print("**************", size, expr_ops)
        return _partial_solve_(errList[0:int(size / 2)]) + _partial_solve_(
            errList[int(size / 2):size])
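
_partial_solve_ halves the error-term list whenever the summed expression is too large to bound in one query and adds the bounds of the two halves, which is sound because the terms are already wrapped in seng.Abs, so splitting can only over-approximate the total. A self-contained sketch with a hypothetical bound_abs standing in for generate_signature_herror:

import symengine as seng

S_OP_MAX = 2000  # hypothetical threshold, analogous to SopMax

def partial_solve(err_terms, bound_abs):
    expr = sum(seng.Abs(e) for e in err_terms)
    if len(err_terms) == 1 or seng.count_ops(expr) < S_OP_MAX:
        return bound_abs(expr)              # small enough for a single query
    mid = len(err_terms) // 2               # otherwise split and recurse
    return (partial_solve(err_terms[:mid], bound_abs) +
            partial_solve(err_terms[mid:], bound_abs))
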
Code Example #10
File: ASTtypes.py Project: sljiaa/Satire
    def simplify(lexpr):
        #if not Globals.simplify or (seng.count_ops(lexpr) > 30000):
        #	return lexpr
        #else:
        #	lexpr2 = seng.expand(lexpr)
        #	op1 = seng.count_ops(lexpr)
        #	op2 = seng.count_ops(lexpr2)
        #	if (op2 - op1 > 1000):
        #		Globals.simplify = False
        #	return lexpr2 if(seng.count_ops(lexpr2) < seng.count_ops(lexpr)) else lexpr

        ##else:
        ##	lexpr2 = seng.expand(lexpr)

        if (seng.count_ops(lexpr) > 30000):
            return lexpr
        else:
            return seng.expand(lexpr)
Code Example #11
File: utils_bak.py Project: sljiaa/Satire
def genSig(sym_expr):
	try:
		if seng.count_ops(sym_expr) == 0 :
			return float(str(sym_expr))
	except ValueError:
		pass
	d = OrderedDict()
	flist = [str(i) for i in sym_expr.free_symbols]
	flist.sort()
	freeSyms = [seng.var(fs) for fs in flist]
	# make this to a map
	#for i in range(0, len(freeSyms)):
	#	inp = freeSyms[i]
	#	d[inp] = str(i)+"_"+"{intv}".format(intv=Globals.inputVars[inp]["INTV"])

	fpt = map(lambda i : (str(freeSyms[i]), str(i)+"_"+"{intv}".format(intv=Globals.inputVars[freeSyms[i]]["INTV"])), \
	                      range(len(freeSyms)))
	d =	{p[0]:p[1] for p in fpt}

	regex = re.compile("(%s)" % "|".join(map(re.escape, d.keys())))

	strSig = regex.sub(lambda mo: d[mo.string[mo.start():mo.end()]], str(sym_expr))

	return hashSig(strSig, "md5")
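
genSig canonicalizes an expression by renaming its free symbols to position-indexed tokens (tagged with their input intervals) before hashing, so that structurally identical queries can share one cache entry. A simplified, self-contained sketch that keeps only the rename-and-hash part and drops the interval tagging:

import hashlib
import re
import symengine as seng

def canonical_sig(sym_expr):
    names = sorted(str(s) for s in sym_expr.free_symbols)
    if not names:                       # constant expression: hash it as-is
        return hashlib.md5(str(sym_expr).encode()).hexdigest()
    # Rename each symbol to its position in the sorted name order.
    table = {name: "v{}".format(i) for i, name in enumerate(names)}
    regex = re.compile("(%s)" % "|".join(map(re.escape, table.keys())))
    canon = regex.sub(lambda mo: table[mo.group(0)], str(sym_expr))
    return hashlib.md5(canon.encode()).hexdigest()

x, y = seng.symbols("x y")
print(canonical_sig(x + 2*y))           # md5 of the renamed expression string
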
Code Example #12
File: test_arit.py Project: sudoWin/symengine.py
def test_count_ops():
    x, y = symbols("x, y")
    assert count_ops(x + y) == 1
    assert count_ops((x + y, x * y)) == 2
    assert count_ops([[x**y], [x + y - 1]]) == 3
    assert count_ops(x + y, x * y) == 2
Code Example #13
File: utils.py Project: sljiaa/Satire
def error_query_reduction(QworkList):

    if len(QworkList) == 1:
        intv = generate_signature(QworkList[0])
        return max([abs(i) for i in intv])

    else:
        pass

    # SymQueryList      -> Elements are symbolic queries
    # ConstQueryList    -> Elements are constant queries
    SymQueryList, ConstQueryList = partitionList(
        QworkList, lambda x: seng.count_ops(x) == 0)

    # HashList -> ordered List of the hash signatures of expressions in SymQueryList
    HashList = list(map(genSig, SymQueryList))

    # HashBin -> Dictionary mapping each signature to the list of indices in HashList whose expressions share that signature
    HashBin = {
        sig: [i for i, x in enumerate(HashList) if x == sig]
        for sig in list(set(HashList))
    }

    # Exist_Signatures -> List of matching signatures found in older hashbanks
    # New_Signatures   -> List of new signatures generated for this incoming batch of queries
    Exist_Signatures, New_Signatures = partitionList(
        HashList, lambda x: x not in Globals.hashBank.keys())

    # New_SymQueryList -> List of new freshly minted symbolic queries identified by their unique md5 signatures
    #New_SymQueryList = [SymQueryList[HashBin[sig][0]] for sig in New_Signatures]
    New_SymQueryList = map(lambda x: SymQueryList[HashBin[x][0]],
                           New_Signatures)

    # New_SymQuery_object -> Formatted New_SymQueryList with required datatypes
    # New_SymQuery_object = map( lambda x : (str(x), extract_input_dep(list(x.free_symbols))), New_SymQueryList)
    intv_QS = error_query_reduction_with_pool(New_SymQueryList)

    New_SymQuery_Accumulator = sum(
        [max([abs(i) for i in intv]) * len(HashBin[New_Signatures[j]])
         for j, intv in enumerate(intv_QS)])

    Exist_SymQuery_Accumulator = sum(
        [max([abs(i) for i in intv]) * len(HashBin[Exist_Signatures[j]])
         for j, intv in enumerate([Globals.hashBank[k] for k in Exist_Signatures])])

    ConstQuery_Accumulator = sum([abs(float(str(x))) for x in ConstQueryList])

    ## ---------- update the hashBank for a specific threshold size ------- ##
    hbs = len(Globals.hashBank.keys())
    if len(New_Signatures) == 0:
        pass
    elif (hbs > 100):
        list(
            map(lambda x: Globals.hashBank.pop(x),
                list(Globals.hashBank.keys())[0:int(hbs / 2)]))

    for i, k in enumerate(New_Signatures):
        Globals.hashBank[k] = intv_QS[i]

    error_acc = New_SymQuery_Accumulator + Exist_SymQuery_Accumulator + ConstQuery_Accumulator
    print("Happy to exit\n")
    return error_acc
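
error_query_reduction separates constant terms from symbolic ones, deduplicates the symbolic queries by signature, solves each unique query once (reusing Globals.hashBank where possible), and weights every bound by how many times that query occurred in the batch. A compact sketch of that accounting, with hypothetical sig_of and bound_interval helpers standing in for genSig and the Gelpia pool:

from collections import Counter

import symengine as seng

def reduce_queries(queries, sig_of, bound_interval):
    # Constant queries need no solver call; just take their absolute values.
    consts = [q for q in queries if seng.count_ops(q) == 0]
    symbolic = [q for q in queries if seng.count_ops(q) != 0]
    const_acc = sum(abs(float(str(q))) for q in consts)

    # Deduplicate symbolic queries by signature, remembering multiplicities.
    unique, counts = {}, Counter()
    for q in symbolic:
        sig = sig_of(q)
        unique.setdefault(sig, q)
        counts[sig] += 1

    # Solve each unique query once and weight its bound by its multiplicity.
    sym_acc = 0.0
    for sig, q in unique.items():
        lo, hi = bound_interval(q)        # interval returned by the solver
        sym_acc += max(abs(lo), abs(hi)) * counts[sig]
    return const_acc + sym_acc
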