import operations as ops
import fit
import exptree
import random
import functools
import itertools

import sympy  # FIX: sympy was used below (sympy.symbols) but never imported

x = sympy.symbols('x')

# Note: prototype functions CAN NOT use ci (for int i) constant names
j, k, l, m, n, o, p = sympy.symbols('j k l m n o p')

# List of function prototypes and their constants
basic_functions = [x, x**2, ops.sqrt(x + j), ops.log(x + k), ops.exp(x*l),
                   ops.sin(x*m + n), ops.atan(x*o + p)]


def cross_multiply(l, k):
    '''Given a list, return a new list containing the original elements
    plus the products of every combination of 2..k distinct elements.

    Parameters:
        l -- list of multipliable items (e.g. sympy expressions, numbers)
        k -- largest combination size to multiply together

    Returns:
        A new list: the elements of ``l`` followed by all combination
        products, in itertools.combinations order.  ``l`` itself is not
        mutated.
    '''
    # Copy so the caller's list is never aliased by the result.
    result = list(l)
    for size in range(2, k + 1):
        # No combinations of this size exist once size exceeds len(l).
        if size > len(l):
            break
        result += [
            functools.reduce(lambda a, b: a * b, combo)
            for combo in itertools.combinations(l, size)
        ]
    # FIX: the original built ``result`` but never returned it.
    return result
# Dispatch: map an integer operator code to the corresponding math helper
# and store its value in ``result``.
# NOTE(review): this fragment is truncated at both ends -- the enclosing
# function header is not visible, and the chain stops at a dangling
# ``elif operator == 16:`` whose body lies outside this view.
if operator == 1:
    result = addition(num1, num2)
elif operator == 2:
    result = subtraction(num1, num2)
elif operator == 3:
    result = multiplication(num1, num2)
elif operator == 4:
    result = division(num1, num2)
elif operator == 5:
    result = integer_division(num1, num2)
elif operator == 6:
    result = power(num1, num2)
elif operator == 7:
    result = modulo(num1, num2)
elif operator == 8:
    result = log(num1, num2)
elif operator == 9:
    # Unlike the binary helpers above, sigmoid takes the single sum.
    result = sigmoid(num1 + num2)
elif operator == 10:
    result = rand_between(num1, num2)
elif operator == 11:
    result = hcf(num1, num2)
elif operator == 12:
    # Unary from here on: only num1 is consumed.
    result = factorial(num1)
elif operator == 13:
    result = exponential(num1)
elif operator == 14:
    # NOTE(review): Sine()/Cosine() are invoked with NO arguments, unlike
    # every other helper in this chain -- confirm this is intentional.
    result = Sine()
elif operator == 15:
    result = Cosine()
elif operator == 16:
# Create a new graph Graph().as_default() X = Placeholder() c = Placeholder() # Create a weight matrix for 2 outout classes: # One with a weight vector (1, 1) for blue and one with a # weight vector (-1, -1) for red W = Variable([[1, -1], [1, -1]]) b = Variable([0, 0]) p = softmax(add(matmul(X, W), b)) # Cross-entropy loss J = negative(reduce_sum(reduce_sum(multiply(c, log(p)), axis=1))) # Create red points centered at (-2, -2) red_points = np.random.randn(50, 2) - 2 * np.ones((50, 2)) # Create blue points centered at (2, 2) blue_points = np.random.randn(50, 2) + 2 * np.ones((50, 2)) session = Session() print( session.run( J, { X: np.concatenate((blue_points, red_points)), c: [[1, 0]] * len(blue_points) + [[0, 1]] * len(red_points) }))
def cross_entropy(logits: op.Tensor, label: op.Tensor):
    """Dense cross-entropy loss: -sum(log(logits) * label).

    Both tensors must have identical shapes; ``label`` is expected to
    weight the log-probabilities elementwise (e.g. one-hot rows).
    """
    assert logits.shape == label.shape
    log_probs = op.log(logits)
    weighted = log_probs * label
    return op.const(-1) * reduce_sum(weighted)
def sparse_cross_entropy(logits: op.Tensor, labels: op.Tensor):
    """Cross-entropy for one probability vector and a scalar class index.

    ``logits`` must be rank-1 and ``labels`` a scalar (shape ``()``);
    returns -log(logits[labels]).
    """
    assert len(logits.shape) == 1, 'but now is %s' % logits.shape
    assert labels.shape == tuple(), 'label.shape should be a number'
    log_probs = op.log(logits)
    picked = op.Index(log_probs, labels)
    return op.const(-1) * picked
# Exercise sym_fit on expressions of increasing difficulty.
# NOTE(review): sym_fit, ops, x, c0 and c1 are defined elsewhere in
# this project.

# 1) Plain ``x`` with no free parameters -- nothing to optimise.
x_data = [1, 2, 3, 4, 5]
y_data = [3, 9, 19, 33, 51]
expr = x
print("'fit' just x (no free parameters)")
popt, err = sym_fit(expr, [], x_data, y_data)
print("Fit to y_data")
print(popt)
print(err)
print("---")

# 2) Two free constants fitted to the identity line, using 12 guesses.
x_data = range(-10, 10)
y_data = range(-10, 10)
expr = ops.log(x + c0) + c1
# FIX: the old message claimed the expression was "c1 * log(x + c0) + c2",
# which does not match the expression actually being fitted.
print("Attempting to log(x + c0) + c1 (multiple guesses)")
popt, err = sym_fit(expr, [c0, c1], x_data, y_data, 12)
print("Fit to y_data")
print(popt)
print(err)
print("")

# 3) log(x) is undefined at x = -1, so this fit should fail.
x_data = [-1, 1, 2, 3]
y_data = [1, 2, 3, 4]
expr = ops.log(x) + c0
print("Attempting to log(x) + c0 (should fail on this domain)")