  d[x] = tangent.pop_stack(d[stack], d[op_id])


@tangent_(tangent.unbroadcast)
def tunbroadcast(z, x, y):
  d[z] = tangent.unbroadcast(d[x], d[y])


@tangent_(tangent.Stack)
def tstack(z):
  d[z] = tangent.Stack()


@tangent_(tangent.astype)
def tastype(z, x, y):
  d[z] = tangent.astype(d[x], d[y])


@tangent_(tangent.unreduce)
def tunreduce(z, array, shape, axis, keepdims):
  d[z] = tangent.unreduce(d[array], d[shape], axis, keepdims)


# Until we've written the tangents of all functions we want to support,
# we will throw an explicit "no tangent found" error for those we have not
# finished. UNIMPLEMENTED_TANGENTS will contain the list of all of these
# unimplemented tangent functions.
UNIMPLEMENTED_TANGENTS = grads.get_module_functions(
    (numpy, numpy.fft, numpy.linalg, numpy.random, math)) - set(tangents)
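# Illustrative sketch (an assumption, not part of the original module) of how
# UNIMPLEMENTED_TANGENTS is meant to be consumed: a lookup that lands in the
# unimplemented set should fail loudly instead of silently producing no
# derivative. `lookup_tangent` is a hypothetical helper name; the real lookup
# lives in Tangent's code generator.
def lookup_tangent(func):
  if func in UNIMPLEMENTED_TANGENTS:
    # Same error class the TF extensions below raise for unfinished ops.
    raise tangent.ForwardNotImplementedError(func)
  return tangents[func]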
@tangent_(tf.nn.avg_pool)
def ttfavg_pool(y, x, sizes, strides, padding):
  raise tangent.ForwardNotImplementedError(tf.nn.avg_pool)


@tangent_(tf.nn.max_pool)
def ttfmax_pool(y, x, sizes, strides, padding):
  raise tangent.ForwardNotImplementedError(tf.nn.max_pool)


@tangent_(tf.shape)
def tshape(y, x):
  d[y] = tf.shape(d[x])


#
# Blacklist unimplemented Eager grads
#

grads.UNIMPLEMENTED_ADJOINTS.update(
    grads.get_module_functions(
        (tf, tf.distributions, tf.image, tf.layers, tf.linalg, tf.losses,
         tf.nn)) - set(grads.adjoints))

tangents.UNIMPLEMENTED_TANGENTS.update(
    grads.get_module_functions(
        (tf, tf.distributions, tf.image, tf.layers, tf.linalg, tf.losses,
         tf.nn)) - set(tangents.tangents))
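# Illustrative usage sketch (an assumption, not part of the original module):
# querying the blacklists before attempting differentiation. Functions left in
# UNIMPLEMENTED_ADJOINTS / UNIMPLEMENTED_TANGENTS have no registered
# derivative yet, so reverse- or forward-mode generation should fail loudly on
# them. `supports_autodiff` is a hypothetical helper name.
def supports_autodiff(func, mode='reverse'):
  if mode == 'reverse':
    return func not in grads.UNIMPLEMENTED_ADJOINTS
  return func not in tangents.UNIMPLEMENTED_TANGENTS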