def call_with_critical_point_scanner(f, *args):
  """Calls f(scanner, *args) in TensorFlow session-context.

  Here, `scanner` will be a function with signature
  scanner(seed: int, scale: float) -> (potential, stationarity, pos_vector).

  The function `scanner` can only perform a scan when called from within
  the TF session-context that is set up by this function.
  """
  graph = tf.Graph()
  with graph.as_default():
    t_input = tf.placeholder(tf.float64, shape=[70])
    t_v70 = tf.Variable(initial_value=numpy.zeros([70]),
                        trainable=True,
                        dtype=tf.float64)
    op_assign_input = tf.assign(t_v70, t_input)
    d = tf_so8_sugra_potential(t_v70)
    t_potential = d['potential']
    t_stationarity = tf_so8_sugra_stationarity(d['a1'], d['a2'])
    # Minimizing asinh(stationarity) rather than the raw stationarity
    # compresses its large dynamic range without changing the minimizers.
    opt = contrib_opt.ScipyOptimizerInterface(tf.asinh(t_stationarity),
                                              options=dict(maxiter=500))
    with tf.Session() as sess:
      sess.run([tf.global_variables_initializer()])
      def scanner(seed, scale):
        rng = numpy.random.RandomState(seed)
        v70 = rng.normal(scale=scale, size=[70])
        sess.run([op_assign_input], feed_dict={t_input: v70})
        opt.minimize(session=sess)
        n_ret = sess.run([t_potential, t_stationarity, t_v70])
        return n_ret
      return f(scanner, *args)
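# Usage sketch (hypothetical driver; `_scan_seeds` is not part of the
# original code): run one scan per seed inside the session-context that
# call_with_critical_point_scanner sets up.
def _scan_seeds(scanner, seeds, scale=0.1):
  # One (potential, stationarity, v70) triple per seed.
  return [scanner(seed, scale) for seed in seeds]

# results = call_with_critical_point_scanner(_scan_seeds, range(10))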
def get_scanner(output_path, maxiter=1000, stationarity_threshold=1e-7):
  """Obtains a basic TensorFlow-based scanner for extremal points."""
  graph = tf.Graph()
  with graph.as_default():
    tf_scalar_evaluator = get_tf_scalar_evaluator()
    t_input = tf.compat.v1.placeholder(tf.float64, shape=[70])
    t_v70 = tf.Variable(initial_value=numpy.zeros([70]),
                        trainable=True,
                        dtype=tf.float64)
    op_assign_input = tf.compat.v1.assign(t_v70, t_input)
    sinfo = tf_scalar_evaluator(tf.cast(t_v70, tf.complex128))
    t_potential = sinfo.potential
    t_stationarity = sinfo.stationarity
    op_opt = contrib_opt.ScipyOptimizerInterface(
        tf.asinh(t_stationarity),
        options={'maxiter': maxiter})
  #
  def scanner(seed, scale=0.1, num_iterations=1):
    results = collections.defaultdict(list)
    rng = numpy.random.RandomState(seed)
    with graph.as_default():
      with tf.compat.v1.Session() as sess:
        sess.run([tf.compat.v1.global_variables_initializer()])
        for n in range(num_iterations):
          v70 = rng.normal(scale=scale, size=[70])
          sess.run([op_assign_input], feed_dict={t_input: v70})
          op_opt.minimize(sess)
          n_pot, n_stat, n_v70 = sess.run(
              [t_potential, t_stationarity, t_v70])
          if n_stat <= stationarity_threshold:
            results[S_id(n_pot)].append(
                (n, n_pot, n_stat, list(n_v70)))
            # Overwrite output at every iteration.
            if output_path is not None:
              tmp_out = output_path + '.tmp'
              with open(tmp_out, 'w') as h:
                h.write('n=%4d: p=%.12g s=%.12g\n' % (n, n_pot, n_stat))
                h.write(pprint.pformat(dict(results)))
              os.rename(tmp_out, output_path)
    return dict(results)
  #
  return scanner
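# Usage sketch (hypothetical path and values): build the scanner once, then
# run repeated scans; critical points passing the stationarity threshold are
# grouped by S_id(potential) and mirrored to output_path after each hit.
# scan = get_scanner('/tmp/scan_results.txt')
# results = scan(seed=0, scale=0.1, num_iterations=100)
# for key, hits in results.items():
#   print(key, len(hits))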
def cn(x):
    """Compressive nonlinearity."""
    return tf.asinh(4. * x) / 4.
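# Quick numeric check (values computed by hand, assuming eager execution):
# cn is near-linear for small inputs and grows only logarithmically for
# large ones.
#   cn(0.1)  == asinh(0.4) / 4  ~= 0.0975
#   cn(10.)  == asinh(40.) / 4  ~= 1.0955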
def stash(x, lamb=1.1613855392326946, alpha=0.6520042387583171):
    # Piecewise activation: bounded tanh branch for x <= 0, unbounded
    # (logarithmically growing) asinh branch for x > 0.
    return _tf.where(x <= 0.0,
                     2.0 * _tf.tanh(alpha * x),
                     lamb * _tf.asinh(2.0 * alpha * x / lamb))
def stash_old(x, lamb=1.1613326990732873, alpha=0.6521334159737763):
    # Same form as stash() above, with an earlier set of fitted constants.
    return _tf.where(x <= 0.0,
                     2.0 * _tf.tanh(alpha * x),
                     lamb * _tf.asinh(2.0 * alpha * x / lamb))
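# Sanity sketch (hand-computed; assumes `_tf` aliases TensorFlow): both
# branches of stash meet smoothly at the origin, so the activation is C^1
# at the branch point:
#   value at 0:  2*tanh(0) = 0  and  lamb*asinh(0) = 0
#   slope at 0:  d/dx 2*tanh(alpha*x)             -> 2*alpha
#                d/dx lamb*asinh(2*alpha*x/lamb)  -> 2*alpha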
def __init__(
        self,
        pts=[],
        edg=[],
        fixed=False,
        JSON=None,
        keep_paths=False,
        PAIRS=PAIRS,
        POW=POW,
        POWn=POWn,
        POW_SN=POW_SN,  # power extra factors of r_i+r_j in node repulsion
        max_workers=20,
        **kw):
    """
    pts: node positions
    edg: edge list
    fixed: if nodes should remain fixed
    JSON: if network should be loaded from a json file
        (specify file paths or file object)
    keep_paths: whether to keep original link trajectories inside JSON file
    max_workers: # of parallel units for force calculations
    kw:
        links = {'k': <spring constant>,
                 'thickness': <# or list of thicknesses>,
                 'amplitude': <of repulsive force>,
                 'ce': <cooling exponent for sim. annealing>,
                 'Temp0': <initial temperature as a fraction of thickness>,
                 'segs': <# of segments on each link>}
        nodes = {'amplitude': <of repulsive force>,
                 'radius': <range of repulsive Gaussian force>}

    net.points contains all link points. The link-link interaction matrix
    should now be part of the net.links object, not the individual links.
    This allows massive vectorization of all interactions.

    net.links: Now we have a single net_links object that contains info of
    all links. All that separates them is the net_links.idx dict, which
    indexes which points in net_links.points belong to which link.
    net_links also contains all methods for interactions between links.
    """
    tt.tic()
    # All calcs done in self.session
    self.session = tf.InteractiveSession()
    # All dynamical equations kept in assignments
    self.assignments_forces = tuple()
    self.assignments_points = tuple()
    self.params = {
        "links": {
            "k": 1e1,
            "amplitude": 5e2,
            "thickness": 0.1,
            "Temp0": 0.5,
            "ce": 1000,
            "segs": 5,
            "weighted": 1,
        },
        "nodes": {
            "amplitude": 5e2,
            "radius": 1.0,
            "weighted": 1
        },
    }
    self.gnam = "E-ELF-sim"
    self.dt0 = 1e-5  # tf.Variable(1e-5, dtype=tf.float32)
    self.dt = tf.Variable(1e-5, dtype=tf.float32)
    self.dt_ = tf.placeholder(tf.float32)
    if JSON:
        self.get_JSON(JSON, kw)
    self.it_num = 0
    kwl = kw["links"] if "links" in kw else {}
    kwn = kw["nodes"] if "nodes" in kw else {}
    self.params["links"].update(kwl)
    self.params["nodes"].update(kwn)
    self.max_workers = max_workers
    self.fixed = fixed
    self.keep_paths = keep_paths
    self.PAIRS = PAIRS
    self.params["POW"] = self.POW = POW
    self.params["POWn"] = self.POWn = POWn
    self.params["POW_SN"] = self.POW_SN = POW_SN
    if not JSON:
        self.pts = array(pts)  # tf.Variable(pts, dtype=tf.float32)
        self.elist = array(
            edg, dtype=int32)[:, :2]  # tf.Variable(edg, dtype=tf.float32)
        self.link_weights = (array(edg)[:, 2]
                             if self.params["links"]["weighted"]
                             else array([1] * len(self.elist)))
    if "labels" in self.params["nodes"]:
        self.make_link_labels()
    self.it_num = 0
    self.t = 0
    self.tv = []  # to save volume evolution
    # initialize variables
    self.th_mean = mean(self.params["links"]["thickness"] * self.link_weights)
    self.nrad_mean = mean(self.params["nodes"]["radius"])
    self.r_min = float32(min(self.nrad_mean, self.th_mean) / 10.0)
    self.f_mild = lambda x: self.r_min * tf.asinh(x / self.r_min)
    # for assignments
    self.asg = {}
    tt.toc()
    print("Making links...")
    self.links = NetLinks(net=self, **self.params["links"])  # (**kwl)
    tt.toc()
    print("Making nodes...")
    self.nodes = NetNodes(net=self, **self.params["nodes"])  # (**kwn)
    tt.toc()
    print("initializing global variables...")
    init = tf.global_variables_initializer()
    self.session.run(init)
    tt.toc()
    print("Initial binning...")
    self.it_updates()
    tt.toc()
    print("setup: dt...")
    self.setup_dt()
    tt.toc()
    print("setup: volume...")
    self.vol = tf.reduce_sum(vec_len(self.links.dp))
    tt.toc()
    print("setup: dynamics: forces...")
    # Define the comp group and iteration steps
    self.step_forces = tf.group(*self.assignments_forces)
    tt.toc()
    print("setup: dynamics: points, dt...")
    self.step_points = tf.group(*self.assignments_points)
    self.step_dt = tf.group(*self.asg["dt"])
    tt.toc()
    print("Done!")
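# Usage sketch: the enclosing class is not named in this snippet; `Network`
# below is a hypothetical placeholder for it. Edges are (i, j, weight)
# triples, matching the weighted-links branch of __init__ above.
# net = Network(
#     pts=[[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]],
#     edg=[[0, 1, 1.0], [1, 2, 2.0]],
#     links={'k': 20.0, 'segs': 10},
#     nodes={'radius': 0.5})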
def self_normalizing_asinh(x):
    # Scaled asinh; per the function name, the fixed constant is presumably
    # chosen to make the activation self-normalizing (variance-preserving).
    return 1.256734802399369 * tf.asinh(x)
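# Hedged check of the presumed normalization (assumes TF2 eager execution):
# for unit-Gaussian input, the output standard deviation should come out
# close to 1.0 if the constant was fitted for variance preservation.
# x = tf.random.normal([100000], dtype=tf.float64)
# print(tf.math.reduce_std(self_normalizing_asinh(x)))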