def graph_get(self, cr, uid, id, scale, context=None):
    """Compute a 2-D layout for a workflow's activity graph.

    Walks the workflow's activities and out-transitions, feeds them to the
    ``graph`` layout engine, and returns positioned nodes plus the
    transition map.

    :param cr: database cursor
    :param uid: user id
    :param id: id of the workflow record to lay out
    :param scale: sequence unpacked into ``graph.scale(*scale)``
    :param context: optional context dict (``None`` -> empty dict);
        was a mutable default argument (``{}``) before — a shared-state bug
    :return: dict ``{'nodes': {str(act_id): {...layout, 'name': ...}},
        'transitions': {transition_id: (from_act_id, to_act_id)}}``
    """
    if context is None:
        context = {}
    nodes = []
    nodes_name = []
    transitions = []
    start = []
    tres = {}
    no_ancester = []
    workflow = self.browse(cr, uid, id, context)
    for a in workflow.activities:
        nodes_name.append((a.id, a.name))
        nodes.append(a.id)
        if a.flow_start:
            start.append(a.id)
        elif not a.in_transitions:
            # Not a start node and nothing points at it: the layout engine
            # needs these listed explicitly so they still get placed.
            no_ancester.append(a.id)
        for t in a.out_transitions:
            transitions.append((a.id, t.act_to.id))
            tres[t.id] = (a.id, t.act_to.id)
    g = graph(nodes, transitions, no_ancester)
    g.process(start)
    g.scale(*scale)
    result = g.result_get()
    # Re-key the layout result by str(id) and attach the display name.
    results = {}
    for act_id, act_name in nodes_name:
        results[str(act_id)] = result[act_id]
        results[str(act_id)]['name'] = act_name
    return {'nodes': results, 'transitions': tres}
def test_rndgraph(number, N, density, Omega, Phi, Lambda):
    """Run one random-graph test instance.

    Builds the instance name from the parameters, reloads a previously
    generated adjacency set from the ``tables/`` directory when the file
    exists, otherwise generates a fresh random graph, then hands the
    instance to ``test()``.

    :param number: instance number (zero-padded to 3 digits in the name)
    :param N: number of vertices
    :param density: edge density in percent, must be in [0, 100]
    :param Omega, Phi, Lambda: test parameters embedded in the name
    :raises ValueError: if *density* is outside [0, 100]
    """
    density = int(density)
    if density < 0 or density > 100:
        # Was `print "density not valid!"` (Python-2-only syntax) followed by
        # a bare `raise Exception`.  ValueError subclasses Exception, so any
        # caller catching the old exception still works.
        raise ValueError("density not valid: %d (expected 0..100)" % density)
    name = "%s%0*d-%d_%d-%d-%d_%0*d" % (
        "rndgraph", 2, density, N, Omega, Phi, Lambda, 3, number)
    # Hoist the path expression that was repeated twice.
    path = "tables/" + name.rsplit('_', 1)[0] + "/" + name
    if os.path.isfile(path):
        V = range(1, N + 1)
        with open(path) as searchfile:
            for line in searchfile:
                if "A =" in line:
                    # NOTE(review): eval() of file content — the tables/
                    # files must be trusted; consider ast.literal_eval.
                    A = eval(line[4:])
    else:
        # Expected edge count for the requested density percentage.
        V, A = graph(N, int(float(density) / 100 * N * (N - 1) / 2))
    test(V, A, Omega, Phi, Lambda, name)
def analiser():
    """Render the crop-optimizer graph page from the current global state.

    Reads the module-level ``npk``, ``nut`` and ``out`` values, scales the
    available nutrients against the crop's optimum, and returns the rendered
    ``graph.html`` template with the Plotly JSON payload.
    """
    global npk
    global nut
    global out
    levels = npk
    # Interleave NPK levels and nutrient readings in the exact order the
    # graphing helpers expect (note nut[5]/nut[4]/nut[3] are swapped).
    avail = [
        levels[0], levels[1], levels[2],
        nut[0], nut[1], nut[2],
        nut[5], nut[4], nut[3], nut[6],
        levels[3],
    ]
    optimum = tools.graph_val(out)
    req = tools.graph_scaling(avail, optimum)
    figure = tools.graph(avail, req)
    header = "{} CROP OPTIMIZER".format(out.upper())
    graphJSON = plot_utl(figure)
    return render_template('graph.html',
                           graphJSON=graphJSON,
                           header=header,
                           required=req)
def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node,
              des_node, label, scale, context=None):
    """Build layout data for a generic node/connector diagram.

    Discovers, via ORM introspection, which one2many fields link *model* to
    its nodes and the nodes to their connectors, then lays the nodes out
    with the ``graph`` engine.

    :param model: name of the parent model holding the node one2many
    :param node_obj: model name of the diagram nodes
    :param conn_obj: model name of the connectors (arrows)
    :param src_node, des_node: connector field names pointing at source
        and destination nodes
    :param label: string repr of a list of connector field names used to
        build edge labels (eval'ed below)
    :param scale: sequence unpacked into ``graph.scale(*scale)``
    :return: dict with nodes, transitions, label, blank_nodes and
        node_parent_field
    """
    nodes = []
    nodes_name = []
    transitions = []
    start = []
    tres = {}
    labels = {}
    no_ancester = []
    blank_nodes = []
    _Model_Obj = self.pool.get(model)
    _Node_Obj = self.pool.get(node_obj)
    _Arrow_Obj = self.pool.get(conn_obj)
    # Find the one2many on the parent model that lists the nodes, and its
    # inverse many2one field name (returned as node_parent_field).
    for model_key, model_value in _Model_Obj._columns.items():
        if model_value._type == 'one2many':
            if model_value._obj == node_obj:
                _Node_Field = model_key
                _Model_Field = model_value._fields_id
    # First matching one2many on the node model -> destination field,
    # the next one -> source field (flag orders the assignment).
    flag = False
    for node_key, node_value in _Node_Obj._columns.items():
        if node_value._type == 'one2many':
            if node_value._obj == conn_obj:
                if src_node in _Arrow_Obj._columns and flag:
                    _Source_Field = node_key
                if des_node in _Arrow_Obj._columns and not flag:
                    _Destination_Field = node_key
                    flag = True
    datas = _Model_Obj.read(cr, uid, id, [], context)
    for a in _Node_Obj.read(cr, uid, datas[_Node_Field], []):
        if a[_Source_Field] or a[_Destination_Field]:
            nodes_name.append((a['id'], a['name']))
            nodes.append(a['id'])
        else:
            blank_nodes.append({'id': a['id'], 'name': a['name']})
        # `in` replaces the Python-2-only dict.has_key().
        if 'flow_start' in a and a['flow_start']:
            start.append(a['id'])
        else:
            if not a[_Source_Field]:
                no_ancester.append(a['id'])
        for t in _Arrow_Obj.read(cr, uid, a[_Destination_Field], []):
            # Skip dangling connectors with no destination set — without
            # this guard t[des_node][0] raises on such records (guard
            # matches the hardened variant of this method).
            if not t[des_node]:
                continue
            transitions.append((a['id'], t[des_node][0]))
            tres[str(t['id'])] = (a['id'], t[des_node][0])
            label_string = ""
            if label:
                # NOTE(review): eval() of the caller-supplied `label`
                # string — callers must only pass trusted literals.
                for lbl in eval(label):
                    if tools.ustr(lbl) in t and tools.ustr(t[lbl]) == 'False':
                        label_string = label_string + ' '
                    else:
                        label_string = label_string + " " + tools.ustr(t[lbl])
            labels[str(t['id'])] = (a['id'], label_string)
    g = graph(nodes, transitions, no_ancester)
    g.process(start)
    g.scale(*scale)
    result = g.result_get()
    # Re-key the layout by str(id) and attach the display names.
    results = {}
    for node in nodes_name:
        results[str(node[0])] = result[node[0]]
        results[str(node[0])]['name'] = node[1]
    return {'nodes': results,
            'transitions': tres,
            'label': labels,
            'blank_nodes': blank_nodes,
            'node_parent_field': _Model_Field}
def graph_get(self, cr, uid, id, res_model, res_id, scale, context=None):
    """Build the data used to draw a business-process diagram.

    Collects every node and transition of process ``id``, evaluates which
    nodes are active/gray for the optional concrete record
    (``res_model``/``res_id``), attaches related-resource information, and
    runs the ``tools.graph`` layout engine to assign x/y coordinates.

    Returns a dict with keys: name, resource, state, perm, notes, nodes,
    transitions.
    """
    pool = pooler.get_pool(cr.dbname)
    process = pool.get('process.process').browse(cr, uid, id, context=context)
    name = process.name
    resource = None
    state = 'N/A'
    expr_context = {}
    states = {}
    perm = None
    if res_model:
        # Map of state value -> human-readable label for the target model.
        states = dict(
            pool.get(res_model).fields_get(cr, uid, context=context).get(
                'state', {}).get('selection', {}))
    if res_id:
        current_object = pool.get(res_model).browse(cr, uid, res_id,
                                                    context=context)
        current_user = pool.get('res.users').browse(cr, uid, uid,
                                                    context=context)
        # Evaluation environment for the node condition expressions below.
        expr_context = Env(current_object, current_user)
        resource = current_object.name
        if 'state' in current_object:
            state = states.get(current_object.state, 'N/A')
        perm = pool.get(res_model).perm_read(cr, uid, [res_id],
                                             context=context)[0]
    notes = process.note or "N/A"
    nodes = {}
    start = []
    transitions = {}
    for node in process.node_ids:
        data = {}
        data['name'] = node.name
        data['model'] = (node.model_id or None) and node.model_id.model
        data['kind'] = node.kind
        data['subflow'] = (node.subflow_id or False) and [
            node.subflow_id.id, node.subflow_id.name
        ]
        data['notes'] = node.note
        data['active'] = False
        data['gray'] = False
        data['url'] = node.help_url
        # get associated workflow
        if data['model']:
            wkf_ids = self.pool.get('workflow').search(
                cr, uid, [('osv', '=', data['model'])])
            data['workflow'] = (wkf_ids or False) and wkf_ids[0]
        # directory_id is inherited from the document module when installed.
        if 'directory_id' in node and node.directory_id:
            data['directory_id'] = node.directory_id.id
            data['directory'] = self.pool.get(
                'document.directory').get_resource_path(
                    cr, uid, data['directory_id'], data['model'], False)
        if node.menu_id:
            data['menu'] = {
                'name': node.menu_id.complete_name,
                'id': node.menu_id.id
            }
        if node.model_id and node.model_id.model == res_model:
            # model_states is a stored Python expression evaluated against
            # the current record/user environment.
            try:
                data['active'] = eval(node.model_states, expr_context)
            except Exception:
                pass
        if not data['active']:
            # A node is grayed out when any of its conditions evaluates
            # falsy; evaluation errors leave 'gray' untouched (best-effort).
            try:
                gray = True
                for cond in node.condition_ids:
                    if cond.model_id and cond.model_id.model == res_model:
                        gray = gray and eval(cond.model_states, expr_context)
                data['gray'] = not gray
            except:
                pass
        nodes[node.id] = data
        if node.flow_start:
            start.append(node.id)
        for tr in node.transition_out:
            data = {}
            data['name'] = tr.name
            data['source'] = tr.source_node_id.id
            data['target'] = tr.target_node_id.id
            data['notes'] = tr.note
            data['buttons'] = buttons = []
            for b in tr.action_ids:
                button = {}
                button['name'] = b.name
                button['state'] = b.state
                button['action'] = b.action
                buttons.append(button)
            data['groups'] = groups = []
            for r in tr.transition_ids:
                if r.group_id:
                    groups.append({'name': r.group_id.name})
            for r in tr.group_ids:
                groups.append({'name': r.name})
            transitions[tr.id] = data

    # now populate resource information
    def update_relatives(nid, ref_id, ref_model):
        # Attach the concrete record (ref_id, ref_model) to node nid, then
        # recurse into neighbouring nodes, following relational fields to
        # find each neighbour's own record.
        relatives = []
        for dummy, tr in transitions.items():
            if tr['source'] == nid:
                relatives.append(tr['target'])
            if tr['target'] == nid:
                relatives.append(tr['source'])
        if not ref_id:
            nodes[nid]['res'] = False
            return
        nodes[nid]['res'] = resource = {'id': ref_id, 'model': ref_model}
        refobj = pool.get(ref_model).browse(cr, uid, ref_id, context=context)
        fields = pool.get(ref_model).fields_get(cr, uid, context=context)
        # check for directory_id from inherited from document module
        if nodes[nid].get('directory_id', False):
            resource['directory'] = self.pool.get(
                'document.directory').get_resource_path(
                    cr, uid, nodes[nid]['directory_id'], ref_model, ref_id)
        resource['name'] = refobj.name_get(context)[0][1]
        resource['perm'] = pool.get(ref_model).perm_read(
            cr, uid, [ref_id], context)[0]
        for r in relatives:
            node = nodes[r]
            # 'res' acts as a visited marker, terminating the recursion.
            if 'res' not in node:
                for n, f in fields.items():
                    if node['model'] == ref_model:
                        update_relatives(r, ref_id, ref_model)
                    elif f.get('relation') == node['model']:
                        rel = refobj[n]
                        if rel and isinstance(rel, list):
                            rel = rel[0]
                        try:
                            # XXX: rel has been reported as string (check it)
                            _id = (rel or False) and rel.id
                            _model = node['model']
                            update_relatives(r, _id, _model)
                        except:
                            pass

    if res_id:
        # Seed the recursion from the first non-gray node matching the
        # current record's model (or any active node).
        for nid, node in nodes.items():
            if not node['gray'] and (node['active']
                                     or node['model'] == res_model):
                update_relatives(nid, res_id, res_model)
                break

    # calculate graph layout
    g = tools.graph(
        nodes.keys(),
        map(lambda x: (x['source'], x['target']), transitions.values()))
    g.process(start)
    g.scale(*scale)
    #g.scale(100, 100, 180, 120)
    graph = g.result_get()

    # fix the height problem: shift all nodes up so the minimum y lands
    # near 10 (miny == -1 is the "unset" sentinel on the first iteration)
    miny = -1
    for k, v in nodes.items():
        x = graph[k]['x']
        y = graph[k]['y']
        if miny == -1:
            miny = y
        miny = min(y, miny)
        v['x'] = x
        v['y'] = y
    for k, v in nodes.items():
        y = v['y']
        v['y'] = min(y - miny + 10, y)
    return dict(name=name,
                resource=resource,
                state=state,
                perm=perm,
                notes=notes,
                nodes=nodes,
                transitions=transitions)
def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node,
              des_node, label, scale, context=None):
    """Build layout data for a generic node/connector diagram.

    Introspects the ORM columns to find the one2many fields linking *model*
    to its nodes and the nodes to their connectors, then lays the nodes out
    with the ``graph`` engine.

    :param label: string repr of a list of connector field names used to
        build edge labels (eval'ed below); falsy values are treated as []
    :param context: optional context dict (``None`` -> empty dict); was a
        mutable default argument (``{}``) before — a shared-state bug
    :return: dict with nodes, transitions, label, blank_nodes and
        node_parent_field
    """
    if context is None:
        context = {}
    if not label:
        label = []
    nodes = []
    nodes_name = []
    transitions = []
    start = []
    tres = {}
    labels = {}
    no_ancester = []
    blank_nodes = []
    _Model_Obj = self.pool.get(model)
    _Node_Obj = self.pool.get(node_obj)
    _Arrow_Obj = self.pool.get(conn_obj)
    # Find the one2many on the parent model that lists the nodes, and its
    # inverse many2one field name (returned as node_parent_field).
    for model_key, model_value in _Model_Obj._columns.items():
        if model_value._type == "one2many":
            if model_value._obj == node_obj:
                _Node_Field = model_key
                _Model_Field = model_value._fields_id
    # First matching one2many on the node model -> destination field,
    # the next one -> source field (flag orders the assignment).
    flag = False
    for node_key, node_value in _Node_Obj._columns.items():
        if node_value._type == "one2many":
            if node_value._obj == conn_obj:
                if src_node in _Arrow_Obj._columns and flag:
                    _Source_Field = node_key
                if des_node in _Arrow_Obj._columns and not flag:
                    _Destination_Field = node_key
                    flag = True
    datas = _Model_Obj.read(cr, uid, id, [], context)
    for a in _Node_Obj.read(cr, uid, datas[_Node_Field], []):
        if a[_Source_Field] or a[_Destination_Field]:
            nodes_name.append((a["id"], a["name"]))
            nodes.append(a["id"])
        else:
            blank_nodes.append({"id": a["id"], "name": a["name"]})
        # `in` replaces the Python-2-only dict.has_key().
        if "flow_start" in a and a["flow_start"]:
            start.append(a["id"])
        else:
            if not a[_Source_Field]:
                no_ancester.append(a["id"])
        for t in _Arrow_Obj.read(cr, uid, a[_Destination_Field], []):
            transitions.append((a["id"], t[des_node][0]))
            tres[str(t["id"])] = (a["id"], t[des_node][0])
            label_string = ""
            if label:
                # NOTE(review): eval() of the caller-supplied `label`
                # string — callers must only pass trusted literals.
                for lbl in eval(label):
                    if str(lbl) in t and str(t[lbl]) == "False":
                        label_string = label_string + " "
                    else:
                        # str() here fixes a TypeError: the original
                        # concatenated the raw value, crashing whenever
                        # t[lbl] was not already a string.
                        label_string = label_string + " " + str(t[lbl])
            labels[str(t["id"])] = (a["id"], label_string)
    g = graph(nodes, transitions, no_ancester)
    g.process(start)
    g.scale(*scale)
    result = g.result_get()
    # Re-key the layout by str(id) and attach the display names.
    results = {}
    for node in nodes_name:
        results[str(node[0])] = result[node[0]]
        results[str(node[0])]["name"] = node[1]
    return {
        "nodes": results,
        "transitions": tres,
        "label": labels,
        "blank_nodes": blank_nodes,
        "node_parent_field": _Model_Field,
    }
def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node,
              des_node, label, scale, context=None):
    """Build layout data for a generic node/connector diagram.

    Introspects the ORM columns to find the one2many fields linking *model*
    to its nodes and the nodes to their connectors, skips dangling
    connectors, and lays the nodes out with the ``graph`` engine.

    :param label: string repr of a list of connector field names used to
        build edge labels (eval'ed below)
    :param scale: sequence unpacked into ``graph.scale(*scale)``
    :return: dict with nodes, transitions, label, blank_nodes and
        node_parent_field
    """
    nodes = []
    nodes_name = []
    transitions = []
    start = []
    tres = {}
    labels = {}
    no_ancester = []
    blank_nodes = []
    _Model_Obj = self.pool.get(model)
    _Node_Obj = self.pool.get(node_obj)
    _Arrow_Obj = self.pool.get(conn_obj)
    # Find the one2many on the parent model that lists the nodes, and its
    # inverse many2one field name (returned as node_parent_field).
    for model_key, model_value in _Model_Obj._columns.items():
        if model_value._type == 'one2many':
            if model_value._obj == node_obj:
                _Node_Field = model_key
                _Model_Field = model_value._fields_id
    # First matching one2many on the node model -> destination field,
    # the next one -> source field (flag orders the assignment).
    flag = False
    for node_key, node_value in _Node_Obj._columns.items():
        if node_value._type == 'one2many':
            if node_value._obj == conn_obj:
                if src_node in _Arrow_Obj._columns and flag:
                    _Source_Field = node_key
                if des_node in _Arrow_Obj._columns and not flag:
                    _Destination_Field = node_key
                    flag = True
    datas = _Model_Obj.read(cr, uid, id, [], context)
    for a in _Node_Obj.read(cr, uid, datas[_Node_Field], []):
        if a[_Source_Field] or a[_Destination_Field]:
            nodes_name.append((a['id'], a['name']))
            nodes.append(a['id'])
        else:
            blank_nodes.append({'id': a['id'], 'name': a['name']})
        # `in` replaces the Python-2-only dict.has_key().
        if 'flow_start' in a and a['flow_start']:
            start.append(a['id'])
        else:
            if not a[_Source_Field]:
                no_ancester.append(a['id'])
        for t in _Arrow_Obj.read(cr, uid, a[_Destination_Field], []):
            # Skip dangling connectors with no destination set.
            if not t[des_node]:
                continue
            transitions.append((a['id'], t[des_node][0]))
            tres[str(t['id'])] = (a['id'], t[des_node][0])
            label_string = ""
            if label:
                # NOTE(review): eval() of the caller-supplied `label`
                # string — callers must only pass trusted literals.
                for lbl in eval(label):
                    if tools.ustr(lbl) in t and tools.ustr(
                            t[lbl]) == 'False':
                        label_string = label_string + ' '
                    else:
                        label_string = label_string + " " + tools.ustr(
                            t[lbl])
            labels[str(t['id'])] = (a['id'], label_string)
    g = graph(nodes, transitions, no_ancester)
    g.process(start)
    g.scale(*scale)
    result = g.result_get()
    # Re-key the layout by str(id) and attach the display names.
    results = {}
    for node in nodes_name:
        results[str(node[0])] = result[node[0]]
        results[str(node[0])]['name'] = node[1]
    return {
        'nodes': results,
        'transitions': tres,
        'label': labels,
        'blank_nodes': blank_nodes,
        'node_parent_field': _Model_Field,
    }
def graph_get(self, cr, uid, id, res_model, res_id, scale, context=None):
    """Build the data used to draw a business-process diagram.

    Collects every node and transition of process ``id``, evaluates which
    nodes are active/gray for the optional concrete record
    (``res_model``/``res_id``), attaches related-resource information, runs
    the ``tools.graph`` layout engine, and returns nodes/transitions keyed
    by string ids.

    Returns a dict with keys: name, resource, state, perm, notes, nodes,
    transitions.
    """
    pool = pooler.get_pool(cr.dbname)
    process = pool.get('process.process').browse(cr, uid, id, context=context)
    name = process.name
    resource = False
    state = 'N/A'
    expr_context = {}
    states = {}
    perm = False
    if res_model:
        # Map of state value -> human-readable label for the target model.
        states = dict(pool.get(res_model).fields_get(cr, uid, context=context).get('state', {}).get('selection', {}))
    if res_id:
        current_object = pool.get(res_model).browse(cr, uid, res_id, context=context)
        current_user = pool.get('res.users').browse(cr, uid, uid, context=context)
        # Evaluation environment for the node condition expressions below.
        expr_context = Env(current_object, current_user)
        resource = current_object.name
        if 'state' in current_object:
            state = states.get(current_object.state, 'N/A')
        perm = pool.get(res_model).perm_read(cr, uid, [res_id], context=context)[0]
    notes = process.note or "N/A"
    nodes = {}
    start = []
    transitions = {}
    for node in process.node_ids:
        data = {}
        data['name'] = node.name
        data['model'] = (node.model_id or None) and node.model_id.model
        data['kind'] = node.kind
        data['subflow'] = (node.subflow_id or False) and [node.subflow_id.id, node.subflow_id.name]
        data['notes'] = node.note
        data['active'] = False
        data['gray'] = False
        data['url'] = node.help_url
        # get associated workflow
        if data['model']:
            wkf_ids = self.pool.get('workflow').search(cr, uid, [('osv', '=', data['model'])])
            data['workflow'] = (wkf_ids or False) and wkf_ids[0]
        # directory_id is inherited from the document module when installed.
        if 'directory_id' in node and node.directory_id:
            data['directory_id'] = node.directory_id.id
            data['directory'] = self.pool.get('document.directory').get_resource_path(cr, uid, data['directory_id'], data['model'], False)
        if node.menu_id:
            data['menu'] = {'name': node.menu_id.complete_name, 'id': node.menu_id.id}
        if node.model_id and node.model_id.model == res_model:
            # model_states is a stored Python expression evaluated against
            # the current record/user environment.
            try:
                data['active'] = eval(node.model_states, expr_context)
            except Exception:
                pass
        if not data['active']:
            # A node is grayed out when any of its conditions evaluates
            # falsy; evaluation errors leave 'gray' untouched (best-effort).
            try:
                gray = True
                for cond in node.condition_ids:
                    if cond.model_id and cond.model_id.model == res_model:
                        gray = gray and eval(cond.model_states, expr_context)
                data['gray'] = not gray
            except:
                pass
        nodes[node.id] = data
        if node.flow_start:
            start.append(node.id)
        for tr in node.transition_out:
            data = {}
            data['name'] = tr.name
            data['source'] = tr.source_node_id.id
            data['target'] = tr.target_node_id.id
            data['notes'] = tr.note
            data['buttons'] = buttons = []
            for b in tr.action_ids:
                button = {}
                button['name'] = b.name
                button['state'] = b.state
                button['action'] = b.action
                buttons.append(button)
            data['groups'] = groups = []
            for r in tr.transition_ids:
                if r.group_id:
                    groups.append({'name': r.group_id.name})
            for r in tr.group_ids:
                groups.append({'name': r.name})
            transitions[tr.id] = data

    # now populate resource information
    def update_relatives(nid, ref_id, ref_model):
        # Attach the concrete record (ref_id, ref_model) to node nid, then
        # recurse into neighbouring nodes, following relational fields to
        # find each neighbour's own record.
        relatives = []
        for dummy, tr in transitions.items():
            if tr['source'] == nid:
                relatives.append(tr['target'])
            if tr['target'] == nid:
                relatives.append(tr['source'])
        if not ref_id:
            nodes[nid]['res'] = False
            return
        nodes[nid]['res'] = resource = {'id': ref_id, 'model': ref_model}
        refobj = pool.get(ref_model).browse(cr, uid, ref_id, context=context)
        fields = pool.get(ref_model).fields_get(cr, uid, context=context)
        # check for directory_id from inherited from document module
        if nodes[nid].get('directory_id', False):
            resource['directory'] = self.pool.get('document.directory').get_resource_path(cr, uid, nodes[nid]['directory_id'], ref_model, ref_id)
        resource['name'] = refobj.name_get(context)[0][1]
        resource['perm'] = pool.get(ref_model).perm_read(cr, uid, [ref_id], context)[0]
        for r in relatives:
            node = nodes[r]
            # 'res' acts as a visited marker, terminating the recursion.
            if 'res' not in node:
                for n, f in fields.items():
                    if node['model'] == ref_model:
                        update_relatives(r, ref_id, ref_model)
                    elif f.get('relation') == node['model']:
                        rel = refobj[n]
                        if rel and isinstance(rel, list):
                            rel = rel[0]
                        try:
                            # XXX: rel has been reported as string (check it)
                            _id = (rel or False) and rel.id
                            _model = node['model']
                            update_relatives(r, _id, _model)
                        except:
                            pass

    if res_id:
        # Seed the recursion from the first non-gray node matching the
        # current record's model (or any active node).
        for nid, node in nodes.items():
            if not node['gray'] and (node['active'] or node['model'] == res_model):
                update_relatives(nid, res_id, res_model)
                break

    # calculate graph layout
    g = tools.graph(nodes.keys(), map(lambda x: (x['source'], x['target']), transitions.values()))
    g.process(start)
    g.scale(*scale)
    #g.scale(100, 100, 180, 120)
    graph = g.result_get()

    # fix the height problem: shift all nodes up so the minimum y lands
    # near 10 (miny == -1 is the "unset" sentinel on the first iteration)
    miny = -1
    for k,v in nodes.items():
        x = graph[k]['x']
        y = graph[k]['y']
        if miny == -1:
            miny = y
        miny = min(y, miny)
        v['x'] = x
        v['y'] = y
    for k, v in nodes.items():
        y = v['y']
        v['y'] = min(y - miny + 10, y)
    # Clients expect string keys (e.g. for JSON transport).
    nodes = dict([str(n_key), n_val] for n_key, n_val in nodes.iteritems())
    transitions = dict([str(t_key), t_val] for t_key, t_val in transitions.iteritems())
    return dict(name=name, resource=resource, state=state, perm=perm, notes=notes, nodes=nodes, transitions=transitions)
# E.g. Laplacian has an outpu L argument, while Ricci has output R and 2 temps. # # This rather extreme policy will also speed up the code, since no large memory # will need to be allocated in the main loop. # # Furthermore, we can ensure that all matrices are properly aligned for # vectorized SIMD operations. # # 5 matrices L, Ricci, mat1, mat2, sqdist = get_matrices(sqdist, 4) oldsqdist = L sanitize(sqdist, rescale, np.inf, 1.0, temp=mat1) graph(threshold, pointset, sqdist, ax, dim) Laplacian(sqdist, t, L) coarseRicci(L, sqdist, Ricci, mat1, mat2) # print 'initial distance' # print sqdist # print 'initial Ricci' # print Ricci applyRicci(sqdist, eta, T, Ricci, mode='sym') sanitize(sqdist, rescale, CLIP, 1.0, temp=mat1) graph(threshold, pointset, sqdist, ax, dim) clustered = False