def create_nodes_from_list(node_list, edges_list=()):
    """Generate Node objects and their connections from tuples.

    Parameters
    ----------
    node_list: list
        Tuples of (name, coords, destinations, links), where `links` lists
        the names of the nodes this node connects to.
    edges_list: list
        Optional edge data consulted via ``get_edge_xy`` per connection.

    Returns
    -------
    dict
        Mapping of node name -> Node, with all connections applied.
    """
    nodes = {}
    for name, coords, destinations, _links in node_list:
        node = Node(name, destinations=destinations, coords=coords)
        nodes[node.name] = node
    # name -> links, taken from the 4th element of each tuple
    connection_dict = {entry[0]: entry[3] for entry in node_list}
    for name, node in nodes.items():
        for conn_name in connection_dict[name]:
            node.connect_to(nodes[conn_name],
                            xy=get_edge_xy(node.name, conn_name, edges_list))
    return nodes
def draw_outer_walls(self, color):
    """Draw walls along all four edges of the grid in the given color.

    `color` must be an int palette index in [0, 64).
    """
    # isinstance instead of `type(color) == int` (idiomatic type check)
    assert isinstance(color, int)
    assert 0 <= color < 64
    # horizontal edges: bottom row and top row
    for x in range(self.grid.width):
        self.draw_bottom_wall(Node(x, 0), color)
        self.draw_top_wall(Node(x, self.grid.height - 1), color)
    # vertical edges: left column and right column
    for y in range(self.grid.height):
        self.draw_left_wall(Node(0, y), color)
        self.draw_right_wall(Node(self.grid.width - 1, y), color)
def adduseropts(user):
    """Build and return the per-user upload option metafield nodes."""
    fields = []
    # one "metafield" node per upload option, same order as before
    for node_name, label in (("upload.type_image", "image_schema"),
                             ("upload.type_text", "text_schema")):
        field = Node(node_name, "metafield")
        field.set("label", label)
        field.set("type", "text")
        fields.append(field)
    db.session.commit()
    return fields
def generateMask(node):
    """(Re)build the search mask for the main content type of `node`.

    Clears the mask's existing fields, then adds one "searchmaskitem"
    per searchable metafield; "union" fields expand to their members.
    Returns the mask node, or None if no main content type exists.
    """
    mask = getMask(node)
    maintype = getMainContentType(node)
    if not maintype:
        return
    # clean up: iterate over a snapshot — removing from mask.children
    # while iterating it directly skips every other element (original bug,
    # flagged by the old "#todo this also needs to be fixed" comment)
    for field in list(mask.children):
        mask.children.remove(field)
    allfields_parent = maintype
    allfields = maintype.metafields.all()
    allfieldnames = [mf.name for mf in allfields]
    for metafield in maintype.getMetaFields("s"):
        d = metafield.get("label")
        if not d:
            d = metafield.getName()
        new_maskitem = Node(d, type="searchmaskitem")
        mask.children.append(new_maskitem)
        if metafield.get("type") == "union":
            # union fields expand to the member fields in their valuelist
            for t in metafield.get("valuelist").split(";"):
                if t and t in allfieldnames:
                    new_maskitem.children.append(
                        allfields_parent.children.filter_by(name=t).one())
        else:
            new_maskitem.children.append(metafield)
    db.session.commit()
    return mask
def message_injection(target_node: Node, target_topic: Topic, node_name: str,
                      message: Message):
    """
    Inject data into a topic

    :param target_node: The node you want to inject data into. It could be a list
    :param target_topic: The Topic you want to inject data into
    :param node_name: The name to give for this script.
    :param message: The message to inject.
    :raises StateException: if target_node is not subscribed to target_topic.
    :return:
    """
    # TODO: Create a connection here
    injection_url = "http://localhost:12345"
    # TODO: If there is no node we should instead publish to all
    (_, _, target_bus) = target_node.get_sub_list(node_name=node_name)
    # collect the node's subscriptions that match the target topic name
    subscriber_list = [s for s in target_node.sub_topics
                       if target_topic.name in s]
    # BUG FIX: original used `len(...) is 0`, which tests object identity,
    # not value; use an emptiness check instead.
    if not subscriber_list:
        raise StateException("Node is not subscribed to topic ")
    else:
        # TODO: THIS SHOULDN'T DO ANYTHING
        for subscriber in subscriber_list:
            # TODO: CONSIDER KEEPING THE OTHERS
            target_node.server.publisherUpdate(node_name, subscriber.name,
                                               [injection_url])
    target_topic.publish(message=message)
def test_parse_find_node_response():
    """A FindNodeResponse round-trips through protobuf with its nodes intact."""
    nodes = [Node(addr='localhost', port=port, nodeid=ID(port))
             for port in range(5)]
    response = msg.FindNodeResponse(b'', nodes[0:]).finalize(nodes[0])
    parsed = msg.Message.parse_protobuf(response)
    assert parsed.nodes == nodes[0:]
def test_serialize(self):
    """Serialize/deserialize must preserve every attribute and equality."""
    original = Node("foo")
    original.addAttribute(BoolAttribute("testAttr", True))
    original.addAttribute(StringAttribute("foo", "bar"))
    restored = Node.deserialize(original.serialize())
    for attribute in original.attributes:
        self.assertEqual(original[attribute], restored[attribute])
    self.assertEqual(original, restored)
def getEditModuleHierarchy(typename):
    """Build the edit-menu hierarchy for the node class named `typename`.

    Returns a dict mapping a menu index (-1 for unassigned entries) to the
    ordered list of module names for that menu.  Returns {} for unknown
    typestrings and for the "root" type.
    """
    _menu = {}
    menus = {}
    try:
        nodeclass = Node.get_class_for_typestring(typename.lower())
    except KeyError:
        return {}
    if typename == "root":
        return {}
    _items = {}
    menu_str = get_edit_menu_tabs(nodeclass)
    if menu_str != "":
        menus[nodeclass.name] = parseMenuString(menu_str)
    # NOTE(review): menus[nodeclass.name] is only populated when menu_str is
    # non-empty, but it is read unconditionally below — may KeyError; confirm.
    _menu = {}
    _menu[-1] = []
    editModules = getEditModules()  # modules installed in system
    for module in editModules:
        if module.startswith("menu"):
            # a menu container: find its position among the parsed menus
            active = -1
            for m in menus[nodeclass.name]:
                if m.getName().endswith(module):
                    active = menus[nodeclass.name].index(m)
                    break
            if active not in _menu.keys():
                _menu[active] = []
            _menu[active].append(module)
        else:
            # a plain module: locate the menu containing it as an item,
            # remembering its position inside that menu
            active = -1
            for m in menus[nodeclass.name]:
                items = m.getItemList()
                for item in items:
                    if item == module:
                        active = menus[nodeclass.name].index(m)
                        if active not in _items.keys():
                            _items[active] = []
                        _items[active].append((module, items.index(item)))
                        break
            if active == -1:
                # unassigned module: collect under index -1
                if active not in _items.keys():
                    _items[active] = []
                _items[active].append((module, 0))
    # merge collected items into their menus, ordered by recorded position
    for key in _menu.keys():
        if key in _items.keys():
            items = _items[key]
            items.sort(lambda x, y: cmp(x[1], y[1]))  # Python 2 cmp-style sort
            for item in items:
                _menu[key].append(item[0])
    return _menu
def fill_holes(self, visited, color):
    """Fill every grid cell not present in `visited` with `color`.

    `color` must be an int palette index in [0, 64).
    """
    # isinstance instead of `type(color) == int` (idiomatic type check)
    assert isinstance(color, int)
    assert 0 <= color < 64
    for x in range(self.grid.width):
        for y in range(self.grid.height):
            node = Node(x, y)
            if node not in visited:
                self.fill(node, color)
def send_imageproperties_xml(req):
    """Answer a Zoomify IMAGE_PROPERTIES request for the node in req.path."""
    nid, data = splitpath(req.path)
    if not Node.req_has_access_to_node_id(nid, u"read", req):
        return 404
    img = get_cached_image_zoom_data(nid)
    properties_xml = """<IMAGE_PROPERTIES WIDTH="%d" HEIGHT="%d" NUMIMAGES="1" VERSION="1.8" TILESIZE="%d"/>""" % (
        img.width, img.height, Image.ZOOM_TILESIZE)
    req.write(properties_xml)
def test_compare(self):
    """Nodes compare by name; an added attribute breaks equality."""
    first_foo = Node("foo")
    second_foo = Node("foo")
    first_bar = Node("bar")
    second_bar = Node("bar")
    second_bar.addAttribute(BoolAttribute("testAttr", True))
    self.assertEqual(first_foo, second_foo)
    self.assertNotEqual(first_foo, first_bar)
    self.assertNotEqual(first_bar, second_bar)
def main():
    """ create a window, add scene objects, then run rendering loop """
    viewer = Viewer()
    shader = Shader("phong.vert", "phong.frag")
    scene_root = Node(transform=rotate((0, 0, 1), 45))
    viewer.add(scene_root)
    light_dir = (0, -1, 0)
    # load every model given on the command line under the rotated root
    meshes = []
    for model_file in sys.argv[1:]:
        meshes.extend(load_phong_mesh(model_file, shader, light_dir))
    scene_root.add(*meshes)
    if len(sys.argv) != 2:
        print('Usage:\n\t%s [3dfile]*\n\n3dfile\t\t the filename of a model in'
              ' format supported by assimp.' % (sys.argv[0], ))
    # start rendering loop
    viewer.run()
def newMask(node):
    """Create a searchmask node with a unique random 8-char name.

    The mask is appended to the searchmask root and its name stored on
    `node` as the "searchmaskname" attribute.  Returns the new mask.
    """
    searchmask_root = q(Searchmasks).one()
    # BUG FIX: the original tested `maskname in children.all()`, comparing a
    # string against Node objects — it could never match.  Compare against
    # the existing child *names* instead to actually guarantee uniqueness.
    existing_names = set(c.name for c in searchmask_root.children)
    while True:
        maskname = unicode(hashlib.md5(ustr(random.random())).hexdigest()[0:8])
        if maskname not in existing_names:
            break
    mask = Node(name=maskname, type=u"searchmask")
    searchmask_root.children.append(mask)
    node.set("searchmaskname", maskname)
    return mask
def check_undefined_nodeclasses(stub_undefined_nodetypes=None,
                                fail_if_undefined_nodetypes=None,
                                ignore_nodetypes=()):
    """Checks if all nodetypes found in the database are defined as subclasses of Node.
    There are 3 modes which can be selected in the config file or by the parameters:

    * fail_if_undefined_nodetypes is True:
      => raise an Exception if a class if missing. Recommended.

    * fail_if_undefined_nodetypes is False, stub_undefined_nodetypes is True:
      => emit a warning that classes are missing and create stub classes directly inheriting from Node.
      Most code will continue to work, but it may fail if the real class overrides methods from Node.

    * fail_if_undefined_nodetypes is False, stub_undefined_nodetypes is False (default):
      => just emit a warning that classes are missing
    """
    from core import Node, db
    known_nodetypes = set(c.__mapper__.polymorphic_identity
                          for c in Node.get_all_subclasses())
    nodetypes_in_db = set(t[0] for t in db.query(Node.type.distinct()))
    # ignore_nodetypes defaults to an immutable tuple now — the original
    # mutable-default `[]` is a classic shared-state pitfall.
    undefined_nodetypes = nodetypes_in_db - known_nodetypes - set(ignore_nodetypes)
    if undefined_nodetypes:
        if fail_if_undefined_nodetypes is None:
            fail_if_undefined_nodetypes = config.get(
                "config.fail_if_undefined_nodetypes", "false") == "true"
        msg = u"some node types are present in the database, but not defined in code. Missing plugins?\n{}".format(
            undefined_nodetypes)
        if fail_if_undefined_nodetypes:
            raise Exception(msg)
        else:
            logg.warn(msg)
            if stub_undefined_nodetypes is None:
                stub_undefined_nodetypes = config.get(
                    "config.stub_undefined_nodetypes", "false") == "true"
            if stub_undefined_nodetypes:
                # create a bare subclass of Node per missing type so most
                # code keeps working
                for t in undefined_nodetypes:
                    clsname = t.capitalize()
                    type(str(clsname), (Node, ), {})
                    logg.info("auto-generated stub class for node type '%s'", clsname)
def getMaskEditorHTML(self, field, metadatatype=None, language=None):
    """Render the "maskeditor" TAL macro for `field`.

    Collects the technical attributes of every datatype of `metadatatype`
    (plus the wanted IPTC tags) and passes them to the template together
    with the field's current values.
    """
    try:
        value = field.getValues().split("\r\n")
    except AttributeError:
        #value = u""
        value = []
    # the template expects at least two value slots
    while len(value) < 2:
        value.append('')
    attr = {}
    if metadatatype:
        for t in metadatatype.getDatatypes():
            content_class = Node.get_class_for_typestring(t)
            node = content_class(name=u'')
            try:
                attr.update(node.getTechnAttributes())
                attr['IPTC'] = get_wanted_iptc_tags()
            except AttributeError:
                # content class without technical attributes: skip it
                logg.exception("attribute error in getMaskEditorHTML, continue")
                continue
    return tal.getTAL("metadata/meta.html",
                      {"value": value, "t_attrs": attr},
                      macro="maskeditor", language=language)
def send_tile(req):
    """Serve one Zoomify image tile for the node addressed in req.path.

    The tile coordinates are encoded in the basename as "<zoom>-<x>-<y>.jpg".
    Returns 404 for missing access, a non-.jpg path or a missing tile,
    and 500 on any error while fetching the tile.
    """
    nid, data = splitpath(req.path)
    if not Node.req_has_access_to_node_id(nid, u"read", req):
        return 404
    if not req.path.endswith(".jpg"):
        logg.error("invalid tile request %s", req.path)
        return 404
    # strip directory prefix and the ".jpg" suffix -> "<zoom>-<x>-<y>"
    jpg = req.path[req.path.rindex("/") + 1:-4]
    zoom, x, y = map(int, jpg.split("-"))
    try:
        img = get_cached_image_zoom_data(nid)
        tile = img.get_tile(zoom, x, y)
        if tile is None:
            return 404
        req.write(tile)
    except:
        # deliberately broad: any failure is reported as a server error
        logg.exception("exception in send_tile")
        return 500
# deliver schemes for given contenttype if req.params.get('action') == 'getschemes': ret = [] for scheme in get_permitted_schemas_for_datatype( req.params.get('contenttype')): ret.append({'id': scheme.name, 'name': scheme.getLongName()}) req.write(json.dumps({'schemes': ret}, ensure_ascii=False)) return None # create node with given type/schema if req.params.get('action') == "createobject": schema = req.params.get('schema') ctype = req.params.get('contenttype') node = Node(name=u"", type=ctype, schema=schema) basenode = q(Node).get(req.params.get('id')) basenode.children.append(node) node.set("creator", user.login_name) node.set( "creationtime", ustr( time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time())))) db.session.commit() res = {'newid': node.id, 'id': req.params.get('id')} req.write(json.dumps(res, ensure_ascii=False)) return None # create node using given identifier (doi, ...) if req.params.get('action') == "obj_from_identifier":
def test_session(session):
    """Tests if the session rolls back correctly and leaves no traces.
    Assert is in the fixture."""
    from core import Node
    new_node = Node(u"name")
    session.add(new_node)
def getContent(req, ids):
    """Handle upload-module AJAX actions ("removefiles", "buildnode").

    Writes a JSON response to `req` and returns None for handled actions.
    "buildnode" turns files previously uploaded to the base node into new
    content nodes, via either the plupload path (per-file scheme params)
    or the classic '|'-separated files parameter.
    """
    user = users.getUserFromRequest(req)
    language = lang(req)

    def get_ids_from_query():
        # helper: comma-joined ids from the request
        ids = get_ids_from_req(req)
        return ",".join(ids)

    if "action" in req.params:
        state = 'ok'
        if req.params.get('action') == "removefiles":
            # drop all files attached to the base node; disk removal is
            # best-effort and only flips state to "error" on failure
            basenode = q(Node).get(req.params.get('id'))
            for f in basenode.files:
                try:
                    os.remove(f.abspath)
                    pass
                except:
                    state = "error"
            basenode.files = []
            db.session.commit()
            req.write(json.dumps({'state': state}, ensure_ascii=False))
            return None

        if req.params.get('action') == "buildnode":  # create nodes
            basenode = q(Node).get(req.params.get('id'))
            newnodes = []
            errornodes = []
            basenodefiles_processed = []
            if req.params.get('uploader', '') == 'plupload':
                # plupload sends one "scheme_<filename>" param per file
                filename2scheme = {}
                for k in req.params:
                    if k.startswith("scheme_"):
                        filename2scheme[
                            k.replace('scheme_', '', 1)] = req.params.get(k)
                for f in basenode.files:
                    filename = f.name
                    if filename in filename2scheme:
                        mimetype = getMimeType(filename)
                        if mimetype[1] == "bibtex":
                            # bibtex import handler
                            try:
                                new_node = importBibTeX(f.abspath, basenode, req=req)
                                newnodes.append(new_node.id)
                                basenodefiles_processed.append(f)
                            except ValueError, e:
                                errornodes.append((filename, unicode(e)))
                        logg.debug("filename: %s, mimetype: %s", filename, mimetype)
                        logg.debug("__name__=%s, func=%s; _m=%s, _m[1]=%s",
                                   __name__, funcname(), mimetype, mimetype[1])
                        content_class = Node.get_class_for_typestring(mimetype[1])
                        node = content_class(name=filename,
                                             schema=filename2scheme[filename])
                        basenode.children.append(node)
                        node.set("creator", user.login_name)
                        node.set("creationtime",
                                 unicode(time.strftime('%Y-%m-%dT%H:%M:%S',
                                                       time.localtime(time.time()))))
                        # set filetype for uploaded file as requested by the content class
                        f.filetype = content_class.get_upload_filetype()
                        node.files.append(f)
                        node.event_files_changed()
                        newnodes.append(node.id)
                        basenodefiles_processed.append(f)
                        basenode.files.remove(f)
                        db.session.commit()
                        logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                  "node is child of base node id=%s (name=%s, type=%s)",
                                  user.login_name, node.id, node.name, node.type, filename,
                                  basenode.id, basenode.name, basenode.type)
            else:
                # classic uploader: '|'-separated filenames plus a single
                # 'type'/'value' (schema) pair in the request
                for filename in req.params.get('files').split('|'):
                    mimetype = getMimeType(filename)
                    logg.debug("... in %s.%s: getMimeType(filename=%s)=%s",
                               __name__, funcname(), filename, mimetype)
                    if mimetype[1] == req.params.get('type') or req.params.get('type') == 'file':
                        for f in basenode.files:
                            # ambiguity here ?
                            if f.abspath.endswith(filename):
                                # bibtex import handler
                                if mimetype[1] == "bibtex" and not req.params.get('type') == 'file':
                                    try:
                                        new_node = importBibTeX(f.abspath, basenode, req=req)
                                        newnodes.append(new_node.id)
                                        basenodefiles_processed.append(f)
                                    except ValueError, e:
                                        errornodes.append((filename, unicode(e)))
                                    db.session.commit()
                                else:
                                    logg.debug("creating new node: filename: %s", filename)
                                    logg.debug("files at basenode: %s",
                                               [(x.getName(), x.abspath) for x in basenode.files])
                                    content_class = Node.get_class_for_typestring(req.params.get('type'))
                                    node = content_class(name=filename,
                                                         schema=req.params.get('value'))
                                    basenode.children.append(node)
                                    node.set("creator", user.login_name)
                                    node.set("creationtime",
                                             unicode(time.strftime('%Y-%m-%dT%H:%M:%S',
                                                                   time.localtime(time.time()))))
                                    # clones to a file with random name
                                    cloned_file = importFileRandom(f.abspath)
                                    # set filetype for uploaded file as requested by the content class
                                    cloned_file.filetype = content_class.get_upload_filetype()
                                    node.files.append(cloned_file)
                                    node.event_files_changed()
                                    newnodes.append(node.id)
                                    basenodefiles_processed.append(f)
                                    logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                              "node is child of base node id=%s (name=%s, type=%s)",
                                              user.login_name, node.id, node.name, node.type, filename,
                                              basenode.id, basenode.name, basenode.type)
                                break  # filename may not be unique
            new_tree_labels = [{'id': basenode.id,
                                'label': getTreeLabel(basenode, lang=language)}]
            # detach processed files from the base node and delete them
            for f in basenodefiles_processed:
                basenode.files.remove(f)
                f_path = f.abspath
                if os.path.exists(f_path):
                    logg.debug("%s going to remove file %s from disk", user.login_name, f_path)
                    os.remove(f_path)
            # NOTE(review): `filename` here is whatever the last loop left
            # bound — confirm this is intended when multiple files were sent
            mime = getMimeType(filename)
            scheme_type = {mime[1]: []}
            for scheme in get_permitted_schemas():
                if mime[1] in scheme.getDatatypes():
                    scheme_type[mime[1]].append(scheme)
                    # break
            db.session.commit()
            # standard file
            content = req.getTAL('web/edit/modules/upload.html',
                                 {'files': [filename], 'schemes': scheme_type},
                                 macro="uploadfileok")
            res = {'state': state,
                   'newnodes': newnodes,
                   'errornodes': errornodes,
                   'new_tree_labels': new_tree_labels,
                   'ret': content}
            res = json.dumps(res, ensure_ascii=False)
            req.write(res)
            return None
def xml_start_element(self, name, attrs):
    """Expat start-element handler for the node-XML import format.

    Handles <nodelist>, <node>, <attribute>, <child> and <file> elements,
    creating Node objects, recording attribute values and child/file
    relations, and de-duplicating nodes already imported (by old id).
    """
    try:
        node = self.nodes[-1]
    except:
        node = None
    if name == "nodelist":
        if "exportversion" in attrs:
            logg.info("starting xml import: %s", attrs)
    elif name == "node":
        self.node_already_seen = False
        parent = node
        try:
            datatype = attrs["datatype"]
        except KeyError:
            # compatibility for old xml files created with mediatum
            t = attrs.get("type")
            if t is not None:
                datatype = t
            else:
                datatype = "directory"
        if "id" not in attrs:
            # no id in the file: invent a pseudo-random one
            attrs["id"] = ustr(random.random())
        old_id = attrs["id"]
        if old_id in self.id2node:
            # node was already created earlier in this import: skip subtree
            node = self.id2node[old_id]
            self.node_already_seen = True
            return
        elif datatype in ["mapping"]:
            # mappings get a suffixed name to avoid clashes
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=(attrs["name"] + "_imported_" + old_id))
        else:
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=attrs["name"])
        # todo: handle access
        #if "read" in attrs:
        #    node.setAccess("read", attrs["read"].encode("utf-8"))
        #if "write" in attrs:
        #    node.setAccess("write", attrs["write"].encode("utf-8"))
        #if "data" in attrs:
        #    node.setAccess("data", attrs["data"].encode("utf-8"))
        if self.verbose:
            logg.info(
                "created node '%s', '%s', '%s', old_id from attr='%s'",
                node.name, node.type, node.id, attrs["id"])
        self.id2node[attrs["id"]] = node
        node.tmpchilds = []
        self.nodes.append(node)
        if self.root is None:
            self.root = node
        return
    elif name == "attribute" and not self.node_already_seen:
        attr_name = attrs["name"]
        if "value" in attrs:
            if attr_name in ["valuelist"]:
                # normalize valuelist separators: newlines become ';'
                node.set(
                    attr_name, attrs["value"].replace("\n\n", "\n").replace(
                        "\n", ";").replace(";;", ";"))
            else:
                node.set(attr_name, attrs["value"])
        else:
            # value arrives as character data; remember which attribute
            self.attributename = attr_name
    elif name == "child" and not self.node_already_seen:
        # child relations are resolved later via the collected old ids
        nid = attrs["id"]
        node.tmpchilds += [nid]
    elif name == "file" and not self.node_already_seen:
        try:
            datatype = attrs["type"]
        except:
            datatype = None
        try:
            mimetype = attrs["mime-type"]
        except:
            mimetype = None
        filename = attrs["filename"]
        node.files.append(
            File(path=filename, filetype=datatype, mimetype=mimetype))
def getMetaEditor(self, item, req):
    """ editor mask for field definition """
    attr = {}
    fields = []
    pidnode = None
    # no pid given: look for an export-mask parent of the item
    if "pid" not in req.params.keys():
        for p in item.getParents():
            try:
                if p.getMasktype() == "export":
                    pidnode = p
                    break
            except:
                continue
    metadatatype = req.params.get("metadatatype")
    # collect technical attributes of all datatypes of this metadatatype
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        attr.update(node.getTechnAttributes())
    if req.params.get("op", "") == "new":
        pidnode = q(Node).get(req.params.get("pid"))
    if hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() in ("vgroup", "hgroup"):
        # XXX: getAllChildren does not exist anymore, is this dead code?
        for field in pidnode.getAllChildren():
            if field.getType().getName() == "maskitem" and field.id != pidnode.id:
                fields.append(field)
    else:
        # collect the maskitems of the mask whose id matches pid
        for m in metadatatype.getMasks():
            if ustr(m.id) == ustr(req.params.get("pid")):
                for field in m.getChildren():
                    fields.append(field)
    fields.sort(lambda x, y: cmp(x.getOrderPos(), y.getOrderPos()))  # Python 2 cmp sort
    add_values = []
    val = u""
    if item.getField():
        val = item.getField().getValues()
    db.session.commit()
    # one editor snippet per known metafield type
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        add_values.append(f.getMaskEditorHTML(val, metadatatype=metadatatype,
                                              language=lang(req)))
    metafields = metadatatype.getMetaFields()
    metafields.sort(lambda x, y: cmp(x.getName().lower(), y.getName().lower()))
    metafieldtypes = getMetaFieldTypes().values()
    metafieldtypes.sort(lambda x, y: cmp(translate(x.getName(), request=req).lower(),
                                         translate(y.getName(), request=req).lower()))
    # hidden divs carrying each metafield's description for the JS editor
    add_descriptions = []
    for metafield in metafields:
        add_descriptions.append('<div style="display:none" id="div_%d" name="%s" description="%s"/>' %
                                (metafield.id, metafield.name, metafield.getDescription()))
    v = {}
    v["op"] = req.params.get("op", "")
    v["pid"] = req.params.get("pid", "")
    v["item"] = item
    v["metafields"] = metafields
    v["fields"] = fields
    v["fieldtypes"] = metafieldtypes
    v["dateoption"] = dateoption
    v["t_attrs"] = attr
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["add_values"] = add_values
    v["add_descriptions"] = add_descriptions
    v["translate"] = translate
    v["language"] = lang(req)
    # export masks get their own macro plus the configured mappings
    if pidnode and hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() == "export":
        v["mappings"] = []
        for m in pidnode.getExportMapping():
            v["mappings"].append(q(Node).get(m))
        return req.getTAL("schema/mask/field.html", v,
                          macro="metaeditor_" + pidnode.getMasktype())
    else:
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor")
def action(req):
    """Dispatch edit-tree AJAX actions and write the response.

    Supported actions: "getlabels" (refresh fancytree labels),
    "addcontainer" (create collection/directory), "move"/"copy"/"delete"
    (on the id list) and "clear_trash".  Responses are JSON except for
    error/permission messages.
    """
    global editModules
    language = lang(req)
    user = current_user
    trashdir = user.trash_dir
    uploaddir = user.upload_dir
    trashdir_parents = trashdir.parents
    action = req.params.get("action", "")
    changednodes = {}
    if not user.is_editor:
        req.write("""permission denied""")
        req.setStatus(httpstatus.HTTP_FORBIDDEN)
        return
    if "tab" in req.params:
        # delegate to the requested edit module tab
        tab = req.params.get("tab").split("_")[-1]
        return editModules[tab].getContent(req, [req.params.get("id")])
    if action == "getlabels":
        nids = req.params.get('ids', [])
        nids = [nid.strip() for nid in nids.split(',') if nid.strip()]
        # always include trash and upload dirs in the refresh set
        for nid in set(nids + [_n.id for _n in [trashdir, uploaddir]]):
            try:
                changednodes[nid] = getTreeLabel(q(Node).get(nid), language)
            except:
                logg.exception("exception ignored: could not make fancytree label for node %s", nid)
        res_dict = {'changednodes': changednodes}
        req.write(json.dumps(res_dict, indent=4, ensure_ascii=False))
        return
    else:
        # all 'action's except 'getlabels' require a base dir (src)
        # but expanding of a subdir in the edit-tree via fancytree has
        # not a srcid, so no action is necessary
        srcid = req.params.get("src")
        if not srcid:
            return
        try:
            src = q(Node).get(srcid)
        except:
            req.writeTAL(
                "web/edit/edit.html", {"edit_action_error": srcid},
                macro="edit_action_error")
            return
    if req.params.get('action') == 'addcontainer':
        node = q(Node).get(srcid)
        if not node.has_write_access():
            # deliver errorlabel
            req.writeTALstr(
                '<tal:block i18n:translate="edit_nopermission"/>', {})
            return
        # create new container
        newnode_type = req.params.get('type')
        if newnode_type in ['bare_collection', 'bare_directory']:
            newnode_type = newnode_type.replace('bare_', '')
        translated_label = t(lang(req), 'edit_add_' + newnode_type)
        if translated_label.startswith('edit_add_'):
            # no translation found: fall back to a generic label + typename
            translated_label = t(
                lang(req), 'edit_add_container_default') + newnode_type
        content_class = Node.get_class_for_typestring(newnode_type)
        newnode = content_class(name=translated_label)
        node.children.append(newnode)
        newnode.set("creator", user.login_name)
        newnode.set("creationtime", unicode(
            time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time()))))
        newnode.set("nodename", translated_label)  # set attribute named "nodename" to label text
        # place newnode at top of the children by setting the orderpos to the lowest orderpos - 1
        # if the orderpos gets negative, shift the oderpos of all children by incrementing with a positive number
        # make this number large enough, to avoid the next shifting of orderpos if more containers are added
        if len(node.children) == 1:
            # newnode is the only one child
            newnode.orderpos = 1000
        else:
            newnode.orderpos = node.children[0].orderpos
            newnode.orderpos = min([c.orderpos for c in node.children]) - 1
            while newnode.orderpos < 0:
                # in order to avoid negative orderpos, add a positive number to the orderpos of all children
                # make this number large enough, so there is no shift of orderpos is necessary if the next
                # container is added to the children
                for c in node.children:
                    c.orderpos += 1000
        db.session.commit()
        req.params["dest"] = newnode.id
        label = getTreeLabel(newnode, lang=language)
        fancytree_nodedata = {
            'title': label,
            'key': newnode.id,
            'isLazy': False,
            'isFolder': True,
            'icon': getEditorIconPath(newnode),
            'readonly': 0,
            'tooltip': '%s (%s)' % (label, newnode.id),
            'children': [],
        }
        req.write(json.dumps(fancytree_nodedata, ensure_ascii=False))
        logg.info("%s adding new container %s (%s) to %s (%s, %s)",
                  user.login_name, newnode.id, newnode.type, node.id, node.name, node.type)
        return
    try:
        destid = req.params.get("dest", None)
        dest = q(Node).get(destid)
        folderid = destid
    except:
        destid = None
        dest = None
        folderid = srcid
    idlist = getIDs(req)
    mysrc = None
    errorobj = None
    # try:
    if action == "clear_trash":
        for n in trashdir.children:
            # if trashdir is it's sole parent, remove file from disk
            # attn: this will not touch files from children of deleted
            # containers
            if len(n.parents) == 1:
                logg.info("%s going to remove files from disk for node %s (%s, %s)",
                          user.login_name, n.id, n.name, n.type)
                for f in n.files:
                    # dangerous ??? check this
                    f_path = f.abspath
                    if os.path.exists(f_path):
                        logg.info("%s going to remove file %r from disk", user.login_name, f_path)
                        os.remove(f_path)
            trashdir.children.remove(n)
        db.session.commit()
        dest = trashdir
        changednodes[trashdir.id] = 1
        _parent_descr = [(p.name, p.id, p.type) for p in trashdir_parents]
        logg.info("%s cleared trash folder with id %s, child of %s",
                  user.login_name, trashdir.id, _parent_descr)
        # return
    else:
        for id in idlist:
            obj = q(Node).get(id)
            mysrc = src
            # containers are moved relative to their first parent
            if isDirectory(obj) or isCollection(obj):
                mysrc = obj.parents[0]
            if action == "delete":
                if mysrc.has_write_access() and obj.has_write_access():
                    if mysrc.id != trashdir.id:
                        mysrc.children.remove(obj)
                        changednodes[mysrc.id] = 1
                        trashdir.children.append(obj)
                        db.session.commit()
                        changednodes[trashdir.id] = 1
                        logg.info("%s moved to trash bin %s (%s, %s) from %s (%s, %s)",
                                  user.login_name, obj.id, obj.name, obj.type,
                                  mysrc.id, mysrc.name, mysrc.type)
                        dest = mysrc
                else:
                    logg.info("%s has no write access for node %s", user.login_name, mysrc.id)
                    req.writeTALstr(
                        '<tal:block i18n:translate="edit_nopermission"/>', {})
                    dest = mysrc
            elif action in ["move", "copy"]:
                if (dest != mysrc) and \
                        mysrc.has_write_access() and \
                        dest.has_write_access() and \
                        obj.has_write_access() and \
                        isinstance(dest, Container):
                    if not dest.is_descendant_of(obj):
                        if action == "move":
                            mysrc.children.remove(obj)
                            changednodes[mysrc.id] = 1  # getLabel(mysrc)
                        dest.children.append(obj)
                        changednodes[dest.id] = 1  # getLabel(dest)
                        db.session.commit()
                        if logg.isEnabledFor(logging.INFO):
                            _what = "%s %s %r (%s, %s) " % (
                                user.login_name, action, obj.id, obj.name, obj.type)
                            _from = "from %s (%s, %s) " % (
                                mysrc.id, mysrc.name, mysrc.type)
                            _to = "to %s (%s, %s)" % (
                                dest.id, dest.name, dest.type)
                            logg.info(_what + _from + _to)
                    else:
                        logg.error("%s could not %s %s from %s to %s",
                                   user.login_name, action, obj.id, mysrc.id, dest.id)
                else:
                    return
                mysrc = None
    if not mysrc:
        mysrc = src
    if action in ["move", "copy", "delete", "clear_trash"]:
        # answer with fresh labels for every node we touched
        for nid in changednodes:
            try:
                changednodes[nid] = getTreeLabel(
                    q(Node).get(nid), lang=language)
            except:
                logg.exception("exception ignored: could not make fancytree label for node %s", nid)
        res_dict = {'changednodes': changednodes}
        req.write(json.dumps(res_dict, indent=4, ensure_ascii=False))
    else:
        try:
            if dest is not None:
                req.write(dest.id)
            else:
                req.write('no-node-id-specified (web.edit.edit.action)')
        except:
            req.write('no-node-id-specified (web.edit.edit.action)')
            logg.exception('exception ignored, no-node-id-specified (web.edit.edit.action)')
    return
def test_data_get_class_for_typestring():
    """The "data" typestring must resolve to the Data content class."""
    from contenttypes import Data
    resolved_class = Node.get_class_for_typestring("data")
    assert resolved_class is Data
def FieldDetail(req, pid, id, err=0):
    """Render the admin editor for one metafield of metadatatype `pid`.

    id == "" with err == 0 creates a fresh empty field; a non-empty id
    loads that field for editing; otherwise (err != 0) the submitted form
    values are re-applied so the user can correct them.
    """
    _option = ""
    # option checkboxes arrive as request params named "option_<char>"
    for key in req.params.keys():
        if key.startswith("option_"):
            _option += key[7]
    if err == 0 and id == "":
        # new field
        field = Metafield(u"")
        db.session.commit()
    elif id != "":
        # edit field
        field = q(Metadatatype).get(pid).children.filter_by(name=id, type=u'metafield').scalar()
    else:
        # error filling values
        _fieldvalue = ""
        if req.params.get('mtype', '') + "_value" in req.params.keys():
            _fieldvalue = ustr(req.params[req.params.get('mtype', '') + "_value"])
        if (req.params.get("mname") == ""):
            field = Metafield(req.params.get("orig_name"))
        else:
            field = Metafield(req.params.get("mname"))
        field.setLabel(req.params.get("mlabel"))
        field.setOrderPos(req.params.get("orderpos"))
        field.setFieldtype(req.params.get("mtype"))
        field.setOption(_option)
        field.setValues(_fieldvalue)
        field.setDescription(req.params.get("mdescription"))
        db.session.commit()
    attr = {}
    metadatatype = getMetaType(pid)
    # collect technical attributes from every datatype of this metadatatype
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        try:
            attr.update(node.getTechnAttributes())
        except AttributeError:
            logg.exception("attribute error in FieldDetail, continue")
            continue
    # all non-union fields of the metadatatype, keyed by name
    metafields = {}
    for fields in getFieldsForMeta(pid):
        if fields.getType() != "union":
            metafields[fields.name] = fields
    v = getAdminStdVars(req)
    v["metadatatype"] = metadatatype
    v["metafield"] = field
    v["error"] = err
    v["fieldtypes"] = getMetaFieldTypeNames()
    v["dateoptions"] = dateoption
    v["datatypes"] = attr
    v["requiredoptions"] = requiredoption
    v["fieldoptions"] = fieldoption
    v["metafields"] = metafields
    v["filtertype"] = req.params.get("filtertype", "")
    v["actpage"] = req.params.get("actpage")
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["url_targets"] = {"selbes Fenster": "same", "neues Fenster": "_blank"}
    v["valuelist"] = ("", "", "", "")
    if field.getFieldtype() == "url":
        # url fields need exactly 4 value slots; pad with empty strings
        v["valuelist"] = field.getValueList()
        while len(v["valuelist"]) != 4:
            v["valuelist"].append("")
    else:
        v["valuelist"] = field.getValueList()
    v["field"] = None
    if field.id:
        v["field"] = field
    v["adminfields"] = []
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        # some field-type editors additionally accept the raw attribute dict
        if 'attr_dict' in inspect.getargspec(f.getMaskEditorHTML).args:
            attr_dict = dict(field.attrs.items())
            v["adminfields"] .append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                         language=lang(req), attr_dict=attr_dict))
        else:
            v["adminfields"] .append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                         language=lang(req)))
    db.session.commit()
    return req.getTAL("web/admin/modules/metatype_field.html", v, macro="modify_field")
def editModuleActions(req):
    """Apply an edit-menu configuration action for a datatype.

    Recognized request keys: "editmodules_default" (reset), "del|…",
    "show|…" (add menu), "move|…", "up|…", "down|…".  The resulting menu
    string is stored in the root node's "edit.menu.<datatype>" system
    attribute.
    """
    root = q(Root).one()
    datatype = req.params.get("datatype", "").lower()
    try:
        nodeclass = Node.get_class_for_typestring(datatype)
    except KeyError:
        logg.error("type %s not found", datatype)
        return
    for key in req.params.keys():
        if key == "editmodules_default":
            # reset to the class' default menu layout
            root.system_attrs["edit.menu." + datatype] = nodeclass.get_default_edit_menu_tabs()
            break
        elif key.startswith("del|"):
            # remove a module (or whole menu) and re-serialize the layout
            ret = ""
            m = key.split("|")[-1][:-2]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if k >= 0 and not (m.startswith("menu") and items[k][0] == m):
                    i = [item for item in items[k] if item != m]
                    if len(i) > 1:
                        ret += i[0] + "(" + ";".join(i[1:]) + ");"
                    else:
                        ret += i[0] + "();"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("show|"):  # add menu
            item = key.split("|")[-1][:-2]
            menu_str = get_edit_menu_tabs(nodeclass) + ";" + item + "()"
            root.system_attrs["edit.menu." + datatype] = menu_str
            break
        elif key.startswith("move|") and req.params.get(key) != "":
            # move item to menu
            dest = req.params.get(key)
            dest_id = -1
            mod = key.split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if dest in items[k]:
                    dest_id = k
                if mod in items[k]:
                    items[k].remove(mod)
            items[dest_id].append(mod)
            ret = ""
            for k in items:
                if len(items[k]) == 0 or k < 0:
                    pass
                elif items[k][0].startswith("menu"):
                    ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("up|"):  # move module or module item up
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:  # menu
                    # first element: swap the whole menu with the previous one
                    src = items[k]
                    items[k] = items[k - 1]
                    items[k - 1] = src
                    break
                elif m in items[k] and items[k].index > 0:  # menu item
                    # NOTE(review): `items[k].index` is a bound method and is
                    # always truthy — probably meant items[k].index(m) > 0;
                    # confirm before changing.
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id - 1]
                    items[k][src_id - 1] = m
                    break
            ret = ""
            for k in items:
                if len(items[k]) == 0 or k < 0:
                    pass
                elif items[k][0].startswith("menu"):
                    ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("down|"):  # move module or module item down
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:  # menu
                    src = items[k]
                    items[k] = items[k + 1]
                    items[k + 1] = src
                    break
                elif m in items[k] and items[k].index > 0:  # menu item
                    # NOTE(review): same bound-method truthiness issue as in
                    # the "up|" branch above — confirm intent.
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id + 1]
                    items[k][src_id + 1] = m
                    break
            ret = ""
            for k in items:
                if len(items[k]) == 0 or k < 0:
                    pass
                elif items[k][0].startswith("menu"):
                    ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
    db.session.commit()
def xml_start_element(self, name, attrs):
    """lxml target-parser start-element callback for the mediatum XML import.

    Dispatches on the element name: "nodelist" (document root), "node"
    (creates a Node and registers it in self.id2node), "attribute", "child"
    (records the old child id in node.tmpchilds for later linking) and
    "file". Elements whose old id was already imported are skipped via
    self.node_already_seen.
    """
    # `node` is the most recently created node, or None before the first one.
    try:
        node = self.nodes[-1]
    except:
        node = None
    if name == "nodelist":
        if "exportversion" in attrs:
            logg.info("starting xml import: %s", attrs)
    elif name == "node":
        self.node_already_seen = False
        parent = node
        try:
            datatype = attrs["datatype"]
        except KeyError:
            # compatibility for old xml files created with mediatum
            t = attrs.get("type")
            if t is not None:
                datatype = t
            else:
                datatype = "directory"
        if "id" not in attrs:
            # nodes without an id get a synthetic random one
            attrs["id"] = ustr(random.random())
        old_id = attrs["id"]
        if old_id in self.id2node:
            # duplicate id: reuse the already-imported node, skip children
            node = self.id2node[old_id]
            self.node_already_seen = True
            return
        elif datatype in ["mapping"]:
            # mappings get a suffixed name to avoid clashing with existing ones
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=(attrs["name"] + "_imported_" + old_id))
        else:
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=attrs["name"])
        # todo: handle access
        #if "read" in attrs:
        #    node.setAccess("read", attrs["read"].encode("utf-8"))
        #if "write" in attrs:
        #    node.setAccess("write", attrs["write"].encode("utf-8"))
        #if "data" in attrs:
        #    node.setAccess("data", attrs["data"].encode("utf-8"))
        if self.verbose:
            logg.info("created node '%s', '%s', '%s', old_id from attr='%s'",
                      node.name, node.type, node.id, attrs["id"])
        self.id2node[attrs["id"]] = node
        # child ids are collected here and resolved after the whole parse
        node.tmpchilds = []
        self.nodes.append(node)
        if self.root is None:
            self.root = node
        return
    elif name == "attribute" and not self.node_already_seen:
        attr_name = attrs["name"]
        if "value" in attrs:
            if attr_name in ["valuelist"]:
                # normalize valuelist separators: newlines -> ";"
                node.set(attr_name, attrs["value"].replace("\n\n", "\n").replace("\n", ";").replace(";;", ";"))
            else:
                node.set(attr_name, attrs["value"])
        else:
            # value arrives as character data; remember which attribute it is for
            self.attributename = attr_name
    elif name == "child" and not self.node_already_seen:
        nid = attrs["id"]
        node.tmpchilds += [nid]
    elif name == "file" and not self.node_already_seen:
        try:
            datatype = attrs["type"]
        except:
            datatype = None
        try:
            mimetype = attrs["mime-type"]
        except:
            mimetype = None
        filename = attrs["filename"]
        node.files.append(File(path=filename, filetype=datatype, mimetype=mimetype))
def getContent(req, ids):
    """Upload edit-module AJAX handler (Python 2).

    Dispatches on req.params['action']: "removefiles" deletes all files of a
    node; "buildnode" turns previously uploaded files attached to a base node
    into new content nodes (bibtex files go through importBibTeX). Responses
    are written to *req* as JSON; returns None for the handled actions.
    """
    user = users.getUserFromRequest(req)
    language = lang(req)

    def get_ids_from_query():
        ids = get_ids_from_req(req)
        return ",".join(ids)

    if "action" in req.params:
        state = 'ok'
        if req.params.get('action') == "removefiles":
            basenode = q(Node).get(req.params.get('id'))
            for f in basenode.files:
                try:
                    os.remove(f.abspath)
                    pass
                except:
                    # best effort: report "error" but keep going
                    state = "error"
            basenode.files = []
            db.session.commit()
            req.write(json.dumps({'state': state}, ensure_ascii=False))
            return None
        if req.params.get('action') == "buildnode":  # create nodes
            basenode = q(Node).get(req.params.get('id'))
            newnodes = []
            errornodes = []
            basenodefiles_processed = []
            if req.params.get('uploader', '') == 'plupload':
                # plupload sends one "scheme_<filename>" parameter per file
                filename2scheme = {}
                for k in req.params:
                    if k.startswith("scheme_"):
                        filename2scheme[k.replace('scheme_', '', 1)] = req.params.get(k)
                # NOTE(review): this loop removes entries from basenode.files
                # while iterating over it — that skips elements; confirm
                # whether basenode.files is a copy-on-iterate collection.
                for f in basenode.files:
                    filename = f.name
                    if filename in filename2scheme:
                        mimetype = getMimeType(filename)
                        if mimetype[1] == "bibtex":
                            # bibtex import handler
                            try:
                                new_node = importBibTeX(f.abspath, basenode)
                                newnodes.append(new_node.id)
                                basenodefiles_processed.append(f)
                            except ValueError, e:
                                errornodes.append((filename, unicode(e)))
                        logg.debug("filename: %s, mimetype: %s", filename, mimetype)
                        logg.debug("__name__=%s, func=%s; _m=%s, _m[1]=%s",
                                   __name__, funcname(), mimetype, mimetype[1])
                        content_class = Node.get_class_for_typestring(mimetype[1])
                        node = content_class(name=filename, schema=filename2scheme[filename])
                        basenode.children.append(node)
                        node.set("creator", user.login_name)
                        node.set("creationtime",
                                 unicode(time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time()))))
                        # set filetype for uploaded file as requested by the content class
                        f.filetype = content_class.get_upload_filetype()
                        node.files.append(f)
                        node.event_files_changed()
                        newnodes.append(node.id)
                        basenodefiles_processed.append(f)
                        basenode.files.remove(f)
                        db.session.commit()
                        logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                  "node is child of base node id=%s (name=%s, type=%s)",
                                  user.login_name, node.id, node.name, node.type, filename,
                                  basenode.id, basenode.name, basenode.type)
            else:
                # legacy uploader: filenames arrive '|'-separated in one parameter
                for filename in req.params.get('files').split('|'):
                    mimetype = getMimeType(filename)
                    logg.debug("... in %s.%s: getMimeType(filename=%s)=%s",
                               __name__, funcname(), filename, mimetype)
                    if mimetype[1] == req.params.get('type') or req.params.get('type') == 'file':
                        for f in basenode.files:
                            # ambiguity here ?
                            if f.abspath.endswith(filename):
                                # bibtex import handler
                                if mimetype[1] == "bibtex" and not req.params.get('type') == 'file':
                                    try:
                                        new_node = importBibTeX(f.abspath, basenode)
                                        newnodes.append(new_node.id)
                                        basenodefiles_processed.append(f)
                                    except ValueError, e:
                                        errornodes.append((filename, unicode(e)))
                                    db.session.commit()
                                else:
                                    logg.debug("creating new node: filename: %s", filename)
                                    logg.debug("files at basenode: %s",
                                               [(x.getName(), x.abspath) for x in basenode.files])
                                    content_class = Node.get_class_for_typestring(req.params.get('type'))
                                    node = content_class(name=filename, schema=req.params.get('value'))
                                    basenode.children.append(node)
                                    node.set("creator", user.login_name)
                                    node.set("creationtime",
                                             unicode(time.strftime('%Y-%m-%dT%H:%M:%S',
                                                                   time.localtime(time.time()))))
                                    # clones to a file with random name
                                    cloned_file = importFileRandom(f.abspath)
                                    # set filetype for uploaded file as requested by the content class
                                    cloned_file.filetype = content_class.get_upload_filetype()
                                    node.files.append(cloned_file)
                                    node.event_files_changed()
                                    newnodes.append(node.id)
                                    basenodefiles_processed.append(f)
                                    logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                              "node is child of base node id=%s (name=%s, type=%s)",
                                              user.login_name, node.id, node.name, node.type, filename,
                                              basenode.id, basenode.name, basenode.type)
                                break  # filename may not be unique
            new_tree_labels = [{'id': basenode.id, 'label': getTreeLabel(basenode, lang=language)}]
            # detach and delete the source files that were turned into nodes
            for f in basenodefiles_processed:
                basenode.files.remove(f)
                f_path = f.abspath
                if os.path.exists(f_path):
                    logg.debug("%s going to remove file %s from disk", user.login_name, f_path)
                    os.remove(f_path)
            # NOTE(review): `filename` here is whatever value the last loop
            # iteration left behind — only meaningful for single-file uploads.
            mime = getMimeType(filename)
            scheme_type = {mime[1]: []}
            for scheme in get_permitted_schemas():
                if mime[1] in scheme.getDatatypes():
                    scheme_type[mime[1]].append(scheme)
                    # break
            db.session.commit()
            # standard file
            content = req.getTAL('web/edit/modules/upload.html',
                                 {'files': [filename], 'schemes': scheme_type},
                                 macro="uploadfileok")
            res = {'state': state,
                   'newnodes': newnodes,
                   'errornodes': errornodes,
                   'new_tree_labels': new_tree_labels,
                   'ret': content}
            res = json.dumps(res, ensure_ascii=False)
            req.write(res)
            return None
def create(self, **kwargs):
    """Create a new node in the underlying graph store.

    Wraps a freshly created backend node in a Node, applies *kwargs*
    as its initial properties, and returns the wrapper.
    """
    wrapped = Node(self.__neo__.createNode())
    wrapped.update(**kwargs)
    return wrapped
def show_workflow_step(self, req):
    """Render the workflow "start" step.

    Handles two request modes: "workflow_start" creates a new content node of
    the selected schema under this step (guarded by the internal '_workflow'
    group and a generated key), then forwards to the next step;
    "workflow_start_auth" re-enters an existing workflow node by id + key.
    Otherwise the start form is rendered.
    """
    typenames = self.get("newnodetype").split(";")
    wfnode = self.parents[0]
    redirect = ""
    message = ""
    # check existence of metadata types listed in the definition of the start node
    mdts = q(Metadatatypes).one()
    for schema in typenames:
        if not mdts.children.filter_by(name=schema.strip().split("/")[-1]).scalar():
            return ('<i>%s: %s </i>') % (schema, t(lang(req), "permission_denied"))
    if "workflow_start" in req.params:
        switch_language(req, req.params.get('workflow_language'))
        # selected_schema has the form "<contenttype>/<schema>"
        content_class = Node.get_class_for_typestring(req.params.get('selected_schema').split('/')[0])
        node = content_class(name=u'', schema=req.params.get('selected_schema').split('/')[1])
        self.children.append(node)
        # create user group named '_workflow' if it doesn't exist
        workflow_group = q(UserGroup).filter_by(name=u'_workflow').scalar()
        if workflow_group is None:
            workflow_group = UserGroup(name=u'_workflow',
                                       description=u'internal dummy group for nodes in workflows')
            db.session.add(workflow_group)
        # create access rule with '_workflow' user group
        workflow_rule = get_or_add_access_rule(group_ids=[workflow_group.id])
        special_access_ruleset = node.get_or_add_special_access_ruleset(ruletype=u'read')
        special_access_ruleset.rule_assocs.append(AccessRulesetToRule(rule=workflow_rule))
        node.set("creator", "workflow-" + self.parents[0].name)
        node.set("creationtime", date.format_date())
        node.set("system.wflanguage", req.params.get('workflow_language', req.Cookies.get('language')))
        node.set("key", mkKey())
        node.set("system.key", node.get("key"))  # initial key identifier
        req.session["key"] = node.get("key")
        db.session.commit()
        return self.forwardAndShow(node, True, req)
    elif "workflow_start_auth" in req.params:  # auth node by id and key
        try:
            node = q(Node).get(req.params.get('nodeid'))
            # startkey, but protected
            if node.get('system.key') == req.params.get('nodekey') and node.get('key') != req.params.get('nodekey'):
                message = "workflow_start_err_protected"
            elif node.get('key') == req.params.get('nodekey'):
                redirect = "/pnode?id=%s&key=%s" % (node.id, node.get('key'))
            else:
                message = "workflow_start_err_wrongkey"
        except:
            logg.exception("exception in workflow step start (workflow_start_auth)")
            message = "workflow_start_err_wrongkey"
    types = []
    for a in typenames:
        if a:
            m = getMetaType(a)
            # we could now check m.isActive(), but for now let's
            # just take all specified metatypes, so that edit area
            # and workflow are independent on this
            types += [(m, a)]
    cookie_error = t(lang(req), "Your browser doesn't support cookies")
    js = """
<script language="javascript">
function cookie_test() {
 if (document.cookie=="")
  document.cookie = "CookieTest=Erfolgreich";
 if (document.cookie=="") {
  alert("%s");
 }
}
cookie_test();
</script>""" % cookie_error
    return req.getTAL("workflow/start.html",
                      {'types': types,
                       'id': self.id,
                       'js': js,
                       'starttext': self.get('starttext'),
                       'languages': self.parents[0].getLanguages(),
                       'currentlang': lang(req),
                       'sidebartext': self.getSidebarText(lang(req)),
                       'redirect': redirect,
                       'message': message,
                       'allowcontinue': self.get('allowcontinue'),
                       "csrf": req.csrf_token.current_token,},
                      macro="workflow_start")
# NOTE(review): this chunk begins mid-function — the enclosing handler's
# "def" (and the branch this first `return None` terminates) are outside
# this view. The actions below dispatch on req.params['action'].
        return None

    # deliver schemes for given contenttype
    if req.params.get('action') == 'getschemes':
        ret = []
        for scheme in get_permitted_schemas_for_datatype(req.params.get('contenttype')):
            ret.append({'id': scheme.name, 'name': scheme.getLongName()})
        req.write(json.dumps({'schemes': ret}, ensure_ascii=False))
        return None

    # create node with given type/schema
    if req.params.get('action') == "createobject":
        schema = req.params.get('schema')
        ctype = req.params.get('contenttype')
        node = Node(name=u"", type=ctype, schema=schema)
        basenode = q(Node).get(req.params.get('id'))
        basenode.children.append(node)
        node.set("creator", user.login_name)
        node.set("creationtime", ustr(time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time()))))
        db.session.commit()
        res = {'newid': node.id, 'id': req.params.get('id')}
        req.write(json.dumps(res, ensure_ascii=False))
        return None

    # create node using given identifier (doi, ...)
    if req.params.get('action') == "obj_from_identifier":
        identifier_importer = req.params.get('identifier_importer')
        identifier = req.params.get('identifier')
def __init__(self, fi, verbose=True):
    """Parse a mediatum XML export and materialize its nodes (Python 2).

    :param fi: XML source — either a string containing the document or an
        open file object, which is read and closed here.
    :param verbose: when True, log each import step.
    """
    self.root = None               # first imported node
    self.nodes = []                # all imported nodes, in document order
    self.attributename = None      # pending attribute awaiting char data
    self.id2node = {}              # old XML id -> newly created Node
    self.verbose = verbose
    self.node_already_seen = False

    # lxml target parser: delegate events to our handler methods
    handler = HandlerTarget()
    handler.start = lambda name, attrs: self.xml_start_element(name, attrs)
    handler.end = lambda name: self.xml_end_element(name)
    handler.data = lambda d: self.xml_char_data(d)
    parser = etree.XMLParser(target=handler)

    if type(fi) in [unicode, str]:
        xml = fi
    elif type(fi) in [file]:
        xml = fi.read()
        fi.close()
    else:
        raise NotImplementedError()

    try:
        result = etree.XML(xml, parser)
    except Exception as e:
        logg.exception("\tfile not well-formed. %s", e)
        return

    mappings = q(Mappings).scalar()
    if mappings is None:
        # BUG FIX: the original assigned the return value of
        # children.append() (which is None) to `mappings`, so every later
        # `mappings.children.append(node)` below raised AttributeError.
        mappings = Node(name="mappings", type="mappings")
        q(Root).one().children.append(mappings)
        logg.info("no mappings root found: added mappings root")

    # mappings are global: attach imported ones not already present by name
    for node in self.nodes:
        if node.type == "mapping":
            if node.name not in [n.name for n in mappings.children if n.type == "mapping"]:
                mappings.children.append(node)
                if self.verbose:
                    logg.info("xml import: added mapping id=%s, type='%s', name='%s'",
                              node.id, node.type, node.name)

    if self.verbose:
        logg.info("linking children to parents")
    # resolve the child ids recorded during parsing into real child links
    for node in self.nodes:
        d = {}
        for id in node.tmpchilds:
            child = self.id2node[id]
            node.children.append(child)
            d[child.id] = child
        if self.verbose and node.tmpchilds:
            added = [(cid, d[cid].type, d[cid].name) for cid in d.keys()]
            logg.info("added %d children to node id='%s', type='%s', name='%s': %s",
                      len(node.tmpchilds), node.id, node.type, node.name, added)

    # attributes that stored old node ids must be rewritten to the new ids
    for node in self.nodes:
        if node.type == "maskitem":
            attr = node.get("attribute")
            if attr and attr in self.id2node:
                attr_new = self.id2node[attr].id
                node.set("attribute", attr_new)
                if self.verbose:
                    logg.info("adjusting node attribute for maskitem '%s', name='attribute', value: old='%s' -> new='%s'",
                              node.id, attr, attr_new)
            mappingfield = node.get("mappingfield")
            if mappingfield and mappingfield in self.id2node:
                mappingfield_new = self.id2node[mappingfield].id
                node.set("mappingfield", ustr(mappingfield_new))
                if self.verbose:
                    logg.info("adjusting node attribute for maskitem '%s', name='mappingfield', value old='%s' -> new='%s'",
                              node.id, mappingfield, mappingfield_new)
        elif node.type == "mask":
            exportmapping = node.get("exportmapping")
            if exportmapping and exportmapping in self.id2node:
                exportmapping_new = self.id2node[exportmapping].id
                node.set("exportmapping", ustr(exportmapping_new))
                if self.verbose:
                    logg.info("adjusting node attribute for mask '%s', name='exportmapping':, value old='%s' -> new='%s'",
                              node.id, exportmapping, exportmapping_new)
    logg.info("xml import done")
    db.session.commit()
def test_session_commit2(session):
    """The session fixture must roll back cleanly and leave no traces,
    even after two successive commits."""
    from core import Node
    created = Node(u"name")
    session.add(created)
    for _ in range(2):
        session.commit()
from core import Blockchain, Node
from flask import Flask, jsonify, request

# Minimal HTTP front-end for the blockchain: one Flask app backed by a
# single module-level Blockchain and Node instance.
api = Flask(__name__)
blockchain = Blockchain()
node = Node()


@api.route('/register', methods=['GET'])
def register():
    """Register a peer node passed as form field `node` (registration itself
    is still a TO-DO; currently only validates the parameter)."""
    node = request.form.get('node')
    if node is None:
        return 'Requires `node`', 400
    # TO-DO: blockchain.register(node)
    res = {'message': 'Added node'}
    return jsonify(res)


@api.route('/chain', methods=['GET'])
def chain():
    """Return the full chain as serialized blocks plus its length."""
    chain = [b.serialize() for b in blockchain.chain]
    res = {'blockchain': chain, 'length': len(blockchain.chain)}
    return jsonify(res)


@api.route('/mine', methods=['GET'])
def mine():
    """Mine a new block on top of the current chain head."""
    prev_block = blockchain.peek()
    proof = blockchain.mine(prev_block.key)
    # NOTE(review): handler body is truncated at this chunk boundary — the
    # response construction is not visible here.
def test_session_rollback(session):
    """The session fixture must keep working after a rollback that is
    followed by a commit."""
    from core import Node
    pending = Node(u"name")
    session.add(pending)
    session.rollback()
    session.commit()
def getMetaEditor(self, item, req):
    """ editor mask for field definition """
    # Renders the mask-field editor (Python 2): collects technical
    # attributes for all datatypes of the metadatatype, the sibling fields
    # of the edited item, and per-fieldtype editor HTML, then fills a TAL
    # template. For export masks a dedicated macro is used.
    attr = {}
    fields = []
    pidnode = None
    if "pid" not in req.params.keys():
        # no explicit parent id: look for an export-mask parent of the item
        for p in item.getParents():
            try:
                if p.getMasktype() == "export":
                    pidnode = p
                    break
            except:
                continue
    # NOTE(review): req.params.get() normally returns a string, yet
    # getDatatypes()/getMasks() are called on it below — presumably the
    # caller injects a metadatatype object into params; confirm.
    metadatatype = req.params.get("metadatatype")
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        attr.update(node.getTechnAttributes())
    if req.params.get("op", "") == "new":
        pidnode = q(Node).get(req.params.get("pid"))
        if hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() in ("vgroup", "hgroup"):
            # XXX: getAllChildren does not exist anymore, is this dead code?
            for field in pidnode.getAllChildren():
                if field.getType().getName() == "maskitem" and field.id != pidnode.id:
                    fields.append(field)
        else:
            for m in metadatatype.getMasks():
                if ustr(m.id) == ustr(req.params.get("pid")):
                    for field in m.getChildren():
                        fields.append(field)
    # Python 2 cmp-style sorts
    fields.sort(lambda x, y: cmp(x.getOrderPos(), y.getOrderPos()))
    add_values = []
    val = u""
    if item.getField():
        val = item.getField().getValues()
    db.session.commit()
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        add_values.append(f.getMaskEditorHTML(val, metadatatype=metadatatype, language=lang(req)))
    metafields = metadatatype.getMetaFields()
    metafields.sort(lambda x, y: cmp(x.getName().lower(), y.getName().lower()))
    metafieldtypes = getMetaFieldTypes().values()
    metafieldtypes.sort(lambda x, y: cmp(translate(x.getName(), request=req).lower(),
                                         translate(y.getName(), request=req).lower()))
    # hidden divs carrying each metafield's description for the client side
    add_descriptions = []
    for metafield in metafields:
        add_descriptions.append('<div style="display:none" id="div_%d" name="%s" description="%s"/>' %
                                (metafield.id, metafield.name, metafield.getDescription()))
    v = {}
    v["op"] = req.params.get("op", "")
    v["pid"] = req.params.get("pid", "")
    v["item"] = item
    v["metafields"] = metafields
    v["fields"] = fields
    v["fieldtypes"] = metafieldtypes
    v["dateoption"] = dateoption
    v["t_attrs"] = attr
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["add_values"] = add_values
    v["add_descriptions"] = add_descriptions
    v["translate"] = translate
    v["language"] = lang(req)
    if pidnode and hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() == "export":
        v["mappings"] = []
        for m in pidnode.getExportMapping():
            v["mappings"].append(q(Node).get(m))
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor_" + pidnode.getMasktype())
    else:
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor")
def _serialize_edit_menus(items):
    # Re-serialize the edit-module hierarchy to "menuname(item1;item2);..."
    # form; entries with negative keys or empty menus are dropped.
    ret = ""
    for k in items:
        if len(items[k]) == 0 or k < 0:
            pass
        elif items[k][0].startswith("menu"):
            ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
    return ret[:-1]


def editModuleActions(req):
    """Handle admin actions that rearrange the edit-module menus of a datatype.

    The action is encoded in the *names* of the request parameters
    ("editmodules_default", "del|...", "show|...", "move|...", "up|...",
    "down|..."); the resulting menu definition string is stored in the root
    node's system attribute "edit.menu.<datatype>".
    """
    root = q(Root).one()
    datatype = req.params.get("datatype", "").lower()
    try:
        nodeclass = Node.get_class_for_typestring(datatype)
    except KeyError:
        logg.error("type %s not found", datatype)
        return

    for key in req.params.keys():
        if key == "editmodules_default":
            # reset menus to the class default
            root.system_attrs["edit.menu." + datatype] = nodeclass.get_default_edit_menu_tabs()
            break

        elif key.startswith("del|"):
            # remove a module item; image-button keys carry an ".x"/".y"
            # suffix, hence the [:-2]
            ret = ""
            m = key.split("|")[-1][:-2]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if k >= 0 and not (m.startswith("menu") and items[k][0] == m):
                    i = [item for item in items[k] if item != m]
                    if len(i) > 1:
                        ret += i[0] + "(" + ";".join(i[1:]) + ");"
                    else:
                        ret += i[0] + "();"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break

        elif key.startswith("show|"):  # add menu
            item = key.split("|")[-1][:-2]
            menu_str = get_edit_menu_tabs(nodeclass) + ";" + item + "()"
            root.system_attrs["edit.menu." + datatype] = menu_str
            break

        elif key.startswith("move|") and req.params.get(key) != "":
            # move item to another menu; the parameter value names the target
            dest = req.params.get(key)
            dest_id = -1
            mod = key.split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if dest in items[k]:
                    dest_id = k
                if mod in items[k]:
                    items[k].remove(mod)
            items[dest_id].append(mod)
            root.system_attrs["edit.menu." + datatype] = _serialize_edit_menus(items)
            break

        elif key.startswith("up|"):  # move module or module item up
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:
                    # m names a whole menu: swap it with the previous menu
                    src = items[k]
                    items[k] = items[k - 1]
                    items[k - 1] = src
                    break
                elif m in items[k] and items[k].index(m) > 0:
                    # BUG FIX: original wrote `items[k].index > 0`, comparing
                    # the bound method itself with 0 (never the intended
                    # result), so in-menu moves silently did nothing.
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id - 1]
                    items[k][src_id - 1] = m
                    break
            root.system_attrs["edit.menu." + datatype] = _serialize_edit_menus(items)
            break

        elif key.startswith("down|"):  # move module or module item down
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:
                    # m names a whole menu: swap it with the next menu
                    src = items[k]
                    items[k] = items[k + 1]
                    items[k + 1] = src
                    break
                elif m in items[k] and items[k].index(m) > 0:
                    # BUG FIX: same `items[k].index > 0` defect as the "up|"
                    # branch — compare the index of m, not the method object.
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id + 1]
                    items[k][src_id + 1] = m
                    break
            root.system_attrs["edit.menu." + datatype] = _serialize_edit_menus(items)
            break

    db.session.commit()
def getContent(req, ids):
    """Searchmask edit-module handler.

    Lets an editor configure the search mask of a node: choose the
    searchtype (none/own/parent), and — for "own" — add, delete, rename and
    fold/unfold search fields and their schema subfields. Renders either a
    JSON-wrapped partial (AJAX actions) or the full TAL page.
    """
    user = current_user
    node = q(Node).get(ids[0])
    if not node.has_write_access() or "searchmask" in user.hidden_edit_functions:
        req.setStatus(httpstatus.HTTP_FORBIDDEN)
        return req.getTAL("web/edit/edit.html", {}, macro="access_error")

    # strip the ".x"/".y" suffix that image buttons append to parameter names
    p2 = {}
    for k, v in req.params.items():
        if k.endswith(".x") or k.endswith(".y"):
            p2[k[:-2]] = v
        else:
            p2[k] = v
    req.params = p2

    # requested field operations are encoded in parameter name prefixes
    openfield = None
    delfield = None
    delsubfield = None
    for k, v in req.params.items():
        if k.startswith("open_"):
            openfield = k[5:]
        if k.startswith("del_"):
            delfield = k[4:]
        if k.startswith("delsub_"):
            delsubfield = k[7:]

    searchtype = req.params.get("searchtype", None)
    if not searchtype:
        searchtype = node.get("searchtype")
        if not searchtype:
            searchtype = "none"
            # if a parent has a search mask, use 'inherit'
            n = node
            while len(n.parents):
                n = n.parents[0]
                if n.get("searchtype") == "own":
                    searchtype = "parent"
    node.set("searchtype", searchtype)

    # resolve schema / schemafield parameters to nodes; trailing ";" comes
    # from the client-side list widget
    schema = req.params.get("schema", None)
    schemafield = req.params.get("schemafield", None)
    selectedfieldid = req.params.get("selectedfield", None)
    if schema:
        if schema.endswith(";"):
            schema = schema[:-1]
        schema = q(Node).get(schema)
        if not isinstance(schema, Node):
            schema = None
    if schemafield:
        if schemafield.endswith(";"):
            schemafield = schemafield[:-1]
        schemafield = q(Node).get(schemafield)
        if not isinstance(schemafield, Node):
            schemafield = None
    if schema and schemafield and schemafield not in schema.children:
        schemafield = None
    if schemafield and schemafield.type != "metafield":
        schemafield = None

    fields = None
    selectedfield = None
    isnewfield = False
    createsub = False
    closefield = False

    if searchtype == "own":
        maskname = node.get("searchmaskname")
        mask = q(Searchmasks).one().children.filter_by(name=maskname).scalar()
        if not maskname or mask is None:
            mask = searchmask.generateMask(node)

        if selectedfieldid:  # edit
            selectedfield = q(Node).get(selectedfieldid)
            assert selectedfield in mask.children
            selectedfield.name = req.params["fieldname"]
            if "createsub" in req.params and schemafield:
                createsub = True
                selectedfield.children.append(schemafield)
            if delsubfield:
                selectedfield.children.remove(q(Node).get(delsubfield))

        if req.params.get("isnewfield", "") == "yes":
            # create a new field
            isnewfield = True
            l = mask.children.count()
            mask.children.append(Node("Suchfeld %s" % l, "searchmaskitem"))
        elif delfield:
            # del a field
            delfield = q(Node).get(delfield)
            assert delfield in mask.children
            mask.children.remove(delfield)
        elif openfield:
            # unfold a new field
            selectedfieldid = openfield
        elif "close" in req.params:
            # fold a field
            closefield = True
            selectedfieldid = None

        if selectedfieldid:
            selectedfield = q(Node).get(selectedfieldid)
            if selectedfield not in mask.children:
                # this usually happens if the field was just deleted
                selectedfield = None
        else:
            selectedfield = None

        fields = mask.children.all()
        db.session.commit()

    data = {
        "idstr": ",".join(ids),
        "node": node,
        "searchtype": searchtype,
        "schemas": q(Metadatatypes).one().children.sort_by_name().all(),
        "searchfields": fields,
        "selectedfield": selectedfield,
        "newfieldlink": "edit_content?id=%s&tab=searchmask" % node.id,
        "defaultschemaid": None,
        "defaultfieldid": None,
        "id": req.params.get("id")
    }
    if schema:
        data["defaultschemaid"] = schema.id
    if schemafield:
        data["defaultfieldid"] = schemafield.id
    data["schema"] = schema

    def display(schemafield):
        # template helper: only searchable, non-union metafields are shown
        if not schemafield or schemafield.type != 'metafield':
            return 0
        if not schemafield.Searchfield():
            return 0
        if schemafield.get('type') == 'union':
            return 0
        return 1
    data["display"] = display

    searchtypechanged = False
    if req.params.get("searchtypechanged", "") == "true":
        searchtypechanged = True

    # AJAX requests get a JSON-wrapped partial, full requests the whole page
    if any([openfield, isnewfield, delfield, delsubfield, createsub, schema,
            searchtypechanged, closefield]):
        content = req.getTAL("web/edit/modules/searchmask.html", data, macro="edit_search")
        s = json.dumps({'content': content}, ensure_ascii=False)
        req.write(s)
        return None

    return req.getTAL("web/edit/modules/searchmask.html", data, macro="edit_search")
from core import Node


def change_parameter(master: Node, node_name: str, param_name: str, new_value: str):
    """
    Change a parameter in the parameter server
    :param master: A node object for the ROS master node
    :param node_name: The name to give the node to request information
    :param param_name: The name of the parameter you want to change
    :param new_value: The new value of the parameter
    :return:
    """
    master.server.setParam(node_name, param_name, new_value)


if __name__ == "__main__":
    target_master = Node(ip_addr='localhost', port="11311", notes="Master Node")
    new_value = "pwnd"
    node_name = "/rosploit"
    # BUG FIX: the original called Node.server.getParamNames(...) on the
    # *class*; `server` belongs to the instance constructed above.
    (_, _, param_list) = target_master.server.getParamNames(node_name)
    # overwrite every parameter the master reports
    for param_name in param_list:
        change_parameter(target_master, node_name, param_name, new_value)
# NOTE(review): this chunk begins mid-call — the parser.error(...) opened on
# a line outside this view; only its closing argument is visible here.
            'Try --colors to see all the available colors.')
else:
    # no explicit color requested: pick one at random
    color = random.choice(list(Colors))

# Prevent method 2 with start_at
if args.method == 2 and args.start_at is not None:
    parser.error('Method 2 can\'t be used with --start-at')

# Prevents too high of a tolerance (that would result in an empty map)
if args.method == 1 and (1.0 - args.tolerance) * args.width * args.height < 1:
    parser.error('Tolerance is too high. An empty map would be generated.')

# Configure start position
if args.start_at is None:
    # named corner shortcuts map to the four grid corners
    start_options = {
        'bottom_left': Node(0, 0),
        'bottom_right': Node(args.width - 1, 0),
        'top_left': Node(0, args.height - 1),
        'top_right': Node(args.width - 1, args.height - 1),
    }
    start_position = start_options[args.start]
else:
    x, y = args.start_at[0], args.start_at[1]
    if not 0 <= x < args.width or not 0 <= y < args.height:
        parser.error('Start position values for X and Y must be within '
                     'boundaries: 0 <= X < WIDTH and 0 <= Y < HEIGHT')
    start_position = Node(x, y)

# Configure random seed
if args.seed is None:
    seed = random.randrange(sys.maxsize)
def FieldDetail(req, pid, id, err=0):
    """Render the admin detail form for one metafield (Python 2).

    Depending on the arguments this shows a blank new field (err == 0 and
    no id), loads an existing field of metadatatype *pid* for editing, or —
    after a validation error — rebuilds a transient field from the submitted
    form values so the user's input is not lost.
    """
    # "option_<char>" checkbox parameters are folded into an option string
    _option = ""
    for key in req.params.keys():
        if key.startswith("option_"):
            _option += key[7]

    if err == 0 and id == "":
        # new field
        field = Metafield(u"")
        db.session.commit()
    elif id != "":
        # edit field
        field = q(Metadatatype).get(pid).children.filter_by(name=id, type=u'metafield').scalar()
    else:
        # error filling values: rebuild the field from the submitted form
        _fieldvalue = ""
        if req.params.get('mtype', '') + "_value" in req.params.keys():
            _fieldvalue = ustr(req.params[req.params.get('mtype', '') + "_value"])
        if (req.params.get("mname") == ""):
            field = Metafield(req.params.get("orig_name"))
        else:
            field = Metafield(req.params.get("mname"))
        field.setLabel(req.params.get("mlabel"))
        field.setOrderPos(req.params.get("orderpos"))
        field.setFieldtype(req.params.get("mtype"))
        field.setOption(_option)
        field.setValues(_fieldvalue)
        field.setDescription(req.params.get("mdescription"))
        db.session.commit()

    # collect technical attributes of all content classes of this metadatatype
    attr = {}
    metadatatype = getMetaType(pid)
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        try:
            attr.update(node.getTechnAttributes())
        except AttributeError:
            logg.exception("attribute error in FieldDetail, continue")
            continue

    # all non-union fields of the metadatatype, keyed by name
    metafields = {}
    for fields in getFieldsForMeta(pid):
        if fields.getType() != "union":
            metafields[fields.name] = fields

    v = getAdminStdVars(req)
    v["metadatatype"] = metadatatype
    v["metafield"] = field
    v["error"] = err
    v["fieldtypes"] = getMetaFieldTypeNames()
    v["dateoptions"] = dateoption
    v["datatypes"] = attr
    v["requiredoptions"] = requiredoption
    v["fieldoptions"] = fieldoption
    v["metafields"] = metafields
    v["filtertype"] = req.params.get("filtertype", "")
    v["actpage"] = req.params.get("actpage")
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["url_targets"] = {"selbes Fenster": "same", "neues Fenster": "_blank"}
    # NOTE(review): the tuple default is always overwritten below; the "url"
    # branch pads the list returned by getValueList() to exactly 4 entries.
    v["valuelist"] = ("", "", "", "")
    if field.getFieldtype() == "url":
        v["valuelist"] = field.getValueList()
        while len(v["valuelist"]) != 4:
            v["valuelist"].append("")
    else:
        v["valuelist"] = field.getValueList()
    v["field"] = None
    if field.id:
        v["field"] = field
    v["adminfields"] = []
    v["csrf"] = req.csrf_token.current_token
    # per-fieldtype editor widgets; newer editors accept an attr_dict kwarg
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        if 'attr_dict' in inspect.getargspec(f.getMaskEditorHTML).args:
            attr_dict = dict(field.attrs.items())
            v["adminfields"].append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                        language=lang(req), attr_dict=attr_dict))
        else:
            v["adminfields"].append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                        language=lang(req)))
    db.session.commit()
    return req.getTAL("web/admin/modules/metatype_field.html", v, macro="modify_field")