def getEditModuleHierarchy(typename):
    """Return the edit-module hierarchy configured for the given typestring.

    The result maps a menu index (the position of the menu tab in the parsed
    menu string, or -1 for modules not assigned to any menu) to the ordered
    list of module names under that menu.  Returns {} for unknown typestrings
    and for the special "root" type.
    """
    _menu = {}
    menus = {}
    try:
        nodeclass = Node.get_class_for_typestring(typename.lower())
    except KeyError:
        return {}
    if typename == "root":
        return {}
    _items = {}
    menu_str = get_edit_menu_tabs(nodeclass)
    # Bugfix: always initialize the menu list for this node class.  The
    # original only assigned it for a non-empty menu string, so an empty
    # configuration raised an uncaught KeyError in the loops below.
    if menu_str != "":
        menus[nodeclass.name] = parseMenuString(menu_str)
    else:
        menus[nodeclass.name] = []
    _menu = {}
    _menu[-1] = []
    editModules = getEditModules()  # modules installed in system
    for module in editModules:
        if module.startswith("menu"):
            # menu container: find the tab whose name ends with this module
            active = -1
            for m in menus[nodeclass.name]:
                if m.getName().endswith(module):
                    active = menus[nodeclass.name].index(m)
                    break
            if active not in _menu.keys():
                _menu[active] = []
            _menu[active].append(module)
        else:
            # plain module: locate it in the item list of one of the menus
            active = -1
            for m in menus[nodeclass.name]:
                items = m.getItemList()
                for item in items:
                    if item == module:
                        active = menus[nodeclass.name].index(m)
                        if active not in _items.keys():
                            _items[active] = []
                        _items[active].append((module, items.index(item)))
                        break
            if active == -1:
                # not configured anywhere: collect under pseudo menu -1
                if active not in _items.keys():
                    _items[active] = []
                _items[active].append((module, 0))
    for key in _menu.keys():
        if key in _items.keys():
            items = _items[key]
            # order each menu's modules by their configured position
            # (key-based sort is equivalent to the old cmp-based sort)
            items.sort(key=lambda x: x[1])
            for item in items:
                _menu[key].append(item[0])
    return _menu
def getEditModuleHierarchy(typename):
    """Return the edit-module hierarchy configured for the given typestring.

    The result maps a menu index (the position of the menu tab in the parsed
    menu string, or -1 for modules not assigned to any menu) to the ordered
    list of module names under that menu.  Returns {} for unknown typestrings
    and for the special "root" type.
    """
    _menu = {}
    menus = {}
    try:
        nodeclass = Node.get_class_for_typestring(typename.lower())
    except KeyError:
        return {}
    if typename == "root":
        return {}
    _items = {}
    menu_str = get_edit_menu_tabs(nodeclass)
    # Bugfix: always initialize the menu list for this node class.  The
    # original only assigned it for a non-empty menu string, so an empty
    # configuration raised an uncaught KeyError in the loops below.
    if menu_str != "":
        menus[nodeclass.name] = parseMenuString(menu_str)
    else:
        menus[nodeclass.name] = []
    _menu = {}
    _menu[-1] = []
    editModules = getEditModules()  # modules installed in system
    for module in editModules:
        if module.startswith("menu"):
            # menu container: find the tab whose name ends with this module
            active = -1
            for m in menus[nodeclass.name]:
                if m.getName().endswith(module):
                    active = menus[nodeclass.name].index(m)
                    break
            if active not in _menu.keys():
                _menu[active] = []
            _menu[active].append(module)
        else:
            # plain module: locate it in the item list of one of the menus
            active = -1
            for m in menus[nodeclass.name]:
                items = m.getItemList()
                for item in items:
                    if item == module:
                        active = menus[nodeclass.name].index(m)
                        if active not in _items.keys():
                            _items[active] = []
                        _items[active].append((module, items.index(item)))
                        break
            if active == -1:
                # not configured anywhere: collect under pseudo menu -1
                if active not in _items.keys():
                    _items[active] = []
                _items[active].append((module, 0))
    for key in _menu.keys():
        if key in _items.keys():
            items = _items[key]
            # order each menu's modules by their configured position
            # (key-based sort is equivalent to the old cmp-based sort)
            items.sort(key=lambda x: x[1])
            for item in items:
                _menu[key].append(item[0])
    return _menu
def getMaskEditorHTML(self, field, metadatatype=None, language=None):
    """Render the "maskeditor" macro of metadata/meta.html for this field.

    `field` supplies the stored values (split on CRLF); `metadatatype`, when
    given, contributes the technical attributes of every datatype it allows,
    plus the wanted IPTC tags.
    """
    try:
        lines = field.getValues().split("\r\n")
    except AttributeError:
        lines = []
    # the template expects at least two value slots
    while len(lines) < 2:
        lines.append('')

    t_attrs = {}
    if metadatatype:
        for typestring in metadatatype.getDatatypes():
            content_class = Node.get_class_for_typestring(typestring)
            dummy_node = content_class(name=u'')
            try:
                t_attrs.update(dummy_node.getTechnAttributes())
                t_attrs['IPTC'] = get_wanted_iptc_tags()
            except AttributeError:
                logg.exception("attribute error in getMaskEditorHTML, continue")
                continue

    context = {"value": lines, "t_attrs": t_attrs}
    return tal.getTAL("metadata/meta.html", context,
                      macro="maskeditor", language=language)
def xml_start_element(self, name, attrs):
    """SAX start-element handler for the node XML import.

    Maintains a stack of open nodes in self.nodes and a mapping of imported
    ids to created nodes in self.id2node.  Handles the elements "nodelist",
    "node", "attribute", "child" and "file".
    """
    # current (innermost) open node, or None at document top level
    try:
        node = self.nodes[-1]
    except:
        node = None
    if name == "nodelist":
        if "exportversion" in attrs:
            logg.info("starting xml import: %s", attrs)
    elif name == "node":
        self.node_already_seen = False
        parent = node
        try:
            datatype = attrs["datatype"]
        except KeyError:
            # compatibility for old xml files created with mediatum
            t = attrs.get("type")
            if t is not None:
                datatype = t
            else:
                datatype = "directory"
        if "id" not in attrs:
            # no id in the file: invent a (pseudo-)unique one
            attrs["id"] = ustr(random.random())
        old_id = attrs["id"]
        if old_id in self.id2node:
            # same id seen before: reuse the node and skip its sub-elements
            node = self.id2node[old_id]
            self.node_already_seen = True
            return
        elif datatype in ["mapping"]:
            # mappings get a suffixed name to avoid clashes with existing ones
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=(attrs["name"] + "_imported_" + old_id))
        else:
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=attrs["name"])
        # todo: handle access
        #if "read" in attrs:
        #    node.setAccess("read", attrs["read"].encode("utf-8"))
        #if "write" in attrs:
        #    node.setAccess("write", attrs["write"].encode("utf-8"))
        #if "data" in attrs:
        #    node.setAccess("data", attrs["data"].encode("utf-8"))
        if self.verbose:
            logg.info(
                "created node '%s', '%s', '%s', old_id from attr='%s'",
                node.name, node.type, node.id, attrs["id"])
        self.id2node[attrs["id"]] = node
        # child ids are collected here and presumably resolved after parsing
        # (resolution is not visible in this handler) — TODO confirm
        node.tmpchilds = []
        self.nodes.append(node)
        if self.root is None:
            self.root = node
        return
    elif name == "attribute" and not self.node_already_seen:
        attr_name = attrs["name"]
        if "value" in attrs:
            if attr_name in ["valuelist"]:
                # normalize newline-separated value lists to ';'-separated
                node.set(
                    attr_name,
                    attrs["value"].replace("\n\n", "\n").replace(
                        "\n", ";").replace(";;", ";"))
            else:
                node.set(attr_name, attrs["value"])
        else:
            # value arrives as character data: remember the attribute name
            self.attributename = attr_name
    elif name == "child" and not self.node_already_seen:
        nid = attrs["id"]
        node.tmpchilds += [nid]
    elif name == "file" and not self.node_already_seen:
        try:
            datatype = attrs["type"]
        except:
            datatype = None
        try:
            mimetype = attrs["mime-type"]
        except:
            mimetype = None
        filename = attrs["filename"]
        node.files.append(
            File(path=filename, filetype=datatype, mimetype=mimetype))
def getMetaEditor(self, item, req):
    """ editor mask for field definition """
    attr = {}
    fields = []
    pidnode = None
    # without an explicit pid, fall back to an "export" mask parent of item
    if "pid" not in req.params.keys():
        for p in item.getParents():
            try:
                if p.getMasktype() == "export":
                    pidnode = p
                    break
            except:
                continue
    # NOTE(review): this is used as an object with getDatatypes()/getMasks(),
    # so the caller apparently stores a metadatatype instance (not a string)
    # in req.params — TODO confirm
    metadatatype = req.params.get("metadatatype")
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        attr.update(node.getTechnAttributes())
    if req.params.get("op", "") == "new":
        pidnode = q(Node).get(req.params.get("pid"))
    if hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() in ("vgroup", "hgroup"):
        # XXX: getAllChildren does not exist anymore, is this dead code?
        for field in pidnode.getAllChildren():
            if field.getType().getName() == "maskitem" and field.id != pidnode.id:
                fields.append(field)
    else:
        # collect the fields of the mask whose id matches pid
        for m in metadatatype.getMasks():
            if ustr(m.id) == ustr(req.params.get("pid")):
                for field in m.getChildren():
                    fields.append(field)
    fields.sort(lambda x, y: cmp(x.getOrderPos(), y.getOrderPos()))
    add_values = []
    val = u""
    if item.getField():
        val = item.getField().getValues()
    db.session.commit()
    # one mask-editor snippet per known field type
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        add_values.append(f.getMaskEditorHTML(val, metadatatype=metadatatype, language=lang(req)))
    metafields = metadatatype.getMetaFields()
    metafields.sort(lambda x, y: cmp(x.getName().lower(), y.getName().lower()))
    metafieldtypes = getMetaFieldTypes().values()
    metafieldtypes.sort(lambda x, y: cmp(translate(x.getName(), request=req).lower(),
                                         translate(y.getName(), request=req).lower()))
    # hidden divs carrying each metafield's description for the client side
    add_descriptions = []
    for metafield in metafields:
        add_descriptions.append('<div style="display:none" id="div_%d" name="%s" description="%s"/>'
                                % (metafield.id, metafield.name, metafield.getDescription()))
    v = {}
    v["op"] = req.params.get("op", "")
    v["pid"] = req.params.get("pid", "")
    v["item"] = item
    v["metafields"] = metafields
    v["fields"] = fields
    v["fieldtypes"] = metafieldtypes
    v["dateoption"] = dateoption
    v["t_attrs"] = attr
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["add_values"] = add_values
    v["add_descriptions"] = add_descriptions
    v["translate"] = translate
    v["language"] = lang(req)
    if pidnode and hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() == "export":
        # export masks additionally show their mapping nodes
        v["mappings"] = []
        for m in pidnode.getExportMapping():
            v["mappings"].append(q(Node).get(m))
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor_" + pidnode.getMasktype())
    else:
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor")
def show_workflow_step(self, req):
    """Render the workflow start step, or create/authenticate a start node.

    Three request modes: "workflow_start" creates a new node of the selected
    schema with a fresh access key; "workflow_start_auth" re-enters an
    existing node by id and key; otherwise the start form is rendered.
    """
    typenames = self.get("newnodetype").split(";")
    wfnode = self.parents[0]
    redirect = ""
    message = ""
    # check existence of metadata types listed in the definition of the start node
    mdts = q(Metadatatypes).one()
    for schema in typenames:
        if not mdts.children.filter_by(name=schema.strip().split("/")[-1]).scalar():
            return ('<i>%s: %s </i>') % (schema, t(lang(req), "permission_denied"))
    if "workflow_start" in req.params:
        switch_language(req, req.params.get('workflow_language'))
        # selected_schema has the form "<typestring>/<schema>"
        content_class = Node.get_class_for_typestring(req.params.get('selected_schema').split('/')[0])
        node = content_class(name=u'', schema=req.params.get('selected_schema').split('/')[1])
        self.children.append(node)
        # create user group named '_workflow' if it doesn't exist
        workflow_group = q(UserGroup).filter_by(name=u'_workflow').scalar()
        if workflow_group is None:
            workflow_group = UserGroup(name=u'_workflow',
                                       description=u'internal dummy group for nodes in workflows')
            db.session.add(workflow_group)
        # create access rule with '_workflow' user group
        workflow_rule = get_or_add_access_rule(group_ids=[workflow_group.id])
        special_access_ruleset = node.get_or_add_special_access_ruleset(ruletype=u'read')
        special_access_ruleset.rule_assocs.append(AccessRulesetToRule(rule=workflow_rule))
        node.set("creator", "workflow-" + self.parents[0].name)
        node.set("creationtime", date.format_date())
        node.set("system.wflanguage", req.params.get('workflow_language', req.Cookies.get('language')))
        node.set("key", mkKey())
        node.set("system.key", node.get("key"))  # initial key identifier
        req.session["key"] = node.get("key")
        db.session.commit()
        return self.forwardAndShow(node, True, req)
    elif "workflow_start_auth" in req.params:  # auth node by id and key
        try:
            node = q(Node).get(req.params.get('nodeid'))
            # startkey, but protected
            if node.get('system.key') == req.params.get('nodekey') and node.get('key') != req.params.get('nodekey'):
                message = "workflow_start_err_protected"
            elif node.get('key') == req.params.get('nodekey'):
                redirect = "/pnode?id=%s&key=%s" % (node.id, node.get('key'))
            else:
                message = "workflow_start_err_wrongkey"
        except:
            logg.exception("exception in workflow step start (workflow_start_auth)")
            message = "workflow_start_err_wrongkey"
    types = []
    for a in typenames:
        if a:
            m = getMetaType(a)
            # we could now check m.isActive(), but for now let's
            # just take all specified metatypes, so that edit area
            # and workflow are independent on this
            types += [(m, a)]
    cookie_error = t(lang(req), "Your browser doesn't support cookies")
    js = """
<script language="javascript">
function cookie_test() {
    if (document.cookie=="")
        document.cookie = "CookieTest=Erfolgreich";
    if (document.cookie=="") {
        alert("%s");
    }
}
cookie_test();
</script>""" % cookie_error
    return req.getTAL("workflow/start.html",
                      {'types': types,
                       'id': self.id,
                       'js': js,
                       'starttext': self.get('starttext'),
                       'languages': self.parents[0].getLanguages(),
                       'currentlang': lang(req),
                       'sidebartext': self.getSidebarText(lang(req)),
                       'redirect': redirect,
                       'message': message,
                       'allowcontinue': self.get('allowcontinue'),
                       "csrf": req.csrf_token.current_token,},
                      macro="workflow_start")
def getContent(req, ids):
    """AJAX endpoint of the upload edit module.

    Two actions: "removefiles" deletes all files attached to the base node;
    "buildnode" turns previously uploaded files into new child nodes (either
    via the plupload per-file scheme mapping, or via the 'files' parameter).
    Writes a JSON response to req and returns None.
    """
    user = users.getUserFromRequest(req)
    language = lang(req)

    def get_ids_from_query():
        ids = get_ids_from_req(req)
        return ",".join(ids)

    if "action" in req.params:
        state = 'ok'
        if req.params.get('action') == "removefiles":
            basenode = q(Node).get(req.params.get('id'))
            for f in basenode.files:
                try:
                    os.remove(f.abspath)
                    pass
                except:
                    # best-effort: a failed disk delete only flags the state
                    state = "error"
            basenode.files = []
            db.session.commit()
            req.write(json.dumps({'state': state}, ensure_ascii=False))
            return None
        if req.params.get('action') == "buildnode":  # create nodes
            basenode = q(Node).get(req.params.get('id'))
            newnodes = []
            errornodes = []
            basenodefiles_processed = []
            if req.params.get('uploader', '') == 'plupload':
                # map each uploaded filename to the scheme chosen for it
                filename2scheme = {}
                for k in req.params:
                    if k.startswith("scheme_"):
                        filename2scheme[k.replace('scheme_', '', 1)] = req.params.get(k)
                for f in basenode.files:
                    filename = f.name
                    if filename in filename2scheme:
                        mimetype = getMimeType(filename)
                        if mimetype[1] == "bibtex":
                            # bibtex import handler
                            try:
                                new_node = importBibTeX(f.abspath, basenode)
                                newnodes.append(new_node.id)
                                basenodefiles_processed.append(f)
                            except ValueError, e:
                                errornodes.append((filename, unicode(e)))
                        # NOTE(review): bibtex files also fall through to the
                        # generic node creation below — looks suspicious,
                        # TODO confirm intended behavior
                        logg.debug("filename: %s, mimetype: %s", filename, mimetype)
                        logg.debug("__name__=%s, func=%s; _m=%s, _m[1]=%s",
                                   __name__, funcname(), mimetype, mimetype[1])
                        content_class = Node.get_class_for_typestring(mimetype[1])
                        node = content_class(name=filename, schema=filename2scheme[filename])
                        basenode.children.append(node)
                        node.set("creator", user.login_name)
                        node.set("creationtime",
                                 unicode(time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time()))))
                        # set filetype for uploaded file as requested by the content class
                        f.filetype = content_class.get_upload_filetype()
                        node.files.append(f)
                        node.event_files_changed()
                        newnodes.append(node.id)
                        basenodefiles_processed.append(f)
                        basenode.files.remove(f)
                        db.session.commit()
                        logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                  "node is child of base node id=%s (name=%s, type=%s)",
                                  user.login_name, node.id, node.name, node.type, filename,
                                  basenode.id, basenode.name, basenode.type)
            else:
                for filename in req.params.get('files').split('|'):
                    mimetype = getMimeType(filename)
                    logg.debug("... in %s.%s: getMimeType(filename=%s)=%s",
                               __name__, funcname(), filename, mimetype)
                    if mimetype[1] == req.params.get('type') or req.params.get('type') == 'file':
                        for f in basenode.files:
                            # ambiguity here ?
                            if f.abspath.endswith(filename):
                                # bibtex import handler
                                if mimetype[1] == "bibtex" and not req.params.get('type') == 'file':
                                    try:
                                        new_node = importBibTeX(f.abspath, basenode)
                                        newnodes.append(new_node.id)
                                        basenodefiles_processed.append(f)
                                    except ValueError, e:
                                        errornodes.append((filename, unicode(e)))
                                    db.session.commit()
                                else:
                                    logg.debug("creating new node: filename: %s", filename)
                                    logg.debug("files at basenode: %s",
                                               [(x.getName(), x.abspath) for x in basenode.files])
                                    content_class = Node.get_class_for_typestring(req.params.get('type'))
                                    node = content_class(name=filename, schema=req.params.get('value'))
                                    basenode.children.append(node)
                                    node.set("creator", user.login_name)
                                    node.set("creationtime",
                                             unicode(time.strftime('%Y-%m-%dT%H:%M:%S',
                                                                   time.localtime(time.time()))))
                                    # clones to a file with random name
                                    cloned_file = importFileRandom(f.abspath)
                                    # set filetype for uploaded file as requested by the content class
                                    cloned_file.filetype = content_class.get_upload_filetype()
                                    node.files.append(cloned_file)
                                    node.event_files_changed()
                                    newnodes.append(node.id)
                                    basenodefiles_processed.append(f)
                                    logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                              "node is child of base node id=%s (name=%s, type=%s)",
                                              user.login_name, node.id, node.name, node.type, filename,
                                              basenode.id, basenode.name, basenode.type)
                                break  # filename may not be unique
            new_tree_labels = [{'id': basenode.id,
                                'label': getTreeLabel(basenode, lang=language)}]
            # detach processed files from the base node and delete them on disk
            for f in basenodefiles_processed:
                basenode.files.remove(f)
                f_path = f.abspath
                if os.path.exists(f_path):
                    logg.debug("%s going to remove file %s from disk", user.login_name, f_path)
                    os.remove(f_path)
            # NOTE(review): 'filename' is the last loop variable; if no file
            # was iterated above this raises NameError — TODO confirm callers
            # always send at least one file
            mime = getMimeType(filename)
            scheme_type = {mime[1]: []}
            for scheme in get_permitted_schemas():
                if mime[1] in scheme.getDatatypes():
                    scheme_type[mime[1]].append(scheme)
                    # break
            db.session.commit()
            # standard file
            content = req.getTAL('web/edit/modules/upload.html',
                                 {'files': [filename], 'schemes': scheme_type},
                                 macro="uploadfileok")
            res = {'state': state, 'newnodes': newnodes, 'errornodes': errornodes,
                   'new_tree_labels': new_tree_labels, 'ret': content}
            res = json.dumps(res, ensure_ascii=False)
            req.write(res)
            return None
def FieldDetail(req, pid, id, err=0):
    """Render the admin detail mask for a metafield of metadatatype `pid`.

    With err == 0 and empty id a fresh field is shown; with a non-empty id an
    existing field is loaded; otherwise the submitted (invalid) form values
    are re-displayed on a transient Metafield.
    """
    # collect single-character option flags from "option_<c>" parameter names
    _option = ""
    for key in req.params.keys():
        if key.startswith("option_"):
            _option += key[7]
    if err == 0 and id == "":  # new field
        field = Metafield(u"")
        db.session.commit()
    elif id != "":  # edit field
        field = q(Metadatatype).get(pid).children.filter_by(name=id, type=u'metafield').scalar()
    else:  # error filling values
        # rebuild a transient field from the submitted form values
        _fieldvalue = ""
        if req.params.get('mtype', '') + "_value" in req.params.keys():
            _fieldvalue = ustr(req.params[req.params.get('mtype', '') + "_value"])
        if (req.params.get("mname") == ""):
            field = Metafield(req.params.get("orig_name"))
        else:
            field = Metafield(req.params.get("mname"))
        field.setLabel(req.params.get("mlabel"))
        field.setOrderPos(req.params.get("orderpos"))
        field.setFieldtype(req.params.get("mtype"))
        field.setOption(_option)
        field.setValues(_fieldvalue)
        field.setDescription(req.params.get("mdescription"))
        db.session.commit()
    # technical attributes of every datatype allowed by the metadatatype
    attr = {}
    metadatatype = getMetaType(pid)
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        try:
            attr.update(node.getTechnAttributes())
        except AttributeError:
            logg.exception("attribute error in FieldDetail, continue")
            continue
    metafields = {}
    for fields in getFieldsForMeta(pid):
        if fields.getType() != "union":
            metafields[fields.name] = fields
    v = getAdminStdVars(req)
    v["metadatatype"] = metadatatype
    v["metafield"] = field
    v["error"] = err
    v["fieldtypes"] = getMetaFieldTypeNames()
    v["dateoptions"] = dateoption
    v["datatypes"] = attr
    v["requiredoptions"] = requiredoption
    v["fieldoptions"] = fieldoption
    v["metafields"] = metafields
    v["filtertype"] = req.params.get("filtertype", "")
    v["actpage"] = req.params.get("actpage")
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["url_targets"] = {"selbes Fenster": "same", "neues Fenster": "_blank"}
    v["valuelist"] = ("", "", "", "")
    if field.getFieldtype() == "url":
        # url fields expect exactly four value slots in the template
        v["valuelist"] = field.getValueList()
        while len(v["valuelist"]) != 4:
            v["valuelist"].append("")
    else:
        v["valuelist"] = field.getValueList()
    v["field"] = None
    if field.id:
        v["field"] = field
    v["adminfields"] = []
    v["csrf"] = req.csrf_token.current_token
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        # pass attr_dict only to editors whose signature accepts it
        if 'attr_dict' in inspect.getargspec(f.getMaskEditorHTML).args:
            attr_dict = dict(field.attrs.items())
            v["adminfields"].append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                        language=lang(req), attr_dict=attr_dict))
        else:
            v["adminfields"].append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                        language=lang(req)))
    db.session.commit()
    return req.getTAL("web/admin/modules/metatype_field.html", v, macro="modify_field")
def FieldDetail(req, pid, id, err=0):
    """Render the admin detail mask for a metafield of metadatatype `pid`.

    With err == 0 and empty id a fresh field is shown; with a non-empty id an
    existing field is loaded; otherwise the submitted (invalid) form values
    are re-displayed on a transient Metafield.
    """
    # collect single-character option flags from "option_<c>" parameter names
    _option = ""
    for key in req.params.keys():
        if key.startswith("option_"):
            _option += key[7]
    if err == 0 and id == "":  # new field
        field = Metafield(u"")
        db.session.commit()
    elif id != "":  # edit field
        field = q(Metadatatype).get(pid).children.filter_by(name=id, type=u'metafield').scalar()
    else:  # error filling values
        # rebuild a transient field from the submitted form values
        _fieldvalue = ""
        if req.params.get('mtype', '') + "_value" in req.params.keys():
            _fieldvalue = ustr(req.params[req.params.get('mtype', '') + "_value"])
        if (req.params.get("mname") == ""):
            field = Metafield(req.params.get("orig_name"))
        else:
            field = Metafield(req.params.get("mname"))
        field.setLabel(req.params.get("mlabel"))
        field.setOrderPos(req.params.get("orderpos"))
        field.setFieldtype(req.params.get("mtype"))
        field.setOption(_option)
        field.setValues(_fieldvalue)
        field.setDescription(req.params.get("mdescription"))
        db.session.commit()
    # technical attributes of every datatype allowed by the metadatatype
    attr = {}
    metadatatype = getMetaType(pid)
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        try:
            attr.update(node.getTechnAttributes())
        except AttributeError:
            logg.exception("attribute error in FieldDetail, continue")
            continue
    metafields = {}
    for fields in getFieldsForMeta(pid):
        if fields.getType() != "union":
            metafields[fields.name] = fields
    v = getAdminStdVars(req)
    v["metadatatype"] = metadatatype
    v["metafield"] = field
    v["error"] = err
    v["fieldtypes"] = getMetaFieldTypeNames()
    v["dateoptions"] = dateoption
    v["datatypes"] = attr
    v["requiredoptions"] = requiredoption
    v["fieldoptions"] = fieldoption
    v["metafields"] = metafields
    v["filtertype"] = req.params.get("filtertype", "")
    v["actpage"] = req.params.get("actpage")
    v["icons"] = {"externer Link": "/img/extlink.png", "Email": "/img/email.png"}
    v["url_targets"] = {"selbes Fenster": "same", "neues Fenster": "_blank"}
    v["valuelist"] = ("", "", "", "")
    if field.getFieldtype() == "url":
        # url fields expect exactly four value slots in the template
        v["valuelist"] = field.getValueList()
        while len(v["valuelist"]) != 4:
            v["valuelist"].append("")
    else:
        v["valuelist"] = field.getValueList()
    v["field"] = None
    if field.id:
        v["field"] = field
    v["adminfields"] = []
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        # pass attr_dict only to editors whose signature accepts it
        if 'attr_dict' in inspect.getargspec(f.getMaskEditorHTML).args:
            attr_dict = dict(field.attrs.items())
            v["adminfields"].append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                        language=lang(req), attr_dict=attr_dict))
        else:
            v["adminfields"].append(f.getMaskEditorHTML(v["field"], metadatatype=metadatatype,
                                                        language=lang(req)))
    db.session.commit()
    return req.getTAL("web/admin/modules/metatype_field.html", v, macro="modify_field")
def editModuleActions(req):
    """Handle admin requests that rearrange the edit-module menu of a datatype.

    The action is encoded in the request parameter names (editmodules_default,
    del|..., show|..., move|..., up|..., down|...).  The resulting menu string
    is stored in the root node's system attribute "edit.menu.<datatype>".
    """
    root = q(Root).one()
    datatype = req.params.get("datatype", "").lower()
    try:
        nodeclass = Node.get_class_for_typestring(datatype)
    except KeyError:
        logg.error("type %s not found", datatype)
        return
    for key in req.params.keys():
        if key == "editmodules_default":
            # reset to the class default menu definition
            root.system_attrs["edit.menu." + datatype] = nodeclass.get_default_edit_menu_tabs()
            break
        elif key.startswith("del|"):
            # remove a module (or a whole menu) from the configuration
            ret = ""
            m = key.split("|")[-1][:-2]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if k >= 0 and not (m.startswith("menu") and items[k][0] == m):
                    i = [item for item in items[k] if item != m]
                    if len(i) > 1:
                        ret += i[0] + "(" + ";".join(i[1:]) + ");"
                    else:
                        ret += i[0] + "();"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("show|"):  # add menu
            item = key.split("|")[-1][:-2]
            menu_str = get_edit_menu_tabs(nodeclass) + ";" + item + "()"
            root.system_attrs["edit.menu." + datatype] = menu_str
            break
        elif key.startswith("move|") and req.params.get(key) != "":  # move item to menu
            dest = req.params.get(key)
            dest_id = -1
            mod = key.split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if dest in items[k]:
                    dest_id = k
                if mod in items[k]:
                    items[k].remove(mod)
            items[dest_id].append(mod)
            ret = ""
            for k in items:
                if len(items[k]) == 0 or k < 0:
                    pass
                elif items[k][0].startswith("menu"):
                    ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("up|"):  # move module or module item up
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:  # menu
                    src = items[k]
                    items[k] = items[k - 1]
                    items[k - 1] = src
                    break
                # bugfix: original read "items[k].index > 0", comparing the
                # bound method object to an int instead of calling it
                elif m in items[k] and items[k].index(m) > 0:  # menu item
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id - 1]
                    items[k][src_id - 1] = m
                    break
            ret = ""
            for k in items:
                if len(items[k]) == 0 or k < 0:
                    pass
                elif items[k][0].startswith("menu"):
                    ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("down|"):  # move module or module item down
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:  # menu
                    src = items[k]
                    items[k] = items[k + 1]
                    items[k + 1] = src
                    break
                # bugfix: same method-vs-call comparison as in the "up|" branch
                elif m in items[k] and items[k].index(m) > 0:  # menu item
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id + 1]
                    items[k][src_id + 1] = m
                    break
            ret = ""
            for k in items:
                if len(items[k]) == 0 or k < 0:
                    pass
                elif items[k][0].startswith("menu"):
                    ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
    db.session.commit()
def test_data_get_class_for_typestring():
    """The "data" typestring must resolve to the Data content class."""
    from contenttypes import Data
    assert Node.get_class_for_typestring("data") is Data
def show_workflow_step(self, req):
    """Render the workflow start step, or create/authenticate a start node.

    Three request modes: "workflow_start" creates a new node of the selected
    schema with a fresh access key; "workflow_start_auth" re-enters an
    existing node by id and key; otherwise the start form is rendered.
    """
    typenames = self.get("newnodetype").split(";")
    wfnode = self.parents[0]
    redirect = ""
    message = ""
    # check existence of metadata types listed in the definition of the start node
    mdts = q(Metadatatypes).one()
    for schema in typenames:
        if not mdts.children.filter_by(
                name=schema.strip().split("/")[-1]).scalar():
            return ('<i>%s: %s </i>') % (schema, t(lang(req), "permission_denied"))
    if "workflow_start" in req.params:
        switch_language(req, req.params.get('workflow_language'))
        # selected_schema has the form "<typestring>/<schema>"
        content_class = Node.get_class_for_typestring(
            req.params.get('selected_schema').split('/')[0])
        node = content_class(
            name=u'', schema=req.params.get('selected_schema').split('/')[1])
        self.children.append(node)
        # create user group named '_workflow' if it doesn't exist
        workflow_group = q(UserGroup).filter_by(name=u'_workflow').scalar()
        if workflow_group is None:
            workflow_group = UserGroup(
                name=u'_workflow',
                description=u'internal dummy group for nodes in workflows')
            db.session.add(workflow_group)
        # create access rule with '_workflow' user group
        workflow_rule = get_or_add_access_rule(
            group_ids=[workflow_group.id])
        special_access_ruleset = node.get_or_add_special_access_ruleset(
            ruletype=u'read')
        special_access_ruleset.rule_assocs.append(
            AccessRulesetToRule(rule=workflow_rule))
        node.set("creator", "workflow-" + self.parents[0].name)
        node.set("creationtime", date.format_date())
        node.set(
            "system.wflanguage",
            req.params.get('workflow_language', req.Cookies.get('language')))
        node.set("key", mkKey())
        node.set("system.key", node.get("key"))  # initial key identifier
        req.session["key"] = node.get("key")
        db.session.commit()
        return self.forwardAndShow(node, True, req)
    elif "workflow_start_auth" in req.params:  # auth node by id and key
        try:
            node = q(Node).get(req.params.get('nodeid'))
            # startkey, but protected
            if node.get('system.key') == req.params.get(
                    'nodekey'
            ) and node.get('key') != req.params.get('nodekey'):
                message = "workflow_start_err_protected"
            elif node.get('key') == req.params.get('nodekey'):
                redirect = "/pnode?id=%s&key=%s" % (node.id, node.get('key'))
            else:
                message = "workflow_start_err_wrongkey"
        except:
            logg.exception(
                "exception in workflow step start (workflow_start_auth)")
            message = "workflow_start_err_wrongkey"
    types = []
    for a in typenames:
        if a:
            m = getMetaType(a)
            # we could now check m.isActive(), but for now let's
            # just take all specified metatypes, so that edit area
            # and workflow are independent on this
            types += [(m, a)]
    cookie_error = t(lang(req), "Your browser doesn't support cookies")
    js = """
<script language="javascript">
function cookie_test() {
    if (document.cookie=="")
        document.cookie = "CookieTest=Erfolgreich";
    if (document.cookie=="") {
        alert("%s");
    }
}
cookie_test();
</script>""" % cookie_error
    return req.getTAL("workflow/start.html", {
        'types': types,
        'id': self.id,
        'js': js,
        'starttext': self.get('starttext'),
        'languages': self.parents[0].getLanguages(),
        'currentlang': lang(req),
        'sidebartext': self.getSidebarText(lang(req)),
        'redirect': redirect,
        'message': message,
        'allowcontinue': self.get('allowcontinue'),
        "csrf": req.csrf_token.current_token,
    }, macro="workflow_start")
def getContent(req, ids):
    """AJAX endpoint of the upload edit module.

    Two actions: "removefiles" deletes all files attached to the base node;
    "buildnode" turns previously uploaded files into new child nodes (either
    via the plupload per-file scheme mapping, or via the 'files' parameter).
    Writes a JSON response to req and returns None.
    """
    user = users.getUserFromRequest(req)
    language = lang(req)

    def get_ids_from_query():
        ids = get_ids_from_req(req)
        return ",".join(ids)

    if "action" in req.params:
        state = 'ok'
        if req.params.get('action') == "removefiles":
            basenode = q(Node).get(req.params.get('id'))
            for f in basenode.files:
                try:
                    os.remove(f.abspath)
                    pass
                except:
                    # best-effort: a failed disk delete only flags the state
                    state = "error"
            basenode.files = []
            db.session.commit()
            req.write(json.dumps({'state': state}, ensure_ascii=False))
            return None
        if req.params.get('action') == "buildnode":  # create nodes
            basenode = q(Node).get(req.params.get('id'))
            newnodes = []
            errornodes = []
            basenodefiles_processed = []
            if req.params.get('uploader', '') == 'plupload':
                # map each uploaded filename to the scheme chosen for it
                filename2scheme = {}
                for k in req.params:
                    if k.startswith("scheme_"):
                        filename2scheme[
                            k.replace('scheme_', '', 1)] = req.params.get(k)
                for f in basenode.files:
                    filename = f.name
                    if filename in filename2scheme:
                        mimetype = getMimeType(filename)
                        if mimetype[1] == "bibtex":
                            # bibtex import handler
                            try:
                                new_node = importBibTeX(f.abspath, basenode, req=req)
                                newnodes.append(new_node.id)
                                basenodefiles_processed.append(f)
                            except ValueError, e:
                                errornodes.append((filename, unicode(e)))
                        # NOTE(review): bibtex files also fall through to the
                        # generic node creation below — looks suspicious,
                        # TODO confirm intended behavior
                        logg.debug("filename: %s, mimetype: %s", filename, mimetype)
                        logg.debug("__name__=%s, func=%s; _m=%s, _m[1]=%s",
                                   __name__, funcname(), mimetype, mimetype[1])
                        content_class = Node.get_class_for_typestring(mimetype[1])
                        node = content_class(name=filename, schema=filename2scheme[filename])
                        basenode.children.append(node)
                        node.set("creator", user.login_name)
                        node.set("creationtime", unicode(time.strftime('%Y-%m-%dT%H:%M:%S',
                                                                       time.localtime(time.time()))))
                        # set filetype for uploaded file as requested by the content class
                        f.filetype = content_class.get_upload_filetype()
                        node.files.append(f)
                        node.event_files_changed()
                        newnodes.append(node.id)
                        basenodefiles_processed.append(f)
                        basenode.files.remove(f)
                        db.session.commit()
                        logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                  "node is child of base node id=%s (name=%s, type=%s)",
                                  user.login_name, node.id, node.name, node.type, filename,
                                  basenode.id, basenode.name, basenode.type)
            else:
                for filename in req.params.get('files').split('|'):
                    mimetype = getMimeType(filename)
                    logg.debug("... in %s.%s: getMimeType(filename=%s)=%s",
                               __name__, funcname(), filename, mimetype)
                    if mimetype[1] == req.params.get('type') or req.params.get('type') == 'file':
                        for f in basenode.files:
                            # ambiguity here ?
                            if f.abspath.endswith(filename):
                                # bibtex import handler
                                if mimetype[1] == "bibtex" and not req.params.get('type') == 'file':
                                    try:
                                        new_node = importBibTeX(f.abspath, basenode, req=req)
                                        newnodes.append(new_node.id)
                                        basenodefiles_processed.append(f)
                                    except ValueError, e:
                                        errornodes.append((filename, unicode(e)))
                                    db.session.commit()
                                else:
                                    logg.debug("creating new node: filename: %s", filename)
                                    logg.debug("files at basenode: %s",
                                               [(x.getName(), x.abspath) for x in basenode.files])
                                    content_class = Node.get_class_for_typestring(req.params.get('type'))
                                    node = content_class(name=filename, schema=req.params.get('value'))
                                    basenode.children.append(node)
                                    node.set("creator", user.login_name)
                                    node.set("creationtime", unicode(time.strftime('%Y-%m-%dT%H:%M:%S',
                                                                                   time.localtime(time.time()))))
                                    # clones to a file with random name
                                    cloned_file = importFileRandom(f.abspath)
                                    # set filetype for uploaded file as requested by the content class
                                    cloned_file.filetype = content_class.get_upload_filetype()
                                    node.files.append(cloned_file)
                                    node.event_files_changed()
                                    newnodes.append(node.id)
                                    basenodefiles_processed.append(f)
                                    logg.info("%s created new node id=%s (name=%s, type=%s) by uploading file %s, "
                                              "node is child of base node id=%s (name=%s, type=%s)",
                                              user.login_name, node.id, node.name, node.type, filename,
                                              basenode.id, basenode.name, basenode.type)
                                break  # filename may not be unique
            new_tree_labels = [{'id': basenode.id,
                                'label': getTreeLabel(basenode, lang=language)}]
            # detach processed files from the base node and delete them on disk
            for f in basenodefiles_processed:
                basenode.files.remove(f)
                f_path = f.abspath
                if os.path.exists(f_path):
                    logg.debug("%s going to remove file %s from disk", user.login_name, f_path)
                    os.remove(f_path)
            # NOTE(review): 'filename' is the last loop variable; if no file
            # was iterated above this raises NameError — TODO confirm callers
            # always send at least one file
            mime = getMimeType(filename)
            scheme_type = {mime[1]: []}
            for scheme in get_permitted_schemas():
                if mime[1] in scheme.getDatatypes():
                    scheme_type[mime[1]].append(scheme)
                    # break
            db.session.commit()
            # standard file
            content = req.getTAL('web/edit/modules/upload.html',
                                 {'files': [filename], 'schemes': scheme_type},
                                 macro="uploadfileok")
            res = {'state': state,
                   'newnodes': newnodes,
                   'errornodes': errornodes,
                   'new_tree_labels': new_tree_labels,
                   'ret': content}
            res = json.dumps(res, ensure_ascii=False)
            req.write(res)
            return None
def test_data_get_class_for_typestring():
    """The "data" typestring must resolve to the Data content class."""
    from contenttypes import Data
    assert Node.get_class_for_typestring("data") is Data
def xml_start_element(self, name, attrs):
    """SAX start-element handler for importing a mediatum XML node dump.

    Handles four element kinds:
      * ``nodelist``  -- logs the start of the import
      * ``node``      -- creates a Node instance via its typestring class
      * ``attribute`` -- sets a node attribute (or remembers the name for
                         character data arriving later)
      * ``child`` / ``file`` -- records child-node ids / file entries

    State kept on ``self``: ``nodes`` (stack of open nodes), ``id2node``
    (old id -> created node), ``node_already_seen`` (skip duplicates),
    ``attributename``, ``root`` (first created node), ``verbose``.
    """
    # current parent candidate: top of the open-node stack (None at the start)
    try:
        node = self.nodes[-1]
    except:
        node = None
    if name == "nodelist":
        if "exportversion" in attrs:
            logg.info("starting xml import: %s", attrs)
    elif name == "node":
        self.node_already_seen = False
        parent = node
        # determine the node's typestring; fall back for legacy files
        try:
            datatype = attrs["datatype"]
        except KeyError:
            # compatibility for old xml files created with mediatum
            t = attrs.get("type")
            if t is not None:
                datatype = t
            else:
                datatype = "directory"
        # nodes without an id get a random placeholder so id2node still works
        if "id" not in attrs:
            attrs["id"] = ustr(random.random())
        old_id = attrs["id"]
        if old_id in self.id2node:
            # duplicate <node> element: reuse the already-created node and
            # ignore the rest of this element's content
            node = self.id2node[old_id]
            self.node_already_seen = True
            return
        elif datatype in ["mapping"]:
            # mappings get the old id appended to avoid name clashes
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=(attrs["name"] + "_imported_" + old_id))
        else:
            content_class = Node.get_class_for_typestring(datatype)
            node = content_class(name=attrs["name"])
        # todo: handle access
        #if "read" in attrs:
        #    node.setAccess("read", attrs["read"].encode("utf-8"))
        #if "write" in attrs:
        #    node.setAccess("write", attrs["write"].encode("utf-8"))
        #if "data" in attrs:
        #    node.setAccess("data", attrs["data"].encode("utf-8"))
        if self.verbose:
            logg.info("created node '%s', '%s', '%s', old_id from attr='%s'",
                      node.name, node.type, node.id, attrs["id"])
        self.id2node[attrs["id"]] = node
        # child ids are collected here and linked up after parsing
        node.tmpchilds = []
        self.nodes.append(node)
        # the first node created becomes the import root
        if self.root is None:
            self.root = node
        return
    elif name == "attribute" and not self.node_already_seen:
        attr_name = attrs["name"]
        if "value" in attrs:
            if attr_name in ["valuelist"]:
                # normalize newline-separated value lists to ';'-separated
                node.set(attr_name, attrs["value"].replace("\n\n", "\n").replace("\n", ";").replace(";;", ";"))
            else:
                node.set(attr_name, attrs["value"])
        else:
            # value arrives as character data; remember which attribute it is for
            self.attributename = attr_name
    elif name == "child" and not self.node_already_seen:
        nid = attrs["id"]
        node.tmpchilds += [nid]
    elif name == "file" and not self.node_already_seen:
        try:
            datatype = attrs["type"]
        except:
            datatype = None
        try:
            mimetype = attrs["mime-type"]
        except:
            mimetype = None
        filename = attrs["filename"]
        node.files.append(File(path=filename, filetype=datatype, mimetype=mimetype))
def getMetaEditor(self, item, req):
    """ editor mask for field definition

    Renders the mask-field editor (schema/mask/field.html) for *item*.
    Collects: technical attributes of all datatypes of the metadatatype,
    the sibling fields of the target mask, editor HTML for every metafield
    type, and (for export masks) the configured mappings.

    Returns the rendered TAL string; writes nothing to req directly.
    """
    attr = {}
    fields = []
    pidnode = None
    # without an explicit pid, look for an export-mask parent of the item
    if "pid" not in req.params.keys():
        for p in item.getParents():
            try:
                if p.getMasktype() == "export":
                    pidnode = p
                    break
            except:
                # parent without getMasktype() (not a mask) -- skip it
                continue
    metadatatype = req.params.get("metadatatype")
    # gather technical attributes from a throwaway instance of each datatype
    for t in metadatatype.getDatatypes():
        content_class = Node.get_class_for_typestring(t)
        node = content_class(name=u'')
        attr.update(node.getTechnAttributes())
    if req.params.get("op", "") == "new":
        pidnode = q(Node).get(req.params.get("pid"))
        if hasattr(pidnode, 'getMasktype') and pidnode.getMasktype() in ("vgroup", "hgroup"):
            # XXX: getAllChildren does not exist anymore, is this dead code?
            for field in pidnode.getAllChildren():
                if field.getType().getName(
                ) == "maskitem" and field.id != pidnode.id:
                    fields.append(field)
        else:
            # collect the fields of the mask identified by pid
            for m in metadatatype.getMasks():
                if ustr(m.id) == ustr(req.params.get("pid")):
                    for field in m.getChildren():
                        fields.append(field)
    fields.sort(lambda x, y: cmp(x.getOrderPos(), y.getOrderPos()))
    add_values = []
    val = u""
    if item.getField():
        val = item.getField().getValues()
        db.session.commit()
    # one editor-HTML snippet per metafield type
    for t in getMetaFieldTypeNames():
        f = getMetadataType(t)
        add_values.append(
            f.getMaskEditorHTML(val, metadatatype=metadatatype, language=lang(req)))
    metafields = metadatatype.getMetaFields()
    metafields.sort(lambda x, y: cmp(x.getName().lower(), y.getName().lower()))
    metafieldtypes = getMetaFieldTypes().values()
    # sort field types by their translated display name
    metafieldtypes.sort(lambda x, y: cmp(
        translate(x.getName(), request=req).lower(), translate(y.getName(), request=req).lower()))
    # hidden divs carrying each metafield's description for the client side
    add_descriptions = []
    for metafield in metafields:
        add_descriptions.append(
            '<div style="display:none" id="div_%d" name="%s" description="%s"/>' %
            (metafield.id, metafield.name, metafield.getDescription()))
    # template context
    v = {}
    v["op"] = req.params.get("op", "")
    v["pid"] = req.params.get("pid", "")
    v["item"] = item
    v["metafields"] = metafields
    v["fields"] = fields
    v["fieldtypes"] = metafieldtypes
    v["dateoption"] = dateoption
    v["t_attrs"] = attr
    v["icons"] = {
        "externer Link": "/img/extlink.png",
        "Email": "/img/email.png"
    }
    v["add_values"] = add_values
    v["add_descriptions"] = add_descriptions
    v["translate"] = translate
    v["language"] = lang(req)
    # export masks use a masktype-specific macro and expose their mappings
    if pidnode and hasattr(
            pidnode, 'getMasktype') and pidnode.getMasktype() == "export":
        v["mappings"] = []
        for m in pidnode.getExportMapping():
            v["mappings"].append(q(Node).get(m))
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor_" + pidnode.getMasktype())
    else:
        return req.getTAL("schema/mask/field.html", v, macro="metaeditor")
def editModuleActions(req):
    """Admin handler: persist the edit-menu configuration for a datatype.

    Inspects req.params for exactly one action, encoded in the parameter
    *names*:

    * ``editmodules_default``  -- reset menus to the class default
    * ``del|<module>xy``       -- remove a module from its menu
    * ``show|<module>xy``      -- append the module as a new, empty menu
    * ``move|<module>``        -- move a module into another menu (value = dest)
    * ``up|<module>xy``        -- move a menu / menu item one position up
    * ``down|<module>xy``      -- move a menu / menu item one position down

    The resulting layout is serialized as ``menu(item;item);menu2(...)`` into
    ``root.system_attrs["edit.menu.<datatype>"]`` and committed.
    Writes nothing to req; returns None.
    """
    root = q(Root).one()
    datatype = req.params.get("datatype", "").lower()
    try:
        nodeclass = Node.get_class_for_typestring(datatype)
    except KeyError:
        logg.error("type %s not found", datatype)
        return

    def _serialize(items):
        # Rebuild the "menu(item;item);..." string from a hierarchy dict as
        # returned by getEditModuleHierarchy(); the unassigned bucket (k < 0)
        # and empty menus are skipped.
        ret = ""
        for k in items:
            if len(items[k]) == 0 or k < 0:
                pass
            elif items[k][0].startswith("menu"):
                ret += items[k][0] + "(" + ";".join(items[k][1:]) + ");"
        return ret[:-1]

    for key in req.params.keys():
        if key == "editmodules_default":
            root.system_attrs["edit.menu." + datatype] = nodeclass.get_default_edit_menu_tabs()
            break
        elif key.startswith("del|"):
            # remove a module; image-button params end in ".x"/".y", hence [:-2]
            ret = ""
            m = key.split("|")[-1][:-2]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if k >= 0 and not (m.startswith("menu") and items[k][0] == m):
                    i = [item for item in items[k] if item != m]
                    if len(i) > 1:
                        ret += i[0] + "(" + ";".join(i[1:]) + ");"
                    else:
                        ret += i[0] + "();"
            root.system_attrs["edit.menu." + datatype] = ret[:-1]
            break
        elif key.startswith("show|"):  # add menu
            item = key.split("|")[-1][:-2]
            menu_str = get_edit_menu_tabs(nodeclass) + ";" + item + "()"
            root.system_attrs["edit.menu." + datatype] = menu_str
            break
        elif key.startswith("move|") and req.params.get(key) != "":
            # move item to menu: the param value names the destination module
            dest = req.params.get(key)
            dest_id = -1
            mod = key.split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if dest in items[k]:
                    dest_id = k
                if mod in items[k]:
                    items[k].remove(mod)
            items[dest_id].append(mod)
            root.system_attrs["edit.menu." + datatype] = _serialize(items)
            break
        elif key.startswith("up|"):
            # move module (whole menu) or a single menu item one position up
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:  # menu: swap buckets
                    # NOTE(review): assumes bucket k - 1 exists -- confirm
                    src = items[k]
                    items[k] = items[k - 1]
                    items[k - 1] = src
                    break
                # BUGFIX: the original compared the bound method
                # `items[k].index` to 0 (always true in Python 2, TypeError in
                # Python 3); the intended test is the item's position.
                elif m in items[k] and items[k].index(m) > 0:  # menu item
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id - 1]
                    items[k][src_id - 1] = m
                    break
            root.system_attrs["edit.menu." + datatype] = _serialize(items)
            break
        elif key.startswith("down|"):
            # move module (whole menu) or a single menu item one position down
            m = key[:-2].split("|")[-1]
            items = getEditModuleHierarchy(datatype)
            for k in items:
                if m in items[k] and items[k].index(m) == 0:  # menu: swap buckets
                    # NOTE(review): assumes bucket k + 1 exists -- confirm
                    src = items[k]
                    items[k] = items[k + 1]
                    items[k + 1] = src
                    break
                # BUGFIX: same bound-method-vs-int comparison as in "up|";
                # NOTE(review): moving the last item down still indexes
                # src_id + 1 and would raise IndexError -- pre-existing.
                elif m in items[k] and items[k].index(m) > 0:  # menu item
                    src_id = items[k].index(m)
                    items[k][src_id] = items[k][src_id + 1]
                    items[k][src_id + 1] = m
                    break
            root.system_attrs["edit.menu." + datatype] = _serialize(items)
            break
    db.session.commit()
def action(req):
    """AJAX dispatcher for the edit tree.

    Depending on req.params, this either renders an edit-module tab,
    refreshes fancytree labels ('getlabels'), creates a new container
    ('addcontainer'), empties the trash ('clear_trash'), or
    deletes/moves/copies the nodes listed via getIDs(req).

    Writes JSON or TAL markup directly to ``req``; returns None except
    when delegating to an edit module's getContent().
    """
    global editModules
    language = lang(req)
    user = current_user
    trashdir = user.trash_dir
    uploaddir = user.upload_dir
    trashdir_parents = trashdir.parents
    action = req.params.get("action", "")
    changednodes = {}
    # only editors may use any of these actions
    if not user.is_editor:
        req.write("""permission denied""")
        req.setStatus(httpstatus.HTTP_FORBIDDEN)
        return
    # tab rendering is delegated entirely to the selected edit module
    if "tab" in req.params:
        tab = req.params.get("tab").split("_")[-1]
        return editModules[tab].getContent(req, [req.params.get("id")])
    if action == "getlabels":
        # NOTE(review): 'ids' is expected to be a comma-separated string; the
        # default [] has no .split -- assumes the param is always sent. Confirm.
        nids = req.params.get('ids', [])
        nids = [nid.strip() for nid in nids.split(',') if nid.strip()]
        # trash and upload dir labels are always refreshed as well
        for nid in set(nids + [_n.id for _n in [trashdir, uploaddir]]):
            try:
                changednodes[nid] = getTreeLabel(q(Node).get(nid), language)
            except:
                logg.exception("exception ignored: could not make fancytree label for node %s", nid)
        res_dict = {'changednodes': changednodes}
        req.write(json.dumps(res_dict, indent=4, ensure_ascii=False))
        return
    else:
        # all 'action's except 'getlabels' require a base dir (src)
        # but expanding of a subdir in the edit-tree via fancytree has
        # not a srcid, so no action is necessary
        srcid = req.params.get("src")
        if not srcid:
            return
        try:
            src = q(Node).get(srcid)
        except:
            req.writeTAL(
                "web/edit/edit.html", {"edit_action_error": srcid}, macro="edit_action_error")
            return
    if req.params.get('action') == 'addcontainer':
        node = q(Node).get(srcid)
        if not node.has_write_access():
            # deliver errorlabel
            req.writeTALstr(
                '<tal:block i18n:translate="edit_nopermission"/>', {})
            return
        # create new container
        newnode_type = req.params.get('type')
        if newnode_type in ['bare_collection', 'bare_directory']:
            newnode_type = newnode_type.replace('bare_', '')
        # translated container label; fall back to a generic label + typename
        translated_label = t(lang(req), 'edit_add_' + newnode_type)
        if translated_label.startswith('edit_add_'):
            translated_label = t(
                lang(req), 'edit_add_container_default') + newnode_type
        content_class = Node.get_class_for_typestring(newnode_type)
        newnode = content_class(name=translated_label)
        node.children.append(newnode)
        newnode.set("creator", user.login_name)
        newnode.set("creationtime", unicode(
            time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time()))))
        newnode.set("nodename", translated_label)  # set attribute named "nodename" to label text
        # place newnode at top of the children by setting the orderpos to the lowest orderpos - 1
        # if the orderpos gets negative, shift the oderpos of all children by incrementing with a positive number
        # make this number large enough, to avoid the next shifting of orderpos if more containers are added
        if len(node.children) == 1:
            # newnode is the only one child
            newnode.orderpos = 1000
        else:
            newnode.orderpos = node.children[0].orderpos
            newnode.orderpos = min([c.orderpos for c in node.children]) - 1
            while newnode.orderpos < 0:
                # in order to avoid negative orderpos, add a positive number to the orderpos of all children
                # make this number large enough, so there is no shift of orderpos is necessary if the next
                # container is added to the children
                for c in node.children:
                    c.orderpos += 1000
        db.session.commit()
        req.params["dest"] = newnode.id
        # answer with a fancytree node description for the new container
        label = getTreeLabel(newnode, lang=language)
        fancytree_nodedata = {
            'title': label,
            'key': newnode.id,
            'isLazy': False,
            'isFolder': True,
            'icon': getEditorIconPath(newnode),
            'readonly': 0,
            'tooltip': '%s (%s)' % (label, newnode.id),
            'children': [],
        }
        req.write(json.dumps(fancytree_nodedata, ensure_ascii=False))
        logg.info("%s adding new container %s (%s) to %s (%s, %s)",
                  user.login_name, newnode.id, newnode.type, node.id, node.name, node.type)
        return
    # resolve the destination node (move/copy); fall back to src on failure
    try:
        destid = req.params.get("dest", None)
        dest = q(Node).get(destid)
        folderid = destid
    except:
        destid = None
        dest = None
        folderid = srcid
    idlist = getIDs(req)
    mysrc = None
    errorobj = None
    # try:
    if action == "clear_trash":
        for n in trashdir.children:
            # if trashdir is it's sole parent, remove file from disk
            # attn: this will not touch files from children of deleted
            # containers
            if len(n.parents) == 1:
                logg.info("%s going to remove files from disk for node %s (%s, %s)",
                          user.login_name, n.id, n.name, n.type)
                for f in n.files:
                    # dangerous ??? check this
                    f_path = f.abspath
                    if os.path.exists(f_path):
                        logg.info("%s going to remove file %r from disk", user.login_name, f_path)
                        os.remove(f_path)
            trashdir.children.remove(n)
            db.session.commit()
        dest = trashdir
        changednodes[trashdir.id] = 1
        _parent_descr = [(p.name, p.id, p.type) for p in trashdir_parents]
        logg.info("%s cleared trash folder with id %s, child of %s", user.login_name, trashdir.id, _parent_descr)
        # return
    else:
        # delete / move / copy each selected node
        for id in idlist:
            obj = q(Node).get(id)
            mysrc = src
            # containers are removed from their real parent, not from src
            if isDirectory(obj) or isCollection(obj):
                mysrc = obj.parents[0]
            if action == "delete":
                if mysrc.has_write_access() and obj.has_write_access():
                    # "delete" means: move into the user's trash dir
                    if mysrc.id != trashdir.id:
                        mysrc.children.remove(obj)
                        changednodes[mysrc.id] = 1
                        trashdir.children.append(obj)
                        db.session.commit()
                        changednodes[trashdir.id] = 1
                        logg.info("%s moved to trash bin %s (%s, %s) from %s (%s, %s)",
                                  user.login_name, obj.id, obj.name, obj.type, mysrc.id, mysrc.name, mysrc.type)
                        dest = mysrc
                else:
                    logg.info("%s has no write access for node %s", user.login_name, mysrc.id)
                    req.writeTALstr(
                        '<tal:block i18n:translate="edit_nopermission"/>', {})
                    dest = mysrc
            elif action in ["move", "copy"]:
                if (dest != mysrc) and \
                        mysrc.has_write_access() and \
                        dest.has_write_access() and \
                        obj.has_write_access() and \
                        isinstance(dest, Container):
                    # never move/copy a node into its own subtree
                    if not dest.is_descendant_of(obj):
                        if action == "move":
                            mysrc.children.remove(obj)
                            changednodes[mysrc.id] = 1  # getLabel(mysrc)
                        dest.children.append(obj)
                        changednodes[dest.id] = 1  # getLabel(dest)
                        db.session.commit()
                        if logg.isEnabledFor(logging.INFO):
                            _what = "%s %s %r (%s, %s) " % (
                                user.login_name, action, obj.id, obj.name, obj.type)
                            _from = "from %s (%s, %s) " % (
                                mysrc.id, mysrc.name, mysrc.type)
                            _to = "to %s (%s, %s)" % (
                                dest.id, dest.name, dest.type)
                            logg.info(_what + _from + _to)
                    else:
                        logg.error("%s could not %s %s from %s to %s", user.login_name, action, obj.id, mysrc.id, dest.id)
                else:
                    return
                mysrc = None
    if not mysrc:
        mysrc = src
    # answer: refreshed labels for all touched nodes, or the folder id
    if action in ["move", "copy", "delete", "clear_trash"]:
        for nid in changednodes:
            try:
                changednodes[nid] = getTreeLabel(
                    q(Node).get(nid), lang=language)
            except:
                logg.exception("exception ignored: could not make fancytree label for node %s", nid)
        res_dict = {'changednodes': changednodes}
        req.write(json.dumps(res_dict, indent=4, ensure_ascii=False))
    else:
        try:
            if dest is not None:
                req.write(dest.id)
            else:
                req.write('no-node-id-specified (web.edit.edit.action)')
        except:
            req.write('no-node-id-specified (web.edit.edit.action)')
            logg.exception('exception ignored, no-node-id-specified (web.edit.edit.action)')
    return
def action(req):
    """AJAX dispatcher for the edit tree.

    Depending on req.params, this either renders an edit-module tab,
    refreshes fancytree labels ('getlabels'), creates a new container
    ('addcontainer'), empties the trash ('clear_trash'), or
    deletes/moves/copies the nodes listed via getIDs(req).

    Writes JSON or TAL markup directly to ``req``; returns None except
    when delegating to an edit module's getContent().
    """
    global editModules
    language = lang(req)
    user = current_user
    trashdir = user.trash_dir
    uploaddir = user.upload_dir
    trashdir_parents = trashdir.parents
    action = req.params.get("action", "")
    changednodes = {}
    # only editors may use any of these actions
    if not user.is_editor:
        req.write("""permission denied""")
        req.setStatus(httpstatus.HTTP_FORBIDDEN)
        return
    # tab rendering is delegated entirely to the selected edit module
    if "tab" in req.params:
        tab = req.params.get("tab").split("_")[-1]
        return editModules[tab].getContent(req, [req.params.get("id")])
    if action == "getlabels":
        # NOTE(review): 'ids' is expected to be a comma-separated string; the
        # default [] has no .split -- assumes the param is always sent. Confirm.
        nids = req.params.get('ids', [])
        nids = [nid.strip() for nid in nids.split(',') if nid.strip()]
        # trash and upload dir labels are always refreshed as well
        for nid in set(nids + [_n.id for _n in [trashdir, uploaddir]]):
            try:
                changednodes[nid] = getTreeLabel(q(Node).get(nid), language)
            except:
                logg.exception(
                    "exception ignored: could not make fancytree label for node %s", nid)
        res_dict = {'changednodes': changednodes}
        req.write(json.dumps(res_dict, indent=4, ensure_ascii=False))
        return
    else:
        # all 'action's except 'getlabels' require a base dir (src)
        # but expanding of a subdir in the edit-tree via fancytree has
        # not a srcid, so no action is necessary
        srcid = req.params.get("src")
        if not srcid:
            return
        try:
            src = q(Node).get(srcid)
        except:
            req.writeTAL("web/edit/edit.html", {"edit_action_error": srcid}, macro="edit_action_error")
            return
    if req.params.get('action') == 'addcontainer':
        node = q(Node).get(srcid)
        if not node.has_write_access():
            # deliver errorlabel
            req.writeTALstr('<tal:block i18n:translate="edit_nopermission"/>', {})
            return
        # create new container
        newnode_type = req.params.get('type')
        if newnode_type in ['bare_collection', 'bare_directory']:
            newnode_type = newnode_type.replace('bare_', '')
        # translated container label; fall back to a generic label + typename
        translated_label = t(lang(req), 'edit_add_' + newnode_type)
        if translated_label.startswith('edit_add_'):
            translated_label = t(lang(req), 'edit_add_container_default') + newnode_type
        content_class = Node.get_class_for_typestring(newnode_type)
        newnode = content_class(name=translated_label)
        node.children.append(newnode)
        newnode.set("creator", user.login_name)
        newnode.set(
            "creationtime", unicode(
                time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(time.time()))))
        newnode.set(
            "nodename", translated_label)  # set attribute named "nodename" to label text
        # place newnode at top of the children by setting the orderpos to the lowest orderpos - 1
        # if the orderpos gets negative, shift the oderpos of all children by incrementing with a positive number
        # make this number large enough, to avoid the next shifting of orderpos if more containers are added
        if len(node.children) == 1:
            # newnode is the only one child
            newnode.orderpos = 1000
        else:
            newnode.orderpos = node.children[0].orderpos
            newnode.orderpos = min([c.orderpos for c in node.children]) - 1
            while newnode.orderpos < 0:
                # in order to avoid negative orderpos, add a positive number to the orderpos of all children
                # make this number large enough, so there is no shift of orderpos is necessary if the next
                # container is added to the children
                for c in node.children:
                    c.orderpos += 1000
        db.session.commit()
        req.params["dest"] = newnode.id
        # answer with a fancytree node description for the new container
        label = getTreeLabel(newnode, lang=language)
        fancytree_nodedata = {
            'title': label,
            'key': newnode.id,
            'isLazy': False,
            'isFolder': True,
            'icon': getEditorIconPath(newnode),
            'readonly': 0,
            'tooltip': '%s (%s)' % (label, newnode.id),
            'children': [],
        }
        req.write(json.dumps(fancytree_nodedata, ensure_ascii=False))
        logg.info("%s adding new container %s (%s) to %s (%s, %s)",
                  user.login_name, newnode.id, newnode.type, node.id, node.name, node.type)
        return
    # resolve the destination node (move/copy); fall back to src on failure
    try:
        destid = req.params.get("dest", None)
        dest = q(Node).get(destid)
        folderid = destid
    except:
        destid = None
        dest = None
        folderid = srcid
    idlist = getIDs(req)
    mysrc = None
    errorobj = None
    # try:
    if action == "clear_trash":
        for n in trashdir.children:
            # if trashdir is it's sole parent, remove file from disk
            # attn: this will not touch files from children of deleted
            # containers
            if len(n.parents) == 1:
                logg.info(
                    "%s going to remove files from disk for node %s (%s, %s)",
                    user.login_name, n.id, n.name, n.type)
                for f in n.files:
                    # dangerous ??? check this
                    f_path = f.abspath
                    if os.path.exists(f_path):
                        logg.info("%s going to remove file %r from disk", user.login_name, f_path)
                        os.remove(f_path)
            trashdir.children.remove(n)
            db.session.commit()
        dest = trashdir
        changednodes[trashdir.id] = 1
        _parent_descr = [(p.name, p.id, p.type) for p in trashdir_parents]
        logg.info("%s cleared trash folder with id %s, child of %s", user.login_name, trashdir.id, _parent_descr)
        # return
    else:
        # delete / move / copy each selected node
        for id in idlist:
            obj = q(Node).get(id)
            mysrc = src
            # containers are removed from their real parent, not from src
            if isDirectory(obj) or isCollection(obj):
                mysrc = obj.parents[0]
            if action == "delete":
                if mysrc.has_write_access() and obj.has_write_access():
                    # "delete" means: move into the user's trash dir
                    if mysrc.id != trashdir.id:
                        mysrc.children.remove(obj)
                        changednodes[mysrc.id] = 1
                        trashdir.children.append(obj)
                        db.session.commit()
                        changednodes[trashdir.id] = 1
                        logg.info(
                            "%s moved to trash bin %s (%s, %s) from %s (%s, %s)",
                            user.login_name, obj.id, obj.name, obj.type, mysrc.id, mysrc.name, mysrc.type)
                        dest = mysrc
                else:
                    logg.info("%s has no write access for node %s", user.login_name, mysrc.id)
                    req.writeTALstr(
                        '<tal:block i18n:translate="edit_nopermission"/>', {})
                    dest = mysrc
            elif action in ["move", "copy"]:
                if (dest != mysrc) and \
                        mysrc.has_write_access() and \
                        dest.has_write_access() and \
                        obj.has_write_access() and \
                        isinstance(dest, Container):
                    # never move/copy a node into its own subtree
                    if not dest.is_descendant_of(obj):
                        if action == "move":
                            mysrc.children.remove(obj)
                            changednodes[mysrc.id] = 1  # getLabel(mysrc)
                        dest.children.append(obj)
                        changednodes[dest.id] = 1  # getLabel(dest)
                        db.session.commit()
                        if logg.isEnabledFor(logging.INFO):
                            _what = "%s %s %r (%s, %s) " % (user.login_name, action, obj.id, obj.name, obj.type)
                            _from = "from %s (%s, %s) " % (
                                mysrc.id, mysrc.name, mysrc.type)
                            _to = "to %s (%s, %s)" % (dest.id, dest.name, dest.type)
                            logg.info(_what + _from + _to)
                    else:
                        logg.error("%s could not %s %s from %s to %s", user.login_name, action, obj.id, mysrc.id, dest.id)
                else:
                    return
                mysrc = None
    if not mysrc:
        mysrc = src
    # answer: refreshed labels for all touched nodes, or the folder id
    if action in ["move", "copy", "delete", "clear_trash"]:
        for nid in changednodes:
            try:
                changednodes[nid] = getTreeLabel(q(Node).get(nid), lang=language)
            except:
                logg.exception(
                    "exception ignored: could not make fancytree label for node %s", nid)
        res_dict = {'changednodes': changednodes}
        req.write(json.dumps(res_dict, indent=4, ensure_ascii=False))
    else:
        # BUGFIX: guard against dest being None explicitly instead of relying
        # on the AttributeError from dest.id (which logged a spurious
        # traceback); keeps a bare-except safety net and matches the sibling
        # implementation of this handler.
        try:
            if dest is not None:
                req.write(dest.id)
            else:
                req.write('no-node-id-specified (web.edit.edit.action)')
        except:
            req.write('no-node-id-specified (web.edit.edit.action)')
            logg.exception(
                'exception ignored, no-node-id-specified (web.edit.edit.action)'
            )
    return