def retrieveFile(self):
    for f in filehandlers:
        if hasattr(f, "retrieveFile"):
            try:
                path = f.retrieveFile(self)
                if path:
                    return path
            except:
                logException("file handler retrieveFile() failed")

    if os.path.exists(self._path):
        return self._path
    if os.path.exists(config.basedir + "/" + self._path):
        return config.basedir + "/" + self._path

    if not os.path.exists(config.settings["paths.datadir"] + self._path):
        for f in self.node.getFiles():
            if f.getType().startswith("presentati"):
                try:
                    #_n = os.path.dirname(f.retrieveFile())+"/"+self._path
                    _n = os.path.dirname(f._path) + "/" + self._path
                    if os.path.exists(_n):
                        return _n
                except:
                    pass
    return config.settings["paths.datadir"] + self._path
def updateNodesIndex(self, nodelist):
    print "updating node index for", len(nodelist), "nodes..."
    err = {}
    schemas = {}
    #t1 = time.time()
    for node in nodelist:
        try:
            schema = node.getSchema()
            if schema not in schemas.keys():
                schemas[schema] = node
            if schema not in self.schemas:
                self.addSchema(schema)
            err = self.updateNodeIndex(node, schema)
        except core.tree.NoSuchNodeError:
            # we ignore this exception, and mark the node
            # non-dirty anyway, to prevent it from blocking
            # updates of other nodes
            logException('error during updating {}'.format(node.id))
            print "error for id", node.id
        node.cleanDirty()
    for key in schemas:
        self.nodeToSchemaDef(schemas[key], key)
    return err
def _delete(self):
    for f in filehandlers:
        if hasattr(f, "delete"):
            try:
                if f.delete(self):
                    return
            except:
                logException("file handler delete() failed")
def _add(self):
    for f in filehandlers:
        if hasattr(f, "add"):
            try:
                if f.add(self):
                    return
            except:
                logException("file handler add() failed")
def nodeToExtSearch(self, node, schema):
    # build extended search index from node
    if len(node.getSearchFields()) == 0:
        # stop if schema has no searchfields
        return True
    self.nodeToSchemaDef(node, schema)  # save definition

    keyvalue = []
    i = 1
    for field in node.getSearchFields():
        key = "field%d" % i
        i += 1
        value = ""
        if field.getFieldtype() == "union":
            for item in field.get("valuelist").split(";"):
                value += node.get(item) + '|'
        else:
            value = node.get(field.getName())
        keyvalue += [(key, modify_tex(u(protect(value)), 'strip'))]

    sql0 = 'SELECT id FROM searchmeta where id=\'{}\''.format(node.id)
    sql1 = 'UPDATE searchmeta SET '
    sql2 = 'INSERT INTO searchmeta (id, type, schema, updatetime'
    for key, value in keyvalue:
        sql1 += key + "='" + normalize_utf8(value) + "', "
        sql2 += ", "
        sql2 += key
    sql1 += "type='" + node.getContentType() + "', schema='" + schema + "', updatetime='" + node.get("updatetime") + "'"
    sql2 += ") VALUES("
    sql2 += '\'{}\', "{}", "{}", "{}"'.format(node.id, node.getContentType(), schema, node.get("updatetime"))
    for key, value in keyvalue:
        sql2 += ", '" + normalize_utf8(value) + "'"
    sql1 += " WHERE id='{}'".format(node.id)
    sql2 += ")"

    sql = ""
    try:
        sql = sql0
        if self.execute(sql0, schema, 'ext'):  # select
            sql = sql1
            self.execute(sql1, schema, 'ext')  # do update
        else:
            sql = sql2
            self.execute(sql2, schema, 'ext')  # do insert
        return True
    except:
        logException('error in sqlite insert/update: ' + sql)
        return False
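# Illustration only (hypothetical node id 4711 with schema "thesis" and two
# search fields; values invented for the example): the three statements
# assembled above come out roughly as
#
#   SELECT id FROM searchmeta where id='4711'
#   UPDATE searchmeta SET field1='Doe, Jane', field2='2011', type='document',
#       schema='thesis', updatetime='2011-01-01' WHERE id='4711'
#   INSERT INTO searchmeta (id, type, schema, updatetime, field1, field2)
#       VALUES('4711', "document", "thesis", "2011-01-01", 'Doe, Jane', '2011')
#
# The SELECT decides whether the node already has a searchmeta row, and the
# matching UPDATE or INSERT is then executed against the schema's index.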
def getHash(self):
    for f in filehandlers:
        if hasattr(f, "getHash"):
            try:
                h = f.getHash(self)
                if h:
                    return h
            except:
                logException("file handler getHash() failed")
    return get_hash(self.retrieveFile())
def getSize(self):
    for f in filehandlers:
        if hasattr(f, "getSize"):
            try:
                size = f.getSize(self)
                if size:
                    return size
                else:
                    return 0
            except:
                logException("file handler getSize() failed")
                return -1
    return get_filesize(self.retrieveFile())
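# The file-node methods above (retrieveFile, _delete, _add, getHash, getSize)
# all dispatch to an optional hook of the same name on each entry of the
# module-level `filehandlers` list; the hook receives the file node, and its
# return value decides whether the loop stops or falls through to the next
# handler and, where one exists, the built-in fallback. Below is a minimal
# sketch of such a handler. The hook names and signatures are taken from the
# dispatch loops above; the class itself, its local-cache behaviour and the
# registration via a plain list append are assumptions, not original code.
import os
import shutil


class LocalCacheFileHandler(object):
    """Hypothetical handler that mirrors file nodes into a local cache dir."""

    def __init__(self, cachedir):
        self.cachedir = cachedir

    def _cache_path(self, filenode):
        return os.path.join(self.cachedir, os.path.basename(filenode._path))

    def retrieveFile(self, filenode):
        # return a local path if a cached copy exists, a falsy value otherwise
        path = self._cache_path(filenode)
        return path if os.path.exists(path) else None

    def add(self, filenode):
        # returning a true value stops the dispatch loop in _add()
        if os.path.exists(filenode._path):
            shutil.copy(filenode._path, self._cache_path(filenode))
            return True
        return False

    def delete(self, filenode):
        path = self._cache_path(filenode)
        if os.path.exists(path):
            os.remove(path)
            return True
        return False

    def getSize(self, filenode):
        path = self._cache_path(filenode)
        return os.path.getsize(path) if os.path.exists(path) else 0


# assumed registration; the original code only shows the list being iterated:
# filehandlers.append(LocalCacheFileHandler("/tmp/mediatum-file-cache"))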
def updateMetaField(parent, name, label, orderpos, fieldtype, option="", description="",
                    fieldvalues="", fieldvaluenum="", fieldid="", filenode=None, attr_dict={}):
    metatype = getMetaType(parent)
    try:
        field = tree.getNode(fieldid)
        field.setName(name)
    except tree.NoSuchNodeError:
        field = tree.Node(name=name, type="metafield")
        metatype.addChild(field)
        field.setOrderPos(len(metatype.getChildren()) - 1)

    #<----- Begin: For fields of list type ----->
    if filenode:
        # all files of the field will be removed before a new file can be added
        for fnode in field.getFiles():
            field.removeFile(fnode)  # remove the file from the node tree
            try:
                os.remove(fnode.retrieveFile())  # delete the file from the hard drive
            except Exception as e:
                logException(e)
        field.addFile(filenode)

    if fieldvalues.startswith("multiple"):
        field.set("multiple", True)
        fieldvalues = fieldvalues.replace("multiple;", "", 1)
    else:
        field.removeAttribute("multiple")

    if fieldvalues.endswith("delete"):
        # the checkbox 'delete' was checked
        # all files of the field will be removed
        for fnode in field.getFiles():
            field.removeFile(fnode)  # remove the file from the node tree
            try:
                os.remove(fnode.retrieveFile())  # delete the file from the hard drive
            except Exception as e:
                logException(e)
        fieldvalues = fieldvalues.replace(";delete", "", 1)
    #<----- End: For fields of list type ----->

    field.set("label", label)
    field.set("type", fieldtype)
    field.set("opts", option)
    field.set("valuelist", fieldvalues.replace("\r\n", ";"))
    field.set("valuelistnum", fieldvaluenum)
    field.set("description", description)
    for attr_name, attr_value in attr_dict.items():
        field.set(attr_name, attr_value)
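# Hypothetical call (illustration only, not from the original code): create or
# update a plain text metafield "author" below the metadatatype node `parent`.
# Passing fieldid="" relies on tree.getNode("") raising NoSuchNodeError, which
# is an assumption here; with an existing field id, the first branch renames
# and updates that field instead of creating a new one.
updateMetaField(parent, "author", "Author", 0, "text",
                option="", description="name of the author",
                fieldvalues="", fieldvaluenum="", fieldid="")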
def getSearchField(self, i, width=174):
    try:
        f = None
        if self.names[i] and self.names[i] != "full":
            f = tree.getNode(self.names[i]).getFirstField()
        g = None
        if f is None:  # All Metadata
            # quick&dirty
            f = g = getMetadataType("text")
        return f.getSearchHTML(Context(g,
                                       value=self.values[i],
                                       width=width,
                                       name="query" + str(i),
                                       language=lang(self.req),
                                       collection=self.collection,
                                       user=users.getUserFromRequest(self.req),
                                       ip=self.req.ip))
    except:
        # workaround for unknown error
        logException("error during getSearchField(i)")
        return ""
def _checkRights(self, rights, fnode):
    # fnode is the node against which certain rules
    # (like e.g. user_has_paid_this_image) are checked.
    # It's not necessarily the node from which the
    # rule originates, though.
    try:
        for clause in rights.split(","):
            clause = clause.strip()
            rule = getRule(clause)
            if rule.getParsedRule().has_access(self, fnode):
                return 1
        return 0
    except ACLParseException:
        logException("Error while parsing ACL clause")
        # if we can't parse the acl rule, we assume no access
        return 0
def nodeToSimpleSearch(self, node, schema, type=""): # build simple search index from node sql_upd = "UPDATE fullsearchmeta SET type='{}', schema='{}', value='{}| ".format( node.getContentType(), node.getSchema(), node.name) sql_ins = "INSERT INTO fullsearchmeta (id, type, schema, value) VALUES('{}', '{}', '{}', '{}| ".format( node.id, node.getContentType(), node.getSchema(), node.name) # attributes val = '' for key, value in node.items(): if key not in SYSTEMATTRS: # ignore system attributes val += protect(u(value)) + '| ' for v in val.split(" "): v = u(v) if normalize_utf8(v) != v.lower(): val += ' ' + normalize_utf8(v) val = val.replace(chr(0), "") + ' ' # remove tex markup val = modify_tex(val, 'strip') # files for file in node.getFiles(): val += protect( u(file.getName() + '| ' + file.getType() + '| ' + file.getMimeType()) + '| ') sql_upd += val + '\' WHERE id=\'{}\''.format(node.id) sql_ins += val + '\')' sql = "" try: sql = 'SELECT id from fullsearchmeta WHERE id=\'{}\''.format( node.id) if self.execute(sql, schema, 'full'): # check existance sql = sql_upd # do update else: sql = sql_ins # do insert self.execute(sql, schema, 'full') return True except: logException('error in sqlite insert/update: ' + sql) return False
def nodeToSimpleSearch(self, node, schema, type=""): # build simple search index from node sql_upd ="UPDATE fullsearchmeta SET type='{}', schema='{}', value='{}| ".format(node.getContentType(), node.getSchema(), node.name) sql_ins = "INSERT INTO fullsearchmeta (id, type, schema, value) VALUES('{}', '{}', '{}', '{}| ".format(node.id, node.getContentType(), node.getSchema(), node.name) # attributes val = '' for key, value in node.items(): if key not in SYSTEMATTRS: # ignore system attributes val += protect(u(value)) + '| ' for v in val.split(" "): v = u(v) if normalize_utf8(v) != v.lower(): val += ' ' + normalize_utf8(v) val = val.replace(chr(0), "") + ' ' # remove tex markup val = modify_tex(val, 'strip') # files for file in node.getFiles(): val += protect(u(file.getName() + '| ' + file.getType() + '| ' + file.getMimeType()) + '| ') sql_upd += val + '\' WHERE id=\'{}\''.format(node.id) sql_ins += val + '\')' sql = "" try: sql = 'SELECT id from fullsearchmeta WHERE id=\'{}\''.format(node.id) if self.execute(sql, schema, 'full'): # check existance sql = sql_upd # do update else: sql = sql_ins # do insert self.execute(sql, schema, 'full') return True except: logException('error in sqlite insert/update: ' + sql) return False
def searchIndexCorrupt(self):
    try:
        from core.tree import searcher
        search_def = []
        for node in node_getSearchFields(self):
            search_def.append(node.getName())
        search_def = set(search_def)
        index_def = searcher.getDefForSchema(self.name)
        index_def = set(index_def.values())
        if len(search_def) > len(index_def) and len(self.getAllItems()) > 0:
            return True
        else:
            if search_def.union(index_def) == set([]) or index_def.difference(search_def) == set([]):
                return False
            return True
    except:
        logException("error in searchIndexCorrupt")
        return False
def getText(self, accessdata):
    try:
        if self.node.getContentType() == "directory":
            if self.node.ccount == -1:
                self.node.ccount = tree.getAllContainerChildren(self.node)
            self.count = self.node.ccount
            if self.hide_empty and self.count == 0:
                return ""  # hide empty entries
        else:
            if hasattr(self.node, "childcount"):
                self.count = self.node.childcount()
        if self.count > 0:
            return "%s <small>(%s)</small>" % (self.node.getLabel(lang=self.lang), str(self.count))
        else:
            return self.node.getLabel(lang=self.lang)
    except:
        logException("error during NavTreeEntry.getText()")
    return "Node (0)"
def getFormatedValue(self, field, node, language=None, html=1, template_from_caller=None, mask=None):
    value = node.get(field.getName()).replace(";", "; ")
    # ignore trailing newlines for textfields
    value = value.rstrip("\r\n")

    if value.find('\n') != -1:
        valuesList = value.split('\n')
        if any(lang in valuesList for lang in system_languages):
            # treat as multilingual
            index = 0
            try:
                index = valuesList.index(language)
                value = valuesList[index + 1]
            except ValueError as e:
                logException(e)
                log = logging.getLogger("errors")
                msg = "Exception in getFormatedValue for textfield:\n"
                msg += " valuesList=%r\n" % valuesList
                msg += " node.name=%r, node.id=%r, node.type=%r\n" % (node.name, node.id, node.type)
                msg += " field.name=%r, field.id=%r, field.type=%r\n" % (field.name, field.id, field.type)
                msg += " language=%r, mask=%r" % (language, mask)
                log.error(msg)
                value = ""
        else:
            # treat as monolingual
            pass

    unescaped_value = value
    if html:
        value = esc(value)

    # replace variables
    # substitute TeX sub/super-scripts with <sub>/<sup> html tags
    value = modify_tex(value, 'html')

    for var in re.findall(r'&lt;(.+?)&gt;', value):
        if var == "att:id":
            value = value.replace("&lt;" + var + "&gt;", node.id)
        elif var.startswith("att:"):
            val = node.get(var[4:])
            if val == "":
                val = "____"
            value = value.replace("&lt;" + var + "&gt;", val)
    value = value.replace("&lt;", "<").replace("&gt;", ">")

    maskitem = getMaskitemForField(field, language=language, mask=mask)
    if not maskitem:
        return (field.getLabel(), value)

    # use default value from mask if value is empty
    if value == '':
        value = maskitem.getDefault()

    if template_from_caller and template_from_caller[0] and maskitem and str(maskitem.id) == template_from_caller[3]:
        value = template_from_caller[0]

    context = {'node': node, 'host': "http://" + config.get("host.name")}

    if (template_from_caller and template_from_caller[0]) and (not node.get(field.getName())):
        value = runTALSnippet(value, context)
    else:
        try:
            value = runTALSnippet(value, context)
        except:
            value = runTALSnippet(unescaped_value, context)

    return (field.getLabel(), value)
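# Standalone illustration of the multilingual branch above, assuming "en" and
# "de" are among system_languages: a multilingual textfield stores alternating
# language-name / text lines in a single attribute value, and the requested
# language's text is the entry that follows the language name.
stored = "en\nMultilingual title\nde\nMehrsprachiger Titel"
values = stored.split("\n")
language = "de"
print(values[values.index(language) + 1])   # -> Mehrsprachiger Titel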
def upload_for_html(req):
    user = users.getUserFromRequest(req)
    datatype = req.params.get("datatype", "image")
    id = req.params.get("id")
    node = tree.getNode(id)

    access = AccessData(req)
    if not (access.hasAccess(node, 'read') and access.hasAccess(node, 'write') and access.hasAccess(node, 'data')):
        return 403

    for key in req.params.keys():
        if key.startswith("delete_"):
            filename = key[7:-2]
            for file in node.getFiles():
                if file.getName() == filename:
                    node.removeFile(file)

    if "file" in req.params.keys():  # file
        # file upload via (possibly disabled) upload form in custom image
        # browser
        file = req.params["file"]
        del req.params["file"]
        if hasattr(file, "filesize") and file.filesize > 0:
            try:
                logger.info(user.name + " upload " + file.filename + " (" + file.tempname + ")")
                nodefile = importFile(file.filename, file.tempname)
                node.addFile(nodefile)
                req.request["Location"] = req.makeLink("nodefile_browser/%s/" % id, {})
            except EncryptionException:
                req.request["Location"] = req.makeLink("content", {
                    "id": id,
                    "tab": "tab_editor",
                    "error": "EncryptionError_" + datatype[:datatype.find("/")]})
            except:
                logException("error during upload")
                req.request["Location"] = req.makeLink("content", {
                    "id": id,
                    "tab": "tab_editor",
                    "error": "PostprocessingError_" + datatype[:datatype.find("/")]})
        return send_nodefile_tal(req)

    if "upload" in req.params.keys():  # NewFile
        # file upload via CKeditor Image Properties / Upload tab
        file = req.params["upload"]
        del req.params["upload"]
        if hasattr(file, "filesize") and file.filesize > 0:
            try:
                logger.info(user.name + " upload via ckeditor " + file.filename + " (" + file.tempname + ")")
                nodefile = importFile(file.filename, file.tempname)
                node.addFile(nodefile)
            except EncryptionException:
                req.request["Location"] = req.makeLink("content", {
                    "id": id,
                    "tab": "tab_editor",
                    "error": "EncryptionError_" + datatype[:datatype.find("/")]})
            except:
                logException("error during upload")
                req.request["Location"] = req.makeLink("content", {
                    "id": id,
                    "tab": "tab_editor",
                    "error": "PostprocessingError_" + datatype[:datatype.find("/")]})

        url = '/file/' + id + '/' + file.tempname.split('/')[-1]

        res = """<script type="text/javascript">
            // Helper function to get parameters from the query string.
            function getUrlParam(paramName) {
                var reParam = new RegExp('(?:[\?&]|&amp;)' + paramName + '=([^&]+)', 'i');
                var match = window.location.search.match(reParam);
                return (match && match.length > 1) ? match[1] : '';
            }
            funcNum = getUrlParam('CKEditorFuncNum');
            window.parent.CKEDITOR.tools.callFunction(funcNum, "%(fileUrl)s","%(customMsg)s");
        </script>;""" % {
            'fileUrl': url.replace('"', '\\"'),
            'customMsg': (t(lang(req), "edit_fckeditor_cfm_uploadsuccess")),
        }
        return res

    return send_nodefile_tal(req)
elif "file" in req.params.keys(): file = req.params["file"] del req.params["file"] if hasattr(file,"filesize") and file.filesize>0: try: bibtex.importBibTeX(file.tempname, importdir, req) req.request["Location"] = req.makeLink("content", {"id":importdir.id}) except ValueError, e: req.request["Location"] = req.makeLink("content", {"id":importdir.id, "error":str(e)}) req.params["error"] = str(e) except bibtex.MissingMapping,e: req.request["Location"] = req.makeLink("content", {"id":importdir.id, "error":str(e)}) req.params["error"] = str(e) except: logException("error during upload") req.request["Location"] = req.makeLink("content", {"id":importdir.id, "error":"PostprocessingError"}) req.params["error"] = "file_processingerror" msg_t = (user.getName(), importdir.id, importdir.name, importdir.type, req.params) msg = "%s used import module for bibtex import for node %r (%r, %r): %r" % msg_t logg.info(msg) return getContent(req, [importdir.id]) elif req.params["doi"]: doi = req.params["doi"] logg.info("processing DOI import for: %s", doi) try: doi_extracted = citeproc.extract_and_check_doi(doi) citeproc.import_doi(doi_extracted, importdir) except citeproc.InvalidDOI: logg.error("Invalid DOI: '%s'", doi)