def getACL(pagename, request, fresh=False):
    """Return the AccessControlList for pagename.

    Lookup order: per-request cache -> memcache -> pageAcls table.
    Pass fresh=True to bypass both caches and read the database.
    """
    pagename = pagename.lower()
    cache_key = (pagename, request.config.wiki_id)
    # Per-request cache hit: the ACL object was already built this request.
    if not fresh and request.req_cache['acls'].has_key(cache_key):
        return request.req_cache['acls'][cache_key]

    rights = None
    if not fresh and config.memcache:
        rights = request.mc.get('acl:%s' % mc_quote(pagename))
    from_memcache = rights is not None

    if not from_memcache:
        # Cache miss: build the rights dict straight from the database.
        rights = {}
        query_args = {'pagename': pagename,
                      'wiki_id': request.config.wiki_id}
        request.cursor.execute(
            """SELECT groupname, may_read, may_edit, may_delete, may_admin
               from pageAcls where pagename=%(pagename)s and
               wiki_id=%(wiki_id)s""", query_args)
        for (groupname, may_read, may_edit, may_delete,
             may_admin) in request.cursor.fetchall():
            rights[groupname] = (may_read, may_edit, may_delete, may_admin)

    if rights:
        acl = AccessControlList(request, dict=rights)
    else:
        # empty dict -> page has no special ACL; use the defaults
        acl = AccessControlList(request)

    # add() is a no-op when the key already exists, so this only fills in
    # a missing memcache entry (never clobbers a concurrent writer).
    if config.memcache and not from_memcache:
        request.mc.add('acl:%s' % mc_quote(pagename), rights)
    request.req_cache['acls'][cache_key] = acl
    return acl
def setACL(pagename, groupdict, request):
    """Write the per-page ACL in groupdict to the database and caches.

    groupdict maps group name -> rights sequence indexed through
    ACL_RIGHTS_TABLE ('read', 'edit', 'delete', 'admin').  Existing
    pageAcls rows are updated in place, missing ones inserted.  If the
    resulting settings equal the wiki-wide defaults, the rows are removed
    again so later changes to the defaults propagate to this page.
    """
    from Sycamore.Page import Page
    pagename = pagename.lower()
    d = {'pagename': pagename, 'wiki_id': request.config.wiki_id}
    page = Page(pagename, request)
    for groupname in groupdict:
        # fresh=True forces the group to be re-read, bypassing caches
        group = Group(groupname, request, fresh=True)
        d['groupname'] = groupname
        d['may_read'] = groupdict[groupname][ACL_RIGHTS_TABLE['read']]
        d['may_edit'] = groupdict[groupname][ACL_RIGHTS_TABLE['edit']]
        d['may_delete'] = groupdict[groupname][ACL_RIGHTS_TABLE['delete']]
        d['may_admin'] = groupdict[groupname][ACL_RIGHTS_TABLE['admin']]
        # update-or-insert: probe for an existing row first
        request.cursor.execute(
            """SELECT groupname from pageAcls where pagename=%(pagename)s
               and wiki_id=%(wiki_id)s and groupname=%(groupname)s""", d)
        if request.cursor.fetchone():
            request.cursor.execute(
                """UPDATE pageAcls set may_read=%(may_read)s,
                   may_edit=%(may_edit)s, may_delete=%(may_delete)s,
                   may_admin=%(may_admin)s where groupname=%(groupname)s
                   and pagename=%(pagename)s and wiki_id=%(wiki_id)s""",
                d, isWrite=True)
        else:
            request.cursor.execute(
                """INSERT into pageAcls (groupname, pagename, may_read,
                   may_edit, may_delete, may_admin, wiki_id) values
                   (%(groupname)s, %(pagename)s, %(may_read)s,
                   %(may_edit)s, %(may_delete)s, %(may_admin)s,
                   %(wiki_id)s)""", d, isWrite=True)
    # if the settings are the same then we clear them out so master
    # changes can propagate through
    if _sameAsDefaults(groupdict, request):
        request.cursor.execute(
            """DELETE from pageAcls where pagename=%(pagename)s and
               wiki_id=%(wiki_id)s""", d, isWrite=True)
    if config.memcache:
        # want to clear out when it's the same as the global defaults.
        # this way changes to global settings will affect the page if
        # it's not special in any way, priv-wise.
        if _sameAsDefaults(groupdict, request):
            request.mc.set('acl:%s' % mc_quote(pagename), {})
        else:
            request.mc.set('acl:%s' % mc_quote(pagename), groupdict)
    # set for this request
    if groupdict:
        request.req_cache['acls'][
            (pagename, request.config.wiki_id)] = AccessControlList(
                request, dict=groupdict)
    else:
        request.req_cache['acls'][
            (pagename, request.config.wiki_id)] = AccessControlList(request)
def save(self):
    """Persist queued membership changes for this group.

    Applies add_users/remove_users to the userGroups table and
    add_ips/remove_ips to userGroupsIPs, refreshes memcache and the
    per-request caches, then re-saves the wiki configuration so the
    change takes effect immediately.
    """
    d = {'groupname':self.name, 'wiki_id': self.request.config.wiki_id}
    if self.groupdict is not None:
        for username in self.add_users:
            d['username'] = username
            self.request.cursor.execute(
                """INSERT into userGroups (username, groupname, wiki_id)
                   values (%(username)s, %(groupname)s, %(wiki_id)s)""",
                d, isWrite=True)
        for username in self.remove_users:
            d['username'] = username
            self.request.cursor.execute(
                """DELETE from userGroups where username=%(username)s and
                   groupname=%(groupname)s and wiki_id=%(wiki_id)s""",
                d, isWrite=True)
        # set() (not add()) so the cached dict always reflects this save
        if config.memcache:
            self.request.mc.set('groupdict:%s' % mc_quote(self.name),
                                self.groupdict)
        self.request.req_cache['group_dict'][
            (self.name, self.request.config.wiki_id)] = self.groupdict
    if self.ips is not None:
        for ip in self.add_ips:
            d['ip'] = ip
            self.request.cursor.execute(
                """INSERT into userGroupsIPs (ip, groupname, wiki_id)
                   values (%(ip)s, %(groupname)s, %(wiki_id)s)""",
                d, isWrite=True)
        for ip in self.remove_ips:
            d['ip'] = ip
            self.request.cursor.execute(
                """DELETE from userGroupsIPs where ip=%(ip)s and
                   groupname=%(groupname)s and wiki_id=%(wiki_id)s""",
                d, isWrite=True)
        if config.memcache:
            self.request.mc.set('groupips:%s' % mc_quote(self.name),
                                self.ips)
        self.request.req_cache['group_ips'][
            (self.name, self.request.config.wiki_id)] = self.ips
    # a brand-new group gets the same default rights as "Known"
    if self.name not in self.request.config.acl_rights_default:
        self.request.config.acl_rights_default[self.name] = \
            self.request.config.acl_rights_default["Known"]
    # sets the config. becomes active as soon as this line is executed!
    self.request.config.set_config(self.request.config.wiki_name,
                                   self.request.config.get_dict(),
                                   self.request)
def assemble_query(): from Sycamore.wikiutil import mc_quote # let's assemble the query and key if we use memcache key = None query = '' if not deleted and not thumbnail and not version: if config.memcache: key = "files:%s,%s" % (mc_quote(dict['filename']), mc_quote(dict['page_name'])) query = """SELECT file, uploaded_time from files where name=%(filename)s and attached_to_pagename=%(page_name)s and wiki_id=%(wiki_id)s""" elif thumbnail: if not ticket: if config.memcache: key = "thumbnails:%s,%s" % (mc_quote(dict['filename']), mc_quote(dict['page_name'])) query = """SELECT image, last_modified from thumbnails where name=%(filename)s and attached_to_pagename=%(page_name)s and wiki_id=%(wiki_id)s""" else: if config.memcache: size_encoded, ticket_encoded = size, ticket if size: size_encoded = size.encode(config.charset) if ticket: ticket_encoded = ticket.encode(config.charset) key = "thumbnails:%s,%s" % (mc_quote(dict['filename']), size_encoded or ticket_encoded) elif deleted: # default behavior is to grab the latest backup # version of the image if not version: if config.memcache: key = "oldfiles:%s,%s" % (mc_quote(dict['filename']), mc_quote(dict['page_name'])) query = """SELECT file, uploaded_time from oldFiles where name=%(filename)s and attached_to_pagename=%(page_name)s and wiki_id=%(wiki_id)s order by uploaded_time desc;""" elif version: if config.memcache: key = "oldfiles:%s,%s,%s" % (mc_quote(dict['filename']), mc_quote(dict['page_name']), version) query = """SELECT file, uploaded_time from oldFiles where name=%(filename)s and attached_to_pagename=%(page_name)s and uploaded_time=%(file_version)s and wiki_id=%(wiki_id)s""" return query, key
def set_cache_for_file():
    """Set the memory cache for the new file (or mark it deleted).

    Closure: reads do_delete, thumbnail, temporary, ticket, dict,
    raw_image, uploaded_time, is_image, replaced_image and request from
    the enclosing scope.  No-op when memcache is disabled.
    """
    if not config.memcache:
        return
    if not do_delete:
        if not thumbnail:
            table = 'files'
        else:
            table = 'thumbnails'
        if not temporary:
            key = "%s:%s,%s" % (table, mc_quote(dict['filename']),
                                mc_quote(dict['pagename'].lower()))
        else:
            # BUG FIX: this key used to be built as
            # "%s,%s,%s" % (table, filename, ticket) — a comma after the
            # table name — so it never matched the "table:..." keys the
            # readers (e.g. assemble_query) look up.  Use the same
            # "table:filename,ticket" shape as everywhere else.
            key = "%s:%s,%s" % (table, mc_quote(dict['filename']), ticket)
        image_obj = (raw_image, uploaded_time)
        request.mc.set(key, image_obj)
    else:
        # deletion: overwrite cached entries with False (set() rather
        # than delete() to avoid races with readers re-filling the cache)
        if not thumbnail:
            key = "files:%s,%s" % (mc_quote(dict['filename']),
                                   mc_quote(dict['pagename'].lower()))
            request.mc.set(key, False)
        if is_image and thumbnail:
            key = "thumbnails:%s,%s" % (mc_quote(dict['filename']),
                                        mc_quote(dict['pagename'].lower()))
            request.mc.set(key, False)
    # set new file dict
    if not replaced_image:
        get_filedict(request, dict['pagename'], fresh=True, set=True)
def get_interwiki_recent_changes():
    """
    we're doing an interwiki recent changes so we get rc for each
    wiki in on_wikis list.  we use get_multi here.

    Closure: reads request, on_wikis, userFavoritesFor, fresh, page,
    total_changes_limit, changes_since and check_acl from the enclosing
    scope.  Returns a combined, time-sorted, ACL-filtered change list.
    """
    changes = []
    original_wiki = request.config.wiki_name
    if not userFavoritesFor:
        # map each wiki's memcache key back to the wiki it came from
        wiki_keys = {}
        id_to_name = {}
        for wiki_name in on_wikis:
            request.switch_wiki(wiki_name)
            id_to_name[request.config.wiki_id] = wiki_name
            # key shape is "<wiki_id>rc:<page>"
            wiki_keys['%src:%s' % (request.config.wiki_id,
                                   mc_quote(page))] = None
        # switch back to our original wiki
        if request.config.wiki_name != original_wiki:
            request.switch_wiki(original_wiki)
        if config.memcache and not fresh:
            # one round-trip for all wikis at once
            values = request.mc.get_multi(wiki_keys.keys(),
                                          wiki_global=True)
            for k, got_changes in values.iteritems():
                if got_changes is not None:
                    if total_changes_limit:
                        got_changes = got_changes[:total_changes_limit]
                    if changes_since:
                        got_changes = _get_changes_since(changes_since,
                                                         got_changes)
                    changes += got_changes
                    wiki_keys[k] = got_changes
        # any wiki still mapped to None missed the cache: hit its db
        for key in wiki_keys:
            if wiki_keys[key] is None:
                wiki_id = int(key[:key.find('rc:')])
                request.switch_wiki(id_to_name[wiki_id])
                changes += getRecentChanges(request, wiki_global=False,
                                            changes_since=changes_since,
                                            check_acl=check_acl)
    else:
        # NOTE(review): wiki_name is not bound in this branch — it looks
        # like a "for wiki_name in on_wikis:" loop was intended here.
        # TODO confirm against the original source / callers.
        request.switch_wiki(wiki_name)
        changes += getRecentChanges(request, wiki_global=False,
                                    changes_since=changes_since,
                                    check_acl=check_acl)
    # switch back to our original wiki
    if request.config.wiki_name != original_wiki:
        request.switch_wiki(original_wiki)
    changes = _sort_changes_by_time(changes)
    changes = changes[:ABSOLUTE_RC_CHANGES_LIMIT]
    # for consistency's sake
    if check_acl:
        changes = filter_may_read(changes, request)
    return changes
def deleteNewerFileInfo(filename, pagename, version, request):
    """Invalidate cached copies of every oldFiles version newer than
    `version` for the given file/page pair.  No-op without memcache."""
    if not config.memcache:
        return
    params = {'filename': filename, 'pagename': pagename,
              'version': version, 'wiki_id': request.config.wiki_id}
    request.cursor.execute(
        """SELECT uploaded_time from oldFiles where
           name=%(filename)s and
           attached_to_pagename=%(pagename)s and
           wiki_id=%(wiki_id)s and
           uploaded_time>%(version)s""", params)
    quoted_file = wikiutil.mc_quote(filename)
    quoted_page = wikiutil.mc_quote(pagename)
    for row in request.cursor.fetchall():
        # we do set() rather than delete() to avoid possible
        # race conditions
        newer_time = row[0]
        request.mc.set("oldfiles:%s,%s,%s" % (quoted_file, quoted_page,
                                              repr(newer_time)), False)
def _get_config(self, wiki_name, request, process_config, fresh=False):
    """Load the configuration dict for wiki_name.

    Lookup order: per-request cache -> memcache ("settings:<name>",
    global namespace) -> wikis table.  The row's pickled other_settings
    is merged into the base config vars.  Pass fresh=True to bypass the
    caches.  Returns the assembled config dict.
    """
    from Sycamore.wikiutil import mc_quote
    if not fresh and request.req_cache['wiki_config'].has_key(wiki_name):
        return request.req_cache['wiki_config'][wiki_name]
    d = { 'name': wiki_name , 'wiki_name': wiki_name }
    settings_dict = None
    # set each config object to have the same basic configuration variables
    d.update(reduce_to_local_config(CONFIG_VARS))
    if process_config:
        # NOTE(review): bare `memcache` here (not config.memcache) —
        # presumably a module-level flag/import; confirm it exists.
        if not fresh and memcache:
            settings_dict = request.mc.get("settings:%s" % (
                mc_quote(wiki_name)), wiki_global=True)
        if settings_dict is None:
            # cache miss: read the wiki's row from the database
            request.cursor.execute("""SELECT id, is_disabled, sitename,
                domain, other_settings from wikis
                where name=%(name)s""", d)
            result = request.cursor.fetchone()
            if result:
                (id, is_disabled, sitename, domain,
                 other_settings_pickled) = result
                other_settings = cPickle.loads(
                    _binaryToString(other_settings_pickled))
                d.update(other_settings)
                d['wiki_id'] = id
                d['is_disabled'] = is_disabled
                d['sitename'] = sitename
                d['domain'] = domain
                d['wiki_name'] = wiki_name
                d['name'] = wiki_name
                settings_dict = d
            else:
                # unknown wiki: cache the empty result too
                settings_dict = {}
            if memcache:
                request.mc.add("settings:%s" % mc_quote(wiki_name),
                               settings_dict, wiki_global=True)
        else:
            # memcache hit: merge the cached settings over the base vars
            d.update(settings_dict)
    request.req_cache['wiki_config'][wiki_name] = d
    return d
def deleteAllPageInfo(pagename, request):
    """
    Delete all of the cached information associated with the page /
    it's past versions.  No-op when memcache is disabled.
    """
    if not config.memcache:
        return
    query_args = {'pagename': pagename,
                  'wiki_id': request.config.wiki_id}
    request.cursor.execute(
        """SELECT editTime from allPages where
           name=%(pagename)s and wiki_id=%(wiki_id)s""", query_args)
    quoted_name = wikiutil.mc_quote(pagename)
    for row in request.cursor.fetchall():
        # we do set() rather than delete() to avoid possible race conditions
        edit_time = row[0]
        request.mc.set("page_text:%s,%s" % (quoted_name, repr(edit_time)),
                       False)
def _init_groupdict(self):
    """Lazily populate self.groupdict (username -> None membership map).

    'All' and 'Known' are virtual groups with no stored members.
    Lookup order: per-request cache -> memcache -> userGroups table,
    unless self.fresh is set, in which case caches are bypassed.
    """
    if self.groupdict is None:
        if (self.name == 'All') or (self.name == 'Known'):
            self.groupdict = {}
            return
        groupdict = None
        if not self.fresh:
            if self.request.req_cache['group_dict'].has_key(
                    (self.name, self.request.config.wiki_id)):
                self.groupdict = self.request.req_cache['group_dict'][
                    (self.name, self.request.config.wiki_id)]
                return
            if config.memcache:
                groupdict = self.request.mc.get('groupdict:%s' % (
                    mc_quote(self.name)))
        if groupdict is None:
            # cache miss (or fresh requested): read members from the db
            groupdict = {}
            d = {'groupname':self.name,
                 'wiki_id': self.request.config.wiki_id}
            self.request.cursor.execute(
                """SELECT username from userGroups where
                   groupname=%(groupname)s and wiki_id=%(wiki_id)s""", d)
            results = self.request.cursor.fetchall()
            if results:
                for item in results:
                    groupdict[item[0]] = None
            # add() only fills a missing entry; skipped for fresh reads
            if config.memcache and not self.fresh:
                self.request.mc.add('groupdict:%s' % mc_quote(self.name),
                                    groupdict)
        self.request.req_cache['group_dict'][
            (self.name, self.request.config.wiki_id)] = groupdict
        self.groupdict = groupdict
def _init_ips(self):
    """Lazily populate self.ips (ip -> None membership map).

    Mirrors _init_groupdict: 'All'/'Known' have no stored IPs; lookup
    order is per-request cache -> memcache -> userGroupsIPs table,
    with caches bypassed when self.fresh is set.
    """
    if self.ips is None:
        if (self.name == 'All') or (self.name == 'Known'):
            self.ips = {}
            return
        ips = None
        if not self.fresh:
            if self.request.req_cache['group_ips'].has_key(
                    (self.name, self.request.config.wiki_id)):
                self.ips = self.request.req_cache['group_ips'][
                    (self.name, self.request.config.wiki_id)]
                return
            if config.memcache:
                ips = self.request.mc.get(
                    'groupips:%s' % mc_quote(self.name))
        if ips is None:
            # cache miss (or fresh requested): read IPs from the db
            ips = {}
            d = {'groupname':self.name,
                 'wiki_id': self.request.config.wiki_id}
            self.request.cursor.execute(
                """SELECT ip from userGroupsIPs where
                   groupname=%(groupname)s and wiki_id=%(wiki_id)s""", d)
            results = self.request.cursor.fetchall()
            if results:
                for item in results:
                    ips[item[0]] = None
            # add() only fills a missing entry; skipped for fresh reads
            if config.memcache and not self.fresh:
                self.request.mc.add('groupips:%s' % mc_quote(self.name),
                                    ips)
        self.request.req_cache['group_ips'][
            (self.name, self.request.config.wiki_id)] = ips
        self.ips = ips
def update(self, content, links):
    """Store freshly rendered page content and its outgoing links.

    Writes the rendered text/timestamp into curPages, replaces the
    page's rows in the links table, then rebuilds and re-caches the
    page_info object so readers see the new cached text.
    """
    links = self._consider_talk_link(links)
    cached_time = time.time()
    self.request.cursor.execute(
        """UPDATE curPages set cachedText=%(cached_content)s,
           cachedTime=%(cached_time)s where name=%(key)s and
           wiki_id=%(wiki_id)s""",
        {'cached_content':wikidb.dbapi.Binary(content),
         'cached_time':cached_time, 'key':self.key,
         'wiki_id':self.request.config.wiki_id}, isWrite=True)
    # replace all outgoing links: delete then re-insert
    self.request.cursor.execute(
        """DELETE from links where source_pagename=%(key)s and
           wiki_id=%(wiki_id)s""",
        {'key':self.key, 'wiki_id':self.request.config.wiki_id},
        isWrite=True)
    for link in links:
        self.request.cursor.execute(
            """INSERT into links (source_pagename, destination_pagename,
               destination_pagename_propercased, wiki_id) values
               (%(key)s, %(link)s, %(link_propercased)s, %(wiki_id)s)""",
            {'key':self.key, 'link':link.lower(),
             'link_propercased':link,
             'wiki_id':self.request.config.wiki_id}, isWrite=True)
    # rebuild page_info from the db, then patch in the decoded text so
    # the cached object carries (text, time) rather than raw binary
    page_info = pageInfo(Page(self.key, self.request),
                         get_from_cache=False, cached_content=content,
                         cached_time=cached_time)
    text = wikidb.binaryToString(content)
    page_info.cached_text = (text, cached_time)
    if config.memcache:
        if self.request.set_cache:
            self.request.mc.set("page_info:%s" % wikiutil.mc_quote(self.key),
                                page_info)
        else:
            self.request.mc.add("page_info:%s" % wikiutil.mc_quote(self.key),
                                page_info)
    # NOTE(review): this req_cache entry is keyed with quoteFilename()
    # while pageInfo() keys it with mc_quote() — looks inconsistent;
    # TODO confirm which key readers use.
    self.request.req_cache['page_info'][(wikiutil.quoteFilename(self.key),
        self.request.config.wiki_id)] = page_info
def getPageLinks(pagename, request, update=False):
    """
    Caches all of the page links on page pagename.  Subsequent calls to
    page.exists() will be much faster if they're a link.

    Returns a list of the properly cased links (pagenames).

    Pass update=True to overwrite (set) rather than fill (add) the
    memcache entry.
    """
    links = None
    got_from_memcache = False
    lower_pagename = pagename.lower()
    if config.memcache:
        mc_key = 'links:%s' % wikiutil.mc_quote(lower_pagename)
        links = request.mc.get(mc_key)
    if links is None:
        # get from database: join against curPages to learn whether each
        # destination page currently exists
        request.cursor.execute(
            """SELECT destination_pagename_propercased, curPages.name
               from links left join curPages on
               (destination_pagename=curPages.name and
               links.wiki_id=%(wiki_id)s and
               curPages.wiki_id=%(wiki_id)s)
               where source_pagename=%(pagename)s and
               links.wiki_id=%(wiki_id)s""",
            {'pagename': lower_pagename,
             'wiki_id': request.config.wiki_id})
        result = request.cursor.fetchall()
        # links maps lowercased name -> (exists, propercased name)
        links = {}
        for link, exists in result:
            if exists:
                links[link.lower()] = (True, link)
            else:
                links[link.lower()] = (False, link)
    else:
        got_from_memcache = True
    # prime the per-request pagename cache so page.exists() is fast
    for link in links:
        exists, proper_name = links[link]
        key = proper_name.lower()
        if exists:
            request.req_cache['pagenames'][
                (key, request.config.wiki_name)] = proper_name
        else:
            request.req_cache['pagenames'][
                (key, request.config.wiki_name)] = False
    if config.memcache and not got_from_memcache:
        if update:
            request.mc.set(mc_key, links)
        else:
            request.mc.add(mc_key, links)
    return [info[1] for link, info in links.iteritems()]
def setRecentChanges(request, max_days=False, total_changes_limit=0,
                     per_page_limit='', page='', changes_since=0,
                     userFavoritesFor='', wiki_global=False):
    """
    We call this method to set the recent changes cache/s after a
    change has occured.  Recomputes the change list from the database
    and stores it in memcache under 'rc:<page>'.  No-op without
    memcache.
    """
    from wikiutil import mc_quote
    if not config.memcache:
        return
    if page:
        # per-page change lists are capped
        total_changes_limit = 100
    fresh_changes = getRecentChanges(
        request, max_days=max_days,
        total_changes_limit=total_changes_limit,
        per_page_limit=per_page_limit, page=page,
        changes_since=changes_since,
        userFavoritesFor=userFavoritesFor,
        wiki_global=wiki_global, fresh=True,
        add_to_cache=False, check_acl=False)
    request.mc.set('rc:%s' % mc_quote(page), fresh_changes)
def clear(self, type=None): key = wikiutil.mc_quote(self.key) #clears the content of the cache regardless of whether or not the page # needs an update self.request.cursor.execute("""UPDATE curPages set cachedText=NULL, cachedTime=NULL where name=%(key)s and wiki_id=%(wiki_id)s""", {'key':self.key, 'wiki_id':self.request.config.wiki_id}, isWrite=True) if type == 'page save delete': if config.memcache: page_info = pageInfo(Page(self.key, self.request), get_from_cache=False) self.request.mc.set("page_info:%s" % key, page_info) self.request.mc.set("pagename:%s" % key, False) self.request.mc.set("page_text:%s" % key, False) self.request.mc.delete("links:%s" % key) else: if config.memcache: self.request.mc.delete("page_info:%s" % key) self.request.mc.delete("pagename:%s" % key) self.request.mc.delete("page_text:%s" % key) self.request.mc.delete("links:%s" % key) if self.request.req_cache['page_info'].has_key(( key,self.request.config.wiki_id)): del self.request.req_cache['page_info'][ (key, self.request.config.wiki_id)] if self.request.req_cache['pagenames'].has_key( (self.key, self.request.config.wiki_id)): del self.request.req_cache['pagenames'][ (self.key, self.request.config.wiki_id)] if (config.memcache and self.key == self.request.config.interwikimap.lower()): self.request.mc.delete('interwiki')
def copy_files(oldpagename, newpagename, request):
    """
    copies files from oldpagename to newpagename.  keeps the files on
    oldpagename for manual deletion.

    if there is a file on the page newpagename that has the same name as
    a file on oldpagename, then the file from newpagename supersedes the
    old file, and the old file is deleted (but kept as a deleted file as
    per usual delete file / is accessible via the info tab)
    """
    from Sycamore.action.Files import get_filelist
    old_page_files = get_filelist(request, oldpagename)
    new_page_files = get_filelist(request, newpagename)
    for filename in old_page_files:
        is_image = wikiutil.isImage(filename)
        request.cursor.execute(
            """SELECT file, uploaded_time, uploaded_by, uploaded_by_ip,
               attached_to_pagename_propercased from files where
               name=%(filename)s and attached_to_pagename=%(oldpagename)s
               and wiki_id=%(wiki_id)s""",
            {'filename': filename, 'oldpagename': oldpagename.lower(),
             'wiki_id': request.config.wiki_id})
        result = request.cursor.fetchone()
        if result:
            # gather everything the INSERT/UPDATE statements below need
            old_page_file_dict = {
                'filename': filename, 'uploaded_time': result[1],
                'uploaded_by': result[2],
                'attached_to_pagename_propercased': result[4],
                'oldpagename': oldpagename.lower(),
                'uploaded_by_ip': result[3],
                'newpagename': newpagename.lower(),
                'newpagename_propercased':
                    Page(newpagename, request).proper_name(),
                'timenow': time.time(), 'userid': request.user.id,
                'userip': request.remote_addr,
                'wiki_id': request.config.wiki_id}
            if is_image:
                # pick up image dimensions, if recorded
                request.cursor.execute(
                    """SELECT xsize, ysize from imageInfo where
                       name=%(filename)s and
                       attached_to_pagename=%(oldpagename)s and
                       wiki_id=%(wiki_id)s""",
                    {'filename': filename,
                     'oldpagename': oldpagename.lower(),
                     'wiki_id': request.config.wiki_id})
                result = request.cursor.fetchone()
                if result:
                    old_page_file_dict['xsize'] = result[0]
                    old_page_file_dict['ysize'] = result[1]
            if filename not in new_page_files:
                # no name clash: plain copy onto the new page
                request.cursor.execute("""INSERT into files (name, file,
                    uploaded_time, uploaded_by, uploaded_by_ip,
                    attached_to_pagename,
                    attached_to_pagename_propercased, wiki_id) values
                    (%(filename)s,
                    (select file from files where name=%(filename)s and
                     attached_to_pagename=%(oldpagename)s and
                     wiki_id=%(wiki_id)s
                    ), %(uploaded_time)s, %(uploaded_by)s,
                    %(uploaded_by_ip)s, %(newpagename)s,
                    %(newpagename_propercased)s, %(wiki_id)s
                    )""", old_page_file_dict, isWrite=True)
                if is_image:
                    if old_page_file_dict.has_key('xsize'):
                        request.cursor.execute("""INSERT into imageInfo
                            (name, attached_to_pagename, xsize, ysize,
                            wiki_id) values (%(filename)s,
                            %(newpagename)s, %(xsize)s, %(ysize)s,
                            %(wiki_id)s) """,
                            old_page_file_dict, isWrite=True)
            else:
                # name clash: archive the new page's current file into
                # oldFiles (a "deleted" version) before replacing it
                request.cursor.execute("""INSERT into oldFiles (name,
                    file, uploaded_time, uploaded_by, uploaded_by_ip,
                    attached_to_pagename,
                    attached_to_pagename_propercased, deleted_by,
                    deleted_by_ip, deleted_time, wiki_id) values
                    (%(filename)s,
                    (SELECT file from files where name=%(filename)s and
                     attached_to_pagename=%(newpagename)s and
                     wiki_id=%(wiki_id)s
                    ),
                    (SELECT uploaded_time from files where
                     name=%(filename)s and
                     attached_to_pagename=%(newpagename)s and
                     wiki_id=%(wiki_id)s
                    ),
                    (SELECT uploaded_by from files where
                     name=%(filename)s and
                     attached_to_pagename=%(newpagename)s and
                     wiki_id=%(wiki_id)s
                    ),
                    (SELECT uploaded_by_ip from files where
                     name=%(filename)s and
                     attached_to_pagename=%(newpagename)s and
                     wiki_id=%(wiki_id)s
                    ), %(newpagename)s, %(newpagename_propercased)s,
                    %(userid)s, %(userip)s, %(timenow)s,
                    %(wiki_id)s)""", old_page_file_dict, isWrite=True)
                # clear out cached version of image
                if config.memcache:
                    request.mc.delete("files:%s,%s" % (
                        wikiutil.mc_quote(filename),
                        wikiutil.mc_quote(newpagename.lower())))
                if is_image:
                    # archive the image dimensions too, then drop stale
                    # thumbnails of the replaced image
                    request.cursor.execute("""INSERT into oldImageInfo
                        (name, attached_to_pagename, xsize, ysize,
                        uploaded_time, wiki_id) values (%(filename)s,
                        %(newpagename)s,
                        (SELECT xsize from imageInfo where
                         name=%(filename)s and
                         attached_to_pagename=%(newpagename)s and
                         wiki_id=%(wiki_id)s
                        ),
                        (SELECT ysize from imageInfo where
                         name=%(filename)s and
                         attached_to_pagename=%(newpagename)s and
                         wiki_id=%(wiki_id)s
                        ),
                        (SELECT uploaded_time from files where
                         name=%(filename)s and
                         attached_to_pagename=%(newpagename)s and
                         wiki_id=%(wiki_id)s
                        ), %(wiki_id)s)""",
                        old_page_file_dict, isWrite=True)
                    request.cursor.execute("""DELETE from thumbnails
                        where name=%(filename)s and
                        attached_to_pagename=%(newpagename)s and
                        wiki_id=%(wiki_id)s""",
                        old_page_file_dict, isWrite=True)
                # update-or-insert the file row on the new page
                request.cursor.execute(
                    """SELECT name from files where name=%(filename)s and
                       attached_to_pagename=%(newpagename)s and
                       wiki_id=%(wiki_id)s""", old_page_file_dict)
                result = request.cursor.fetchone()
                if result:
                    request.cursor.execute("""UPDATE files set
                        file=(select file from files where
                         name=%(filename)s and
                         attached_to_pagename=%(oldpagename)s and
                         wiki_id=%(wiki_id)s
                        ), uploaded_time=%(timenow)s,
                        uploaded_by=%(userid)s,
                        uploaded_by_ip=%(userip)s where
                        name=%(filename)s and
                        attached_to_pagename=%(newpagename)s and
                        wiki_id=%(wiki_id)s""",
                        old_page_file_dict, isWrite=True)
                    if is_image and old_page_file_dict.has_key('xsize'):
                        request.cursor.execute("""UPDATE imageInfo set
                            xsize=%(xsize)s, ysize=%(ysize)s where
                            name=%(filename)s and
                            attached_to_pagename=%(newpagename)s and
                            wiki_id=%(wiki_id)s""",
                            old_page_file_dict, isWrite=True)
                else:
                    # NOTE(review): this INSERT references %(xsize)s /
                    # %(ysize)s unconditionally — for a non-image (or an
                    # image with no imageInfo row) the dict has no such
                    # keys; looks like it would raise.  TODO confirm.
                    request.cursor.execute("""INSERT into files (name,
                        file, uploaded_time, uploaded_by,
                        uploaded_by_ip, xsize, ysize,
                        attached_to_pagename,
                        attached_to_pagename_propercased, wiki_id)
                        values (%(filename)s,
                        (select file from files where
                         name=%(filename)s and
                         attached_to_pagename=%(oldpagename)s and
                         wiki_id=%(wiki_id)s
                        ), %(uploaded_time)s, %(uploaded_by)s,
                        %(uploaded_by_ip)s, %(xsize)s, %(ysize)s,
                        %(newpagename)s, %(newpagename_propercased)s,
                        %(wiki_id)s)""",
                        old_page_file_dict, isWrite=True)
                    if is_image and old_page_file_dict.has_key('xsize'):
                        request.cursor.execute("""INSERT into imageInfo
                            (name, attached_to_pagename, xsize, ysize,
                            wiki_id) values (%(filename)s,
                            %(newpagename)s, %(xsize)s, %(ysize)s,
                            %(wiki_id)s)""",
                            old_page_file_dict, isWrite=True)
def pageInfo(page, get_from_cache=True, cached_content=None,
             cached_time=None):
    """
    Gets a group of related items for a page: last edited information,
    page cached text, meta-text (such as #redirect), and has_map.

    Returns an object with attributes edit_info, cached_text, meta_text,
    has_map.

    Lookup order: per-request cache -> memcache -> database (skipped
    when get_from_cache is False).  cached_content/cached_time let a
    caller that just rendered the page avoid a re-read.
    """
    pagename_key = wikiutil.mc_quote(page.page_name.lower())
    # historical versions get their own cache key, suffixed by date
    if page.prev_date:
        key = "%s,%s" % (pagename_key, repr(page.prev_date))
    else:
        key = pagename_key
    if get_from_cache:
        # check per-request cache
        if page.request.req_cache['page_info'].has_key(
                (key, page.request.config.wiki_id)):
            return page.request.req_cache['page_info'][
                (key, page.request.config.wiki_id)]
        # check memcache
        if config.memcache:
            page_info = page.request.mc.get("page_info:%s" % key)
            if page_info:
                page.request.req_cache['page_info'][
                    (key, page.request.config.wiki_id)] = page_info
                return page_info
    # memcache failed, this means we have to get all the information
    # from the database
    # last edit information
    editUserID = None
    editTimeUnix = 0
    has_map = None
    if page.exists():
        if not page.prev_date:
            page.cursor.execute("""SELECT editTime, userEdited from
                curPages where name=%(page_name)s and
                wiki_id=%(wiki_id)s""",
                {'page_name':page.page_name,
                 'wiki_id':page.request.config.wiki_id})
            result = page.cursor.fetchone()
            if result:
                editTimeUnix = result[0]
                # userEdited is a padded char column; strip when present
                if result[1]:
                    editUserID = result[1].strip()
                else:
                    editUserID = result[1]
        else:
            page.cursor.execute("""SELECT userEdited from allPages
                where name=%(page_name)s and editTime=%(date)s and
                wiki_id=%(wiki_id)s""",
                {'page_name':page.page_name, 'date':page.prev_date,
                 'wiki_id':page.request.config.wiki_id})
            result = page.cursor.fetchone()
            editUserID = result[0]
            editTimeUnix = page.prev_date
        edit_info = (editTimeUnix, editUserID)
        # cached text
        cached_text = ('', 0)
        if not page.prev_date:
            if not cached_content or not cached_time:
                page.cursor.execute("""SELECT cachedText, cachedTime
                    from curPages where name=%(page)s and
                    wiki_id=%(wiki_id)s""",
                    {'page':page.page_name,
                     'wiki_id':page.request.config.wiki_id})
                result = page.cursor.fetchone()
                if result:
                    if result[0] and result[1]:
                        text = wikidb.binaryToString(result[0])
                        cached_time = result[1]
                        cached_text = (text, cached_time)
            else:
                # caller supplied freshly rendered content
                # (NOTE(review): this stores the raw content, not a
                # (text, time) tuple — callers such as update() patch
                # cached_text afterwards; confirm intended.)
                cached_text = cached_content
        # meta_text
        meta_text = find_meta_text(page, fresh=True)
    else:
        # set some defaults.  These shouldn't be accessed.
        edit_info = (None, None)
        cached_text = ('', 0)
        meta_text = None
        has_map = False
    has_acl = True
    if not page.prev_date:
        if not config.has_old_wiki_map:
            currently_has_map = False
            page.cursor.execute("""SELECT count(pagename) from mapPoints
                where pagename=%(page_name)s and
                wiki_id=%(wiki_id)s""",
                {'page_name':page.page_name,
                 'wiki_id':page.request.config.wiki_id})
            result = page.cursor.fetchone()
            if result:
                if result[0]:
                    currently_has_map = True
            if page.request.save_time:
                # we are in a 'saving' request
                if page.request.addresses:
                    has_map = True
                else:
                    has_map = False
            else:
                has_map = currently_has_map
        else:
            page.cursor.execute("""SELECT count(pagename) from mapPoints
                where pagename=%(page_name)s and
                wiki_id=%(wiki_id)s""",
                {'page_name':page.page_name,
                 'wiki_id':page.request.config.wiki_id})
            result = page.cursor.fetchone()
            if result:
                if result[0]:
                    has_map = True
    if not page.exists():
        # deleted page: report the most recent historical edit
        page.cursor.execute("""SELECT latestEdit.editTime,
            allPages.userEdited from (
             SELECT max(editTime) as editTime from allPages where
             name=%(page_name)s and wiki_id=%(wiki_id)s) as latestEdit,
            allPages where allPages.name=%(page_name)s and
            allPages.editTime=latestEdit.editTime and
            allPages.wiki_id=%(wiki_id)s""",
            {'page_name':page.page_name,
             'wiki_id':page.request.config.wiki_id})
        result = page.cursor.fetchone()
        if result:
            editUserID = result[1]
            editTimeUnix = result[0]
            edit_info = (editTimeUnix, editUserID)
        else:
            # no 'latest' row — fall back to the exact dated version
            page.cursor.execute("""SELECT userEdited from allPages
                where name=%(page_name)s and editTime=%(date)s and
                wiki_id=%(wiki_id)s""",
                {'page_name':page.page_name, 'date':page.prev_date,
                 'wiki_id':page.request.config.wiki_id})
            result = page.cursor.fetchone()
            editUserID = result[0]
            editTimeUnix = page.prev_date
            edit_info = (editTimeUnix, editUserID)
            has_map = None
    # does the page carry its own ACL rows?
    page.cursor.execute("""SELECT groupname, may_read, may_edit,
        may_delete, may_admin from pageAcls where
        pagename=%(pagename)s and wiki_id=%(wiki_id)s""",
        {'pagename':page.page_name,
         'wiki_id':page.request.config.wiki_id})
    if page.cursor.fetchone():
        has_acl = True
    else:
        has_acl = False
    page_info = pageInfoObj(edit_info, cached_text, meta_text, has_acl,
                            has_map)
    # set_cache requests may overwrite (set); others only fill (add)
    if config.memcache and not page.request.set_cache:
        page.request.mc.add("page_info:%s" % key, page_info)
    elif config.memcache and page.request.set_cache:
        page.request.mc.set("page_info:%s" % key, page_info)
    page.request.req_cache['page_info'][
        (key, page.request.config.wiki_id)] = page_info
    return page_info
# Purge every per-wiki row from the shared tables.  Most tables key on
# wiki_id; the user* association tables key on wiki_name.
# (Fragment: `cursor`, `d`, `req` and `wiki_name` are bound earlier in
# the enclosing wiki-deletion routine.)
cursor.execute("DELETE from imageCaptions where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from imageInfo where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from links where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from mapCategoryDefinitions where wiki_id=%(wiki_id)s",
               d, isWrite=True)
cursor.execute("DELETE from mapPointCategories where wiki_id=%(wiki_id)s",
               d, isWrite=True)
cursor.execute("DELETE from mapPoints where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from metadata where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from oldFiles where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from oldImageInfo where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from oldMapPointCategories where wiki_id=%(wiki_id)s",
               d, isWrite=True)
cursor.execute("DELETE from oldMapPoints where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from pageAcls where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from pageDependencies where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from thumbnails where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from userFavorites where wiki_name=%(wiki_name)s", d,
               isWrite=True)
cursor.execute("DELETE from userGroups where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from userGroupsIPs where wiki_id=%(wiki_id)s", d,
               isWrite=True)
cursor.execute("DELETE from userWatchedWikis where wiki_name=%(wiki_name)s",
               d, isWrite=True)
cursor.execute("DELETE from userPageOnWikis where wiki_name=%(wiki_name)s",
               d, isWrite=True)
cursor.execute("DELETE from userWikiInfo where wiki_id=%(wiki_id)s", d,
               isWrite=True)
# finally, drop the wiki's own row
cursor.execute("DELETE from wikis where id=%(wiki_id)s", d, isWrite=True)
# nuke the memcache'd configuration
req.mc.delete("settings:%s" % wikiutil.mc_quote(wiki_name),
              wiki_global=True)
# clear the recent wikis display, because this wiki is history!
req.mc.delete('recentwikis', wiki_global=True)
req.db_disconnect()
def getRecentChanges(request, max_days=False, total_changes_limit=0,
                     per_page_limit='', page='', changes_since=0,
                     userFavoritesFor='', wiki_global=False, fresh=False,
                     on_wikis=None, add_to_cache=True, check_acl=True):
    """
    This is our recent changes-getting method!  The recentchanges.py
    macro does the pretty formatting.

    @param request: request object.
    @param max_days: integer number of max days to show on rc defaults
        to False if not given and we use RC_MAX_DAYS.
    @param total_changes_limit: integer limit on the total number of
        changes to return.  Defaults to 0 and we use
        ABSOLUTE_RC_CHANGES_LIMIT.
    @param per_page_limit: integer max for the number of changes
        per-page.  Currently only supports the limit of 1.  Defaults to
        '', which means no intrinsic page limit.
    @param page: page object to limit the changes to.  Defaults to '',
        which means we grab changes on all pages.
    @param changes_since: grab only changes since this time (given as a
        unix timestamp).  Defaults to 0, which means we grab change back
        as far in time as makes sense.
    @param userFavoritesFor: user id of a user.  Providing this will
        grab all of the changes on the user's bookmarked pages.
        Defaults to '', which means we just grab changes like usual.
    @param wiki_global: boolean.  If True, we grab changes across
        multiple wikis.  If False, we just grab changes on the current
        wiki.
    @param fresh: boolean.  If True then we don't grab any data from
        the cache.  Defaults to False.
    @param on_wikis: a list of wiki names.  If provided then we grab
        changes on all of the wikis provided and mash them together.
        Defaults to None, which means we grab only the current wiki's
        changes.
    @param add_to_cache: boolean.  Defaults to True.  If True, we add
        the results of calling getRecentChanges() to the memory cache.
    @param check_acl: boolean.  Defaults to True.  If True, then we
        return only pages the user can read.
    @returns: list of EditLine objects, sorted newest-first.
    """
    from wikiutil import mc_quote

    def addQueryConditions(view, query, max_days_ago, total_changes_limit,
                           per_page_limit, page, changes_since,
                           userFavoritesFor, wiki_global):
        """
        Limits the query in various ways.

        Appends one parenthesized sub-SELECT over the given change view
        (pageChanges, eventChanges, ...) to query, with WHERE conditions
        built from the limit arguments.  The opening paren is closed here
        except in the per-page case, where buildQuery() closes it.
        """
        add_query = []
        # eventChanges has no propercased_name column, so it selects plain
        # name; every other view aliases propercased_name to name.  The
        # per-page-limit variants take max(changeTime) so each page
        # contributes only its most recent change.
        if per_page_limit:
            if view != 'eventChanges':
                add_query.append(
                    """(SELECT %(view)s.propercased_name as name,
                               max(%(view)s.changeTime) as changeTime,
                               %(view)s.id as id,
                               %(view)s.editType as editType,
                               %(view)s.comment as comment,
                               %(view)s.userIP as userIP
                        from %(view)s""" % {'view': view})
            else:
                add_query.append(
                    """(SELECT %(view)s.name as name,
                               max(%(view)s.changeTime) as changeTime,
                               %(view)s.id as id,
                               %(view)s.editType as editType,
                               %(view)s.comment as comment,
                               %(view)s.userIP as userIP
                        from %(view)s""" % {'view': view})
        else:
            if view != 'eventChanges':
                add_query.append(
                    """(SELECT %(view)s.propercased_name as name,
                               %(view)s.changeTime as changeTime,
                               %(view)s.id as id,
                               %(view)s.editType as editType,
                               %(view)s.comment as comment,
                               %(view)s.userIP as userIP
                        from %(view)s""" % {'view': view})
            else:
                add_query.append(
                    """(SELECT %(view)s.name as name,
                               %(view)s.changeTime as changeTime,
                               %(view)s.id as id,
                               %(view)s.editType as editType,
                               %(view)s.comment as comment,
                               %(view)s.userIP as userIP
                        from %(view)s""" % {'view': view})
        printed_where = False
        if page and not userFavoritesFor:
            # single-page restriction.  note the double substitution: the
            # view name and wiki_id are baked into the string here via %,
            # while %%(pagename)s survives as a bind parameter for the
            # cursor.
            add_query.append(
                """ where %(view)s.name=%%(pagename)s and
                          %(view)s.changeTime is not NULL and
                          wiki_id=%(wiki_id)s""" %
                {'view': view, 'wiki_id': request.config.wiki_id})
            printed_where = True
        if not printed_where:
            printed_where = True
            add_query.append(' where')
        else:
            add_query.append(' and')
        # NOTE(review): max_days_ago is accepted but never written into
        # the WHERE clause -- both branches below are identical.  Looks
        # like a 'changeTime >= %(max_days_ago)s' condition may have been
        # lost; confirm against upstream before relying on date limiting
        # at the SQL level (the caller re-limits in Python regardless).
        if max_days_ago:
            if changes_since:
                add_query.append(' changeTime >= %(changes_since)s and')
        else:
            if changes_since:
                add_query.append(' changeTime >= %(changes_since)s and')
        add_query.append(' wiki_id=%(wiki_id)s')
        if total_changes_limit and not per_page_limit:
            # printed_where is always True by this point, so in practice
            # this appends ' and'.
            if not printed_where:
                add_query.append(' where')
            else:
                add_query.append(' and')
            add_query.append(
                """ changeTime is not NULL and
                    id is not NULL and
                    wiki_id=%%(wiki_id)s
                    order by changeTime desc limit %s)""" %
                total_changes_limit)
        elif not total_changes_limit and per_page_limit:
            # per-page sub-selects are closed later, in buildQuery().
            pass
        else:
            add_query.append(')')
        query += add_query

    def buildQuery(max_days_ago, total_changes_limit, per_page_limit,
                   page, changes_since, userFavoritesFor, wiki_global,
                   request):
        """
        Build up the big old recent changes query.

        UNIONs one sub-select per change view and wraps the whole thing
        so it can be ordered/limited as a unit.
        """
        # we use a select statement on the outside here, though not
        # needed, so that MySQL will cache the statement.  MySQL does not
        # cache non-selects, so we have to do this.
        if per_page_limit:
            # NOTE(review): if config.db_type is neither 'postgres' nor
            # 'mysql', query is never bound in this branch -- presumably
            # those are the only supported backends; confirm.
            if config.db_type == 'postgres':
                query = [
                    'SELECT distinct on (name) name, changeTime, id, editType,'
                    'comment, userIP from ( SELECT * from ( ']
            elif config.db_type == 'mysql':
                query = [
                    """SELECT distinct (name), changeTime, id, editType,
                              comment, userIP from ( SELECT * from ( """]
        else:
            query = [
                'SELECT name, changeTime, id, editType, comment,'
                'userIP from (']
        printed_where = False  # NOTE(review): unused here
        addQueryConditions('pageChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        query.append(' UNION ALL ')
        addQueryConditions('currentFileChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        query.append(' UNION ALL ')
        addQueryConditions('oldFileChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        query.append(' UNION ALL ')
        addQueryConditions('deletedFileChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        query.append(' UNION ALL ')
        addQueryConditions('eventChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        query.append(' UNION ALL ')
        addQueryConditions('oldMapChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        query.append(' UNION ALL ')
        addQueryConditions('currentMapChanges', query, max_days_ago,
                           total_changes_limit, per_page_limit, page,
                           changes_since, userFavoritesFor, wiki_global)
        # wikis converted from the old map system carry one extra view
        if request.config.has_old_wiki_map:
            query.append(' UNION ALL ')
            addQueryConditions('deletedMapChanges', query, max_days_ago,
                               total_changes_limit, per_page_limit, page,
                               changes_since, userFavoritesFor,
                               wiki_global)
        if not per_page_limit:
            query.append(' order by changeTime desc')
            # NOTE(review): when total_changes_limit is 0 here (the
            # "query for all" path below) nothing closes the outer
            # paren -- confirm against upstream.
            if total_changes_limit:
                query.append(' limit %(limit)s) as result')
        else:
            if per_page_limit:
                query.append(
                    " ) as sortedChanges order by changeTime desc "
                    ") as result")
            else:
                query.append(') as result')
        return ''.join(query)

    def get_interwiki_recent_changes():
        """
        we're doing an interwiki recent changes so we get rc for each
        wiki in on_wikis list.  we use get_multi here.
        """
        changes = []
        original_wiki = request.config.wiki_name
        if not userFavoritesFor:
            # memcache keys look like '<wiki_id>rc:<quoted page>'; value
            # None marks "not fetched from cache yet".
            wiki_keys = {}
            id_to_name = {}
            for wiki_name in on_wikis:
                request.switch_wiki(wiki_name)
                id_to_name[request.config.wiki_id] = wiki_name
                wiki_keys['%src:%s' % (request.config.wiki_id,
                                       mc_quote(page))] = None
            # switch back to our original wiki
            if request.config.wiki_name != original_wiki:
                request.switch_wiki(original_wiki)
            if config.memcache and not fresh:
                # one round-trip for all wikis' cached rc lists
                values = request.mc.get_multi(wiki_keys.keys(),
                                              wiki_global=True)
                for k, got_changes in values.iteritems():
                    if got_changes is not None:
                        if total_changes_limit:
                            got_changes = got_changes[:total_changes_limit]
                        if changes_since:
                            got_changes = _get_changes_since(changes_since,
                                                             got_changes)
                        changes += got_changes
                        wiki_keys[k] = got_changes
            # any wiki that missed the cache gets computed the slow way
            for key in wiki_keys:
                if wiki_keys[key] is None:
                    # key is '<wiki_id>rc:...': the id is everything
                    # before the 'rc:' marker
                    wiki_id = int(key[:key.find('rc:')])
                    request.switch_wiki(id_to_name[wiki_id])
                    changes += getRecentChanges(request, wiki_global=False,
                                                changes_since=changes_since,
                                                check_acl=check_acl)
        else:
            # NOTE(review): wiki_name is not bound in this branch (no
            # loop above) -- this path looks unreachable/broken; confirm
            # against upstream before relying on favorites-over-wikis
            # going through here.
            request.switch_wiki(wiki_name)
            changes += getRecentChanges(request, wiki_global=False,
                                        changes_since=changes_since,
                                        check_acl=check_acl)
        # switch back to our original wiki
        if request.config.wiki_name != original_wiki:
            request.switch_wiki(original_wiki)
        changes = _sort_changes_by_time(changes)
        changes = changes[:ABSOLUTE_RC_CHANGES_LIMIT]  # for consistency's sake
        if check_acl:
            changes = filter_may_read(changes, request)
        return changes

    def get_user_favorites():
        """
        Gather the latest change (limit 1 per page) for each page the
        user has bookmarked, hopping wikis as needed.
        """
        from Sycamore import user
        changes = []
        original_wiki = request.config.wiki_name
        favlist = user.User(request, userFavoritesFor).getFavoriteList(
            wiki_global=wiki_global)
        for favorite in favlist:
            wiki_name = favorite.wiki_name
            request.switch_wiki(wiki_name)
            changes += getRecentChanges(request, page=favorite.page_name,
                                        total_changes_limit=1,
                                        wiki_global=False,
                                        check_acl=check_acl)
        # switch back to our original wiki
        if request.config.wiki_name != original_wiki:
            request.switch_wiki(original_wiki)
        changes = _sort_changes_by_time(changes)
        if check_acl:
            changes = filter_may_read(changes, request)
        return changes

    # dispatch: plain single-wiki rc tries the memcache first; interwiki
    # and favorites requests go through their helpers above.
    if not userFavoritesFor and on_wikis is None and not fresh:
        changes = None
        if config.memcache:
            changes = request.mc.get('rc:%s' % mc_quote(page))
        if changes is not None:
            # cached list is the full set; trim/filter per this call's
            # arguments (a cache miss falls through to the query below)
            if total_changes_limit:
                changes = changes[:total_changes_limit]
            if changes_since:
                changes = _get_changes_since(changes_since, changes)
            if check_acl:
                changes = filter_may_read(changes, request)
            return changes
    elif on_wikis is not None:
        return get_interwiki_recent_changes()
    elif userFavoritesFor:
        return get_user_favorites()

    # We have to get the 'normal' recent changes, as this isn't a special
    # case and it wasn't in the cache.
    lines = []
    right_now = time.gmtime()
    # we limit recent changes to display at most the last max_days of
    # edits.
    if max_days:
        # the subtraction of max days is okay here, as mktime
        # will do the right thing
        oldest_displayed_time_tuple = (right_now[0], right_now[1],
                                       right_now[2]-max_days,
                                       0, 0, 0, 0, 0, 0)
        max_days_ago = time.mktime(oldest_displayed_time_tuple)
    else:
        max_days_ago = False

    # still grab all the maximum days, and then limit them
    # after grabing (more efficient on the whole)
    if not userFavoritesFor and not page:
        query_max_days = time.mktime((right_now[0], right_now[1],
                                      right_now[2]-RC_MAX_DAYS,
                                      0, 0, 0, 0, 0, 0))
    else:
        query_max_days = max_days_ago

    query_total_changes_limit = total_changes_limit
    # by default for a per-page, grab total_changes_limit = 100
    if page and not total_changes_limit:
        query_total_changes_limit = 100
        total_changes_limit = 100
    elif not page and total_changes_limit and not userFavoritesFor:
        # we're doing RC or something close, so let's query for all
        query_total_changes_limit = 0
    elif total_changes_limit <= ABSOLUTE_RC_CHANGES_LIMIT:
        query_total_changes_limit = ABSOLUTE_RC_CHANGES_LIMIT

    # so, let's compile all the different types of changes together!
    query = buildQuery(query_max_days, query_total_changes_limit,
                       per_page_limit, page, None, userFavoritesFor,
                       wiki_global, request)
    request.cursor.execute(query,
                           {'max_days_ago': query_max_days,
                            'limit': query_total_changes_limit,
                            'userFavoritesFor': userFavoritesFor,
                            'pagename': page,
                            'changes_since': None,
                            'wiki_id': request.config.wiki_id})
    edit = request.cursor.fetchone()
    while edit:
        editline = EditLine(edit)
        editline.wiki_name = request.config.wiki_name
        lines.append(editline)
        edit = request.cursor.fetchone()
    lines = _sort_changes_by_time(lines)

    if config.memcache and add_to_cache:
        # cache the full, untrimmed list under this page's key
        request.mc.add('rc:%s' % mc_quote(page), lines)
        # deepcopy here because sometimes people do
        # lines.comment = something, and we want to add the
        # 'real' data to the cache (postCommit, so it happens way later)
        lines = copy.deepcopy(lines)
    if total_changes_limit:
        lines = lines[:total_changes_limit]
    if changes_since:
        lines = _get_changes_since(changes_since, lines)
    if check_acl:
        lines = filter_may_read(lines, request)
    return lines
def copy_files(oldpagename, newpagename, request):
    """
    copies files from oldpagename to newpagename.  keeps the files on
    oldpagename for manual deletion.

    if there is an file on the page newpagename that has the same name
    as a file on oldpagename, then the file from newpagename superseeds
    the old file, and the old file is deleted (but kept as a deleted
    file as per usual delete file/is accessable via the info tab)
    """
    from Sycamore.action.Files import get_filelist
    old_page_files = get_filelist(request, oldpagename)
    new_page_files = get_filelist(request, newpagename)
    for filename in old_page_files:
        is_image = wikiutil.isImage(filename)
        # fetch the file's row on the old page
        request.cursor.execute(
            """SELECT file, uploaded_time, uploaded_by, uploaded_by_ip,
                      attached_to_pagename_propercased
               from files
               where name=%(filename)s and
                     attached_to_pagename=%(oldpagename)s and
                     wiki_id=%(wiki_id)s""",
            {'filename': filename, 'oldpagename': oldpagename.lower(),
             'wiki_id': request.config.wiki_id})
        result = request.cursor.fetchone()
        if result:
            # one dict holding every bind parameter the INSERT/UPDATE
            # statements below might need
            old_page_file_dict = {
                'filename': filename,
                'uploaded_time': result[1],
                'uploaded_by': result[2],
                'attached_to_pagename_propercased': result[4],
                'oldpagename': oldpagename.lower(),
                'uploaded_by_ip': result[3],
                'newpagename': newpagename.lower(),
                'newpagename_propercased': Page(newpagename,
                                                request).proper_name(),
                'timenow': time.time(),
                'userid': request.user.id,
                'userip': request.remote_addr,
                'wiki_id': request.config.wiki_id
            }
            if is_image:
                # pick up the image's dimensions, when we have them
                request.cursor.execute(
                    """SELECT xsize, ysize from imageInfo
                       where name=%(filename)s and
                             attached_to_pagename=%(oldpagename)s and
                             wiki_id=%(wiki_id)s""",
                    {'filename': filename,
                     'oldpagename': oldpagename.lower(),
                     'wiki_id': request.config.wiki_id})
                result = request.cursor.fetchone()
                if result:
                    old_page_file_dict['xsize'] = result[0]
                    old_page_file_dict['ysize'] = result[1]
            if filename not in new_page_files:
                # no name clash: copy the row to the new page (the blob
                # itself is copied DB-side via a sub-select)
                request.cursor.execute(
                    """INSERT into files (name, file, uploaded_time,
                           uploaded_by, uploaded_by_ip,
                           attached_to_pagename,
                           attached_to_pagename_propercased, wiki_id)
                       values (%(filename)s,
                           (select file from files
                            where name=%(filename)s and
                                  attached_to_pagename=%(oldpagename)s
                                  and wiki_id=%(wiki_id)s),
                           %(uploaded_time)s, %(uploaded_by)s,
                           %(uploaded_by_ip)s, %(newpagename)s,
                           %(newpagename_propercased)s, %(wiki_id)s)""",
                    old_page_file_dict, isWrite=True)
                if is_image:
                    if old_page_file_dict.has_key('xsize'):
                        request.cursor.execute(
                            """INSERT into imageInfo (name,
                                   attached_to_pagename, xsize, ysize,
                                   wiki_id)
                               values (%(filename)s, %(newpagename)s,
                                   %(xsize)s, %(ysize)s,
                                   %(wiki_id)s)""",
                            old_page_file_dict, isWrite=True)
            else:
                # name clash: archive the new page's current copy into
                # oldFiles, marked deleted by the current user right now
                request.cursor.execute(
                    """INSERT into oldFiles (name, file, uploaded_time,
                           uploaded_by, uploaded_by_ip,
                           attached_to_pagename,
                           attached_to_pagename_propercased, deleted_by,
                           deleted_by_ip, deleted_time, wiki_id)
                       values (%(filename)s,
                           (SELECT file from files
                            where name=%(filename)s and
                                  attached_to_pagename=%(newpagename)s
                                  and wiki_id=%(wiki_id)s),
                           (SELECT uploaded_time from files
                            where name=%(filename)s and
                                  attached_to_pagename=%(newpagename)s
                                  and wiki_id=%(wiki_id)s),
                           (SELECT uploaded_by from files
                            where name=%(filename)s and
                                  attached_to_pagename=%(newpagename)s
                                  and wiki_id=%(wiki_id)s),
                           (SELECT uploaded_by_ip from files
                            where name=%(filename)s and
                                  attached_to_pagename=%(newpagename)s
                                  and wiki_id=%(wiki_id)s),
                           %(newpagename)s, %(newpagename_propercased)s,
                           %(userid)s, %(userip)s, %(timenow)s,
                           %(wiki_id)s)""",
                    old_page_file_dict, isWrite=True)
                # clear out cached version of image
                if config.memcache:
                    request.mc.delete(
                        "files:%s,%s" %
                        (wikiutil.mc_quote(filename),
                         wikiutil.mc_quote(newpagename.lower())))
                if is_image:
                    # archive the superseded image's metadata alongside
                    # the file, and drop its stale thumbnails
                    request.cursor.execute(
                        """INSERT into oldImageInfo (name,
                               attached_to_pagename, xsize, ysize,
                               uploaded_time, wiki_id)
                           values (%(filename)s, %(newpagename)s,
                               (SELECT xsize from imageInfo
                                where name=%(filename)s and
                                      attached_to_pagename=%(newpagename)s
                                      and wiki_id=%(wiki_id)s),
                               (SELECT ysize from imageInfo
                                where name=%(filename)s and
                                      attached_to_pagename=%(newpagename)s
                                      and wiki_id=%(wiki_id)s),
                               (SELECT uploaded_time from files
                                where name=%(filename)s and
                                      attached_to_pagename=%(newpagename)s
                                      and wiki_id=%(wiki_id)s),
                               %(wiki_id)s)""",
                        old_page_file_dict, isWrite=True)
                    request.cursor.execute(
                        """DELETE from thumbnails
                           where name=%(filename)s and
                                 attached_to_pagename=%(newpagename)s and
                                 wiki_id=%(wiki_id)s""",
                        old_page_file_dict, isWrite=True)
                # now point the new page's row at the old page's file
                # content (update in place if the row exists, otherwise
                # insert it)
                request.cursor.execute(
                    """SELECT name from files
                       where name=%(filename)s and
                             attached_to_pagename=%(newpagename)s and
                             wiki_id=%(wiki_id)s""",
                    old_page_file_dict)
                result = request.cursor.fetchone()
                if result:
                    request.cursor.execute(
                        """UPDATE files
                           set file=(select file from files
                                     where name=%(filename)s and
                                           attached_to_pagename=%(oldpagename)s
                                           and wiki_id=%(wiki_id)s),
                               uploaded_time=%(timenow)s,
                               uploaded_by=%(userid)s,
                               uploaded_by_ip=%(userip)s
                           where name=%(filename)s and
                                 attached_to_pagename=%(newpagename)s and
                                 wiki_id=%(wiki_id)s""",
                        old_page_file_dict, isWrite=True)
                    if is_image and old_page_file_dict.has_key('xsize'):
                        request.cursor.execute(
                            """UPDATE imageInfo
                               set xsize=%(xsize)s, ysize=%(ysize)s
                               where name=%(filename)s and
                                     attached_to_pagename=%(newpagename)s
                                     and wiki_id=%(wiki_id)s""",
                            old_page_file_dict, isWrite=True)
                else:
                    # NOTE(review): this INSERT binds %(xsize)s/%(ysize)s
                    # unconditionally, but those keys only exist when an
                    # imageInfo row was found above -- looks like a
                    # KeyError risk for non-images on this path; confirm
                    # against upstream.
                    request.cursor.execute(
                        """INSERT into files (name, file, uploaded_time,
                               uploaded_by, uploaded_by_ip, xsize, ysize,
                               attached_to_pagename,
                               attached_to_pagename_propercased, wiki_id)
                           values (%(filename)s,
                               (select file from files
                                where name=%(filename)s and
                                      attached_to_pagename=%(oldpagename)s
                                      and wiki_id=%(wiki_id)s),
                               %(uploaded_time)s, %(uploaded_by)s,
                               %(uploaded_by_ip)s, %(xsize)s, %(ysize)s,
                               %(newpagename)s,
                               %(newpagename_propercased)s,
                               %(wiki_id)s)""",
                        old_page_file_dict, isWrite=True)
                    if is_image and old_page_file_dict.has_key('xsize'):
                        request.cursor.execute(
                            """INSERT into imageInfo (name,
                                   attached_to_pagename, xsize, ysize,
                                   wiki_id)
                               values (%(filename)s, %(newpagename)s,
                                   %(xsize)s, %(ysize)s,
                                   %(wiki_id)s)""",
                            old_page_file_dict, isWrite=True)