def execute(macro, args, formatter):
    """Render an Address macro: wikify the address text, geocode it and
    append a map of nearby locations.

    Args accept either Address("address","description"),
    Address("address","lat","long"), or a bare address string.
    Returns HTML; falls back to the plain wikified address when geocoding
    fails or the wiki uses the old map system.
    """
    if not args:
        return "<em>Please provide an address.</em>"
    if not formatter:
        formatter = macro.formatter
    # re for old format Address("address","description")
    oldformat = re.compile(r"^\s*\"(.+)\"\s*,\s*\"(.+)\"\s*$")
    # re for new format Address("address","lat","long")
    newformat = re.compile(r"^\s*\"(.+)\"\s*,\s*\"(.+)\"\s*,\s*\"(.+)\"\s*$")
    lat = None
    lon = None  # renamed from `long` to avoid shadowing the builtin
    # Search once per pattern and reuse the match (original ran each
    # regex twice).
    match = newformat.search(args)
    if match:
        (address, lat, lon) = match.groups()
    else:
        match = oldformat.search(args)
        if match:
            # The description (second group) is unused in this variant.
            (address, _description) = match.groups()
        else:
            address = args
    address = address.strip('"')
    # allow links in the address to work properly
    wikified_address = wikiutil.stripOuterParagraph(
        wikiutil.wikifyString(address, macro.request, formatter.page))
    address = wikiutil.simpleStrip(macro.request, wikified_address).strip()
    if macro.request.config.address_locale and address.find(",") == -1:
        # add the address locale if it's lacking
        full_address = "%s, %s" % (address,
                                   macro.request.config.address_locale)
    else:
        full_address = address
    if macro.request.config.has_old_wiki_map:
        # we just ignore [[address]] on davis wiki
        return wikified_address
    if lat is None:
        place = Location(macro, formatter, full_address)
    else:
        place = Location(macro, formatter, full_address, lat, lon)
    if place.latitude is None:
        # Geocoding failed; emit just the wikified text.
        return wikified_address
    out = wikified_address
    nearby = place.getNearby()
    out += mapHTML(macro, place, nearby)
    # Only record the address on a live (non-historical) text_python render.
    ignore = formatter.name != "text_python" or formatter.page.prev_date
    if not ignore:
        # `in` replaces Py2-only has_key(); behavior is identical.
        if formatter.page.page_name in macro.request.addresses:
            macro.request.addresses[formatter.page.page_name].append(place)
        else:
            macro.request.addresses[formatter.page.page_name] = [place]
    return out
def _format_event_time(hour, minute):
    """Format a 24-hour time as a 12-hour clock string, e.g. "9:05 PM".

    Fixes the original formatting bug where non-zero minutes were not
    zero-padded (21:05 rendered as "9:5 PM").
    """
    if hour == 0:
        display_hour, suffix = 12, "AM"
    elif hour < 12:
        display_hour, suffix = hour, "AM"
    elif hour == 12:
        display_hour, suffix = 12, "PM"
    else:
        display_hour, suffix = hour - 12, "PM"
    return "%d:%02d %s" % (display_hour, minute, suffix)


def doRSS(request):
    """Set up the RSS file: build an RSS 2.0 feed of Events Board events
    over the coming week and return the serialized XML string."""
    rss_init_text = (
        '<?xml version="1.0" ?>\n'
        '<rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.1/">\n'
        '<channel><title>%s Events Board</title><link>%s</link>'
        '<description>'
        'Events occuring soon, taken from the %s Events Board.'
        '</description><language>en-us</language>\n'
        '</channel>\n'
        '</rss>\n' % (request.config.sitename,
                      Page("Events Board", request).link_to(),
                      request.config.sitename))
    creator_text = 'The %s Robot' % request.config.sitename
    rss_dom = xml.dom.minidom.parseString(rss_init_text)
    channel = rss_dom.getElementsByTagName("channel")[0]
    # Current wiki-local date parts, used below for each item's dc:date.
    # (Replaces deprecated string.split(); the original also recomputed
    # the same date split three times and left several locals unused.)
    current_time = request.user.getFormattedDateTime(time.time(),
                                                     global_time=True)
    date_fields = current_time.split(" ")[0].split("-")
    current_year = date_fields[0]
    current_month = date_fields[1]
    current_day = date_fields[2]
    events = []
    timenow = time.time()
    # Window: [start of today, start of the day 7 days out), wiki-local,
    # expressed as UTC timestamps for the query.
    today_struct = time.gmtime(timenow + request.config.tz_offset)
    today = list(today_struct[0:3]) + [0, 0, 0, 0, 0, 0]
    today = calendar.timegm(today) - request.config.tz_offset
    # added *7 to show an entire week 2008/05/12 rtucker
    tomorrow_struct = time.gmtime(timenow + 60*60*24*7 +
                                  request.config.tz_offset)
    tomorrow = list(tomorrow_struct[0:3]) + [0, 0, 0, 0, 0, 0]
    tomorrow = calendar.timegm(tomorrow) - request.config.tz_offset
    request.cursor.execute(
        """SELECT uid, event_time, posted_by, text, location, event_name from events where event_time >= %(today)s and event_time < %(tomorrow)s and wiki_id=%(wiki_id)s""",
        {'today': today, 'tomorrow': tomorrow,
         'wiki_id': request.config.wiki_id})
    result = request.cursor.fetchone()
    while result:
        events.append(result)
        result = request.cursor.fetchone()
    for event in events:
        event_uid = event[0]  # renamed from `id`: don't shadow the builtin
        event_time_unix = event[1]
        posted_by = event[2]
        text = event[3]
        event_location = event[4]
        event_name = event[5]
        # stupid date stuff: convert the event time to wiki-local parts
        time_struct = time.gmtime(event_time_unix + request.config.tz_offset)
        year, month, day, hour, minute = time_struct[0:5]
        if event_name:
            processed_name = wikiutil.simpleStrip(request, event_name)
        else:
            processed_name = ''
        processed_text = doParse(text, request)
        processed_location = doParse(event_location, request)
        ptime = _format_event_time(int(hour), int(minute))
        total_date = "%s, %s %s" % (
            datetoday(int(day), int(month), int(year)), findMonth(month), day)
        item = rss_dom.createElement("item")
        description_text = ('<b>Date:</b> %s<br>\n'
                            '<b>Time:</b> %s<br>\n'
                            '<b>Location:</b> %s<br><br>\n'
                            '%s (Posted by %s)\n' % (
                                total_date, ptime, processed_location,
                                processed_text,
                                user.getUserLink(
                                    request,
                                    user.User(request, name=posted_by),
                                    absolute=True)))
        item_guid = rss_dom.createElement("guid")
        item_guid.setAttribute("isPermaLink", "false")
        item_guid.appendChild(rss_dom.createTextNode(str(event_uid)))
        item.appendChild(item_guid)
        item_description = rss_dom.createElement("description")
        item_description.appendChild(rss_dom.createTextNode(description_text))
        item_title = rss_dom.createElement("title")
        item_title.appendChild(rss_dom.createTextNode(processed_name))
        item.appendChild(item_title)
        item_link = rss_dom.createElement("link")
        item_link.appendChild(rss_dom.createTextNode(
            Page("Events Board", request).url(relative=False)))
        item.appendChild(item_link)
        # NOTE(review): dc:date is the *current* date, not the event's date.
        # Looks suspicious but preserved as-is; confirm intent before changing.
        item_date = rss_dom.createElement("dc:date")
        item_date.appendChild(rss_dom.createTextNode(
            "%s-%s-%s" % (current_year, current_month, current_day)))
        item.appendChild(item_date)
        creator = rss_dom.createElement("dc:creator")
        creator.appendChild(rss_dom.createTextNode(creator_text))
        item.appendChild(creator)
        item.appendChild(item_description)
        channel.appendChild(item)
    the_xml = rss_dom.toxml()
    return the_xml
def execute(macro, args, formatter=None, test=None):
    """Show recent local Twitter search results as a wiki-formatted table.

    With args, searches for the comma-separated terms (OR-combined);
    without args, defaults to the wiki's base domain name and appends a
    usage message. Results are cached in memcache for an hour.
    """
    if not formatter:
        formatter = macro.formatter
    # Require memcache
    if config.memcache:
        mc = macro.request.mc
    else:
        return formatter.rawHTML('<!-- Twitter macro requires memcache for performance reasons -->')
    if args:
        tokens = args.split(',')
        query = ' OR '.join('"%s"' % p.strip() for p in tokens)
        nicequery = ' or '.join("''%s''" % p.strip() for p in tokens)
        isdefault = False
    else:
        # If no query is specified, default to the domain part of our domain
        # name (e.g. turbowiki.org -> turbowiki) and attach usage message
        query = config.wiki_base_domain.split('.')[0]
        nicequery = query
        isdefault = True
    # get the info from search.twitter.com
    class AppURLopener(urllib.FancyURLopener):
        version = "SycamoreTwitterMacro/1.8 (http://github.org/rtucker/sycamore/)"
    urllib._urlopener = AppURLopener()
    quotedquery = urllib.quote_plus(query + ' ' + config.twitter_params)
    response_dict = mc.get("twitter-18-" + urllib.quote(query))
    if not response_dict:
        # Cache miss: hit the search API and cache the parsed result.
        try:
            response_dict = simplejson.loads(urllib.urlopen(
                'http://search.twitter.com/search.json?q=%s&rpp=%i' % (
                    quotedquery, config.twitter_maxlines)).read())
            mc.set("twitter-18-" + urllib.quote(query), response_dict,
                   time=3600)
        except IOError:
            response_dict = None
    display_list = ["||<bgcolor='#E0E0FF'>'''Local Twitter search results for [http://search.twitter.com/search?q=%s %s]'''||" % (quotedquery, nicequery)]
    outputting = False
    if not response_dict:
        outputting = True
        display_list.append("||An error occurred during processing...||")
    elif len(response_dict['results']) > 0:
        for i in response_dict['results']:
            name = i['from_user']
            # BUG FIX: the original called .replace('&', '&'), a no-op;
            # the evident intent was to unescape API-encoded ampersands.
            text = wikiutil.simpleStrip(
                macro.request, i['text']).replace('&amp;', '&').replace('\n', ' ')
            try:
                location = i['location']
            except KeyError:
                location = ''
            created = time.mktime(time.strptime(
                i['created_at'], '%a, %d %b %Y %H:%M:%S +0000'))
            created_seconds_ago = int(time.mktime(time.gmtime()) - created)
            status_id = i['id']  # renamed from `id`: don't shadow the builtin
            link = 'http://twitter.com/%s/statuses/%i' % (name, status_id)
            namelink = 'http://twitter.com/%s' % name
            if created_seconds_ago < config.twitter_maxtime:
                display_list.append('||%s ^[%s %s], %s, [%s %s ago]^||' % (
                    text, namelink, name, location, link,
                    elapsed_time(created_seconds_ago, separator=', ')))
                outputting = True
    if not outputting:
        display_list.append('||Nothing on [http://twitter.com/ Twitter] in the local area... maybe you should go stir something up.||')
    if isdefault:
        display_list.append("||''The Twitter macro searches Twitter for recent local traffic about a topic! Usage: {{{[[Twitter(search string)]]}}}''||")
    outstring = '\n'.join(display_list)
    return wikiutil.wikifyString(outstring, macro.request, formatter.page,
                                 strong=True)
def execute(macro, args, formatter=None):
    """Build a table of contents from the page's ``= heading =`` lines.

    args may be 'right'/'left' (float the TOC) or an integer max depth.
    Returns an HTML table, or '' when no headings qualify.
    """
    import hashlib  # sha module is deprecated; sha.new == hashlib.sha1
    if not formatter:
        formatter = macro.formatter
    # A regular expression that borders on incomprehensible
    heading = re.compile(r"^\s*(?P<hmarker>=+)\s*(.*)\s*(?P=hmarker)")
    result = []
    baseindent = 0
    indent = 0
    titles = {}
    right = False
    left = False
    if args:
        if args.lower() == 'right':
            right = True
        elif args.lower() == 'left':
            left = True
    try:
        mindepth = int(macro.request.getPragma('section-numbers', 1))
    except (ValueError, TypeError):
        mindepth = 1
    try:
        # Non-numeric args ('right'/'left'/None) fall through to 99.
        maxdepth = max(int(args), 1)
    except (ValueError, TypeError):
        maxdepth = 99
    for line in macro.parser.lines:
        # Filter out the headings
        # FIXME this also finds "headlines" in {{{ code sections }}}:
        match = heading.match(line)
        if not match:
            continue
        title_text = match.group(2).strip()  # A slightly questionable strip
        if not title_text:
            continue
        wikified_title = wikiutil.wikifyString(title_text, macro.request,
                                               formatter.page)
        title_text = wikiutil.simpleStrip(
            macro.request, wikiutil.stripOuterParagraph(wikified_title))
        # Count duplicate titles so repeated headings get unique anchors.
        titles.setdefault(title_text, 0)
        titles[title_text] += 1
        # Get new indent level
        newindent = len(match.group(1))
        if newindent > maxdepth or newindent < mindepth:
            continue
        # CONSISTENCY FIX: use the (possibly overridden) `formatter`
        # everywhere; the original mixed it with macro.formatter, so a
        # passed-in formatter was partially ignored.
        # Close lists
        for i in range(0, indent - newindent):
            result.append(formatter.number_list(0))
        # Open lists
        for i in range(0, newindent - indent):
            result.append(formatter.number_list(1))
        # Add the heading
        unique_id = ''
        if titles[title_text] > 1:
            unique_id = '-%d' % titles[title_text]
        result.append(formatter.listitem(1))
        result.append(formatter.anchorlink(
            "head-" + hashlib.sha1(title_text.encode('utf-8')).hexdigest()
            + unique_id,
            title_text, escape=False))
        result.append(formatter.listitem(0))
        # Set new indent level
        indent = newindent
    # Close pending lists
    for i in range(baseindent, indent):
        result.append(formatter.number_list(0))
    if not result:
        return ''
    html_class = 'tableOfContents'
    if right:
        html_class += ' floatRightTOC'
    elif left:
        html_class += ' floatLeftTOC'
    return ('<table class="%s"><tr><td>' % html_class
            + ''.join(result) + '</td></tr></table>')
def _twelve_hour_clock(hour, minute):
    """Return a 12-hour clock string such as "9:05 PM".

    Fixes the original formatting bug where non-zero minutes were not
    zero-padded (21:05 rendered as "9:5 PM").
    """
    if hour == 0:
        h12, meridiem = 12, "AM"
    elif hour < 12:
        h12, meridiem = hour, "AM"
    elif hour == 12:
        h12, meridiem = 12, "PM"
    else:
        h12, meridiem = hour - 12, "PM"
    return "%d:%02d %s" % (h12, minute, meridiem)


def doRSS(request):
    """Set up the RSS file: build an RSS 2.0 feed of today's Events Board
    events and return the serialized XML string."""
    rss_init_text = (
        '<?xml version="1.0" ?>\n'
        '<rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.1/">\n'
        '<channel><title>%s Events Board</title><link>%s</link>'
        '<description>'
        'Events occuring soon, taken from the %s Events Board.'
        '</description><language>en-us</language>\n'
        '</channel>\n'
        '</rss>\n' % (request.config.sitename,
                      Page("Events Board", request).link_to(),
                      request.config.sitename))
    creator_text = 'The %s Robot' % request.config.sitename
    rss_dom = xml.dom.minidom.parseString(rss_init_text)
    channel = rss_dom.getElementsByTagName("channel")[0]
    # Current wiki-local date parts, used below for each item's dc:date.
    # (Replaces deprecated string.split(); the original recomputed the
    # same split three times and computed unused hour/month-name locals.)
    current_time = request.user.getFormattedDateTime(time.time(),
                                                     global_time=True)
    date_fields = current_time.split(" ")[0].split("-")
    current_year = date_fields[0]
    current_month = date_fields[1]
    current_day = date_fields[2]
    events = []
    timenow = time.time()
    # Window: [start of today, start of tomorrow), wiki-local, expressed
    # as UTC timestamps for the query.
    today_struct = time.gmtime(timenow + request.config.tz_offset)
    today = list(today_struct[0:3]) + [0, 0, 0, 0, 0, 0]
    today = calendar.timegm(today) - request.config.tz_offset
    tomorrow_struct = time.gmtime(timenow + 60*60*24 +
                                  request.config.tz_offset)
    tomorrow = list(tomorrow_struct[0:3]) + [0, 0, 0, 0, 0, 0]
    tomorrow = calendar.timegm(tomorrow) - request.config.tz_offset
    request.cursor.execute(
        """SELECT uid, event_time, posted_by, text, location, event_name from events where event_time >= %(today)s and event_time < %(tomorrow)s and wiki_id=%(wiki_id)s""",
        {'today': today, 'tomorrow': tomorrow,
         'wiki_id': request.config.wiki_id})
    result = request.cursor.fetchone()
    while result:
        events.append(result)
        result = request.cursor.fetchone()
    for event in events:
        event_uid = event[0]  # renamed from `id`: don't shadow the builtin
        event_time_unix = event[1]
        posted_by = event[2]
        text = event[3]
        event_location = event[4]
        event_name = event[5]
        # stupid date stuff: convert the event time to wiki-local parts
        time_struct = time.gmtime(event_time_unix + request.config.tz_offset)
        year, month, day, hour, minute = time_struct[0:5]
        if event_name:
            processed_name = wikiutil.simpleStrip(request, event_name)
        else:
            processed_name = ''
        processed_text = doParse(text, request)
        processed_location = doParse(event_location, request)
        ptime = _twelve_hour_clock(int(hour), int(minute))
        total_date = "%s, %s %s" % (
            datetoday(int(day), int(month), int(year)), findMonth(month), day)
        item = rss_dom.createElement("item")
        description_text = ('<b>Date:</b> %s<br>\n'
                            '<b>Time:</b> %s<br>\n'
                            '<b>Location:</b> %s<br><br>\n'
                            '%s (Posted by %s)\n' % (
                                total_date, ptime, processed_location,
                                processed_text,
                                user.getUserLink(
                                    request,
                                    user.User(request, name=posted_by),
                                    absolute=True)))
        item_guid = rss_dom.createElement("guid")
        item_guid.setAttribute("isPermaLink", "false")
        item_guid.appendChild(rss_dom.createTextNode(str(event_uid)))
        item.appendChild(item_guid)
        item_description = rss_dom.createElement("description")
        item_description.appendChild(rss_dom.createTextNode(description_text))
        item_title = rss_dom.createElement("title")
        item_title.appendChild(rss_dom.createTextNode(processed_name))
        item.appendChild(item_title)
        item_link = rss_dom.createElement("link")
        item_link.appendChild(rss_dom.createTextNode(
            Page("Events Board", request).url(relative=False)))
        item.appendChild(item_link)
        # NOTE(review): dc:date is the *current* date, not the event's date.
        # Looks suspicious but preserved as-is; confirm intent before changing.
        item_date = rss_dom.createElement("dc:date")
        item_date.appendChild(rss_dom.createTextNode(
            "%s-%s-%s" % (current_year, current_month, current_day)))
        item.appendChild(item_date)
        creator = rss_dom.createElement("dc:creator")
        creator.appendChild(rss_dom.createTextNode(creator_text))
        item.appendChild(creator)
        item.appendChild(item_description)
        channel.appendChild(item)
    the_xml = rss_dom.toxml()
    return the_xml
def execute(macro, args, formatter):
    """Render an Address macro (map-enabled variant with a Google
    directions icon).

    Args accept Address("address","description"),
    Address("address","lat","long"), or a bare address string. Returns
    HTML; falls back to the plain wikified address when geocoding fails
    or the wiki uses the old map system.
    """
    if not args:
        return "<em>Please provide an address.</em>"
    if not formatter:
        formatter = macro.formatter
    # re for old format Address("address","description")
    oldformat = re.compile(r'^\s*\"(.+)\"\s*,\s*\"(.+)\"\s*$')
    # re for new format Address("address","lat","long")
    newformat = re.compile(r'^\s*\"(.+)\"\s*,\s*\"(.+)\"\s*,\s*\"(.+)\"\s*$')
    lat = None
    lon = None  # renamed from `long` to avoid shadowing the builtin
    parm1 = None
    # Search once per pattern and reuse the match (original ran each
    # regex twice).
    match = newformat.search(args)
    if match:
        (address, lat, lon) = match.groups()
    else:
        match = oldformat.search(args)
        if match:
            (address, parm1) = match.groups()
        else:
            address = args
    address = address.strip('"')
    # allow links in the address to work properly
    wikified_address = wikiutil.stripOuterParagraph(
        wikiutil.wikifyString(address, macro.request, formatter.page))
    address = wikiutil.simpleStrip(macro.request, wikified_address).strip()
    if macro.request.config.address_locale and address.find(',') == -1:
        # add the address locale if it's lacking
        full_address = '%s, %s' % (address,
                                   macro.request.config.address_locale)
    else:
        full_address = address
    if macro.request.config.has_old_wiki_map:
        # we just ignore [[address]] on davis wiki
        return wikified_address
    if lat is None:
        place = Location(macro, formatter, full_address)
    else:
        place = Location(macro, formatter, full_address, lat, lon)
    if place.latitude is None:
        # Geocoding failed; emit just the wikified text.
        return wikified_address
    # Old-format description, if given, is used as the display text.
    if parm1:
        out = wikiutil.stripOuterParagraph(
            wikiutil.wikifyString(parm1, macro.request, formatter.page))
    else:
        out = wikified_address
    # Google Maps "get directions" icon next to the address.
    out += ' <a href="http://maps.google.com/maps?f=d&hl=en&daddr='
    out += urllib.quote_plus(address)
    out += '"><img class="dd_icon" src="'
    out += "%s%s/dd-start.png" % (config.web_dir, config.url_prefix)
    out += '" alt="[Directions]" title="Click for driving, walking, or bus '
    out += 'directions from Google" height="17" width="10"></a>'
    addresscount = place.getAddressCount()
    if addresscount > 5:
        # There's more addresses than usual; let's ratchet back the nearbys
        maxnearby = int(75 / addresscount)
        out += ' <!-- limiting to %i of %i nearby locations -->' % (
            maxnearby, addresscount)
    else:
        maxnearby = 15
    nearby = place.getNearby(max=maxnearby)
    out += mapHTML(macro, place, nearby)
    # Only record the address on a live (non-historical) text_python render.
    ignore = formatter.name != 'text_python' or formatter.page.prev_date
    if not ignore:
        # `in` replaces Py2-only has_key(); behavior is identical.
        if formatter.page.page_name in macro.request.addresses:
            macro.request.addresses[formatter.page.page_name].append(place)
        else:
            macro.request.addresses[formatter.page.page_name] = [place]
    return out