import re
import time

# net (page fetching) and colors (IRC colour expansion) are the bot's own
# helpers, defined elsewhere in this repo.


def fetch(pagina):
    data = net.pageWiki(pagina)
    if not data:
        return "\x0304Page not found.\x0f"  # colour 04 = red

    # Flatten the wikitext onto a single IRC line.
    data = data.replace("\r", "·")
    data = data.replace("\n", "·")

    # Strip image links; turn '''bold''' and [[links]] into IRC formatting codes.
    data = re.sub(r"\[\[[Ii]mage?:.*?\]\]", "", data)
    data = re.sub(r"'''(.*?)'''", "\x02\x0301\\g<1>\x0f", data)
    data = re.sub(r"\[\[([^\]]*?)\|(.*?)\]\]", "\x0312\\g<2>\x0f", data)
    data = re.sub(r"\[\[(.*?)\]\]", "\x0312\\g<1>\x0f", data)

    if re.search("^(?:.*:)?special:statistics$", pagina.lower()):
        # Raw statistics arrive as key=value pairs; sample for cawiki, 11-10-08 ~18:00 CEST:
        # total=321076;good=135139;views=171;edits=2748283;users=19445;activeusers=1108;admins=19;images=3985;jobs=20
        data = data.replace("good", "articles").replace("activeusers", "active users")
        data = data.replace("=", ": $l")
        data = data.replace(";", "$N, ")
        data = data.decode("utf-8")
        data = data[:-1] + "$N."
        data = colors(data)  # $l / $N placeholders are expanded by the bot's colors() helper
        data = data.encode("utf-8")
    elif "Wikipedia:" in pagina:
        pass  # project-namespace pages are passed through untrimmed
    elif " " in data:
        # Trim to 350 characters, dropping the last (possibly cut-off) word.
        if len(data) > 350:
            data = data[:350]
            data = data.split(" ")[:-1]
            data = " ".join(data)
            data = data + "..."

    if data.startswith("#REDIRECT \x0312"):
        # data[13:-1] strips "#REDIRECT ", the colour code and the trailing reset,
        # leaving only the redirect target.
        link = net.pageURL(pagina).split("/wiki/")[0] + "/wiki/"
        link = link + data[13:-1].replace(" ", "_")
        data += " ( %s )." % link

    return data[0:350]
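
# For reference, the wikitext -> IRC markup step used in fetch() above can be
# exercised on its own. This is only an illustrative sketch with a made-up
# sentence and a hypothetical _demo_markup() name; the bot never calls it.
def _demo_markup():
    sample = "'''Barcelona''' is the capital of [[Catalonia|Catalunya]], by the [[Mediterranean Sea]]."
    out = re.sub(r"'''(.*?)'''", "\x02\x0301\\g<1>\x0f", sample)        # '''bold''' -> IRC bold, colour 01
    out = re.sub(r"\[\[([^\]]*?)\|(.*?)\]\]", "\x0312\\g<2>\x0f", out)  # [[target|label]] -> blue label
    out = re.sub(r"\[\[(.*?)\]\]", "\x0312\\g<1>\x0f", out)             # [[target]] -> blue target
    # -> '\x02\x0301Barcelona\x0f is the capital of \x0312Catalunya\x0f, by the \x0312Mediterranean Sea\x0f.'
    return out
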
def tam(pagina):
    t1 = time.time()
    data = net.pageWiki(pagina)
    if not data:
        return "\x0304Page not found.\x0f"
    salida = net.pageURL(pagina)
    if len(data) > 1024:
        salida += " is %i bytes (%iKB) long." % (len(data), len(data) / 1024)
    else:
        salida += " is %i bytes long." % len(data)
    salida += " Took %f seconds." % (time.time() - t1)
    return salida
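
# Minimal usage sketch for tam(). FakeNet and its canned 2 KB payload are
# hypothetical stand-ins for the repo's net helper; the __main__ guard keeps
# this from running when the bot imports the module.
if __name__ == "__main__":
    class FakeNet(object):
        def pageWiki(self, pagina):
            return "x" * 2048  # pretend wikitext, exactly 2 KB
        def pageURL(self, pagina):
            return "https://ca.wikipedia.org/wiki/" + pagina.replace(" ", "_")

    net = FakeNet()  # shadow the real helper for the demo only
    print(tam("Barcelona"))
    # e.g. https://ca.wikipedia.org/wiki/Barcelona is 2048 bytes (2KB) long. Took 0.000008 seconds.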