Exemplo n.º 1
0
    def get(self):
        wikipedia.set_lang(u'ru')
        try:
            page = wikipedia.page(u'Проект:Города_России/Списки_улиц/Казани')
            streets = []
            for link in page.links:
                nlink = unicode(link).encode('utf-8').strip().decode('utf-8')
                norm_name = normalize(nlink)
                try:
                    street_info = StreetInfo.get_by_norm_name(norm_name)
                    if not street_info:
                        street_info = StreetInfo()

                    street_page = wikipedia.page(nlink)
                    street_info.name = nlink
                    street_info.norm_name = norm_name
                    street_info.info = unicode(street_page.summary).encode('utf-8').strip()
                    street_info.images = [Image(url=x) for x in street_page.images]
                    street_info.city = u'Казань'.encode('utf-8').strip()
                    street_info.lang=u'ru'.encode('utf-8').strip()
                    
                    street_info.put()
                    
                except Exception, e:
                    print nlink.encode('utf-8')
        except DeadlineExceededError:
            pass
        
        self.response.headers['Content-Type'] = "text/html; charset=utf-8"
        self.response.write(json.dumps({'success':True}))
Exemplo n.º 2
0
def _search_street_infos(street, city, lang=u'ru'):
    """Return the first StreetInfo whose normalized name contains the
    normalized ``street`` as a substring, or None when nothing matches.

    NOTE(review): ``city`` and ``lang`` are currently unused — the linear
    scan covers every StreetInfo entity regardless of city or language.
    Confirm whether the query should filter on them.
    """
    # Hoist the loop-invariant normalization out of the per-entity scan.
    needle = normalize(street)
    for info in StreetInfo.query():
        if needle in normalize(info.name):
            return info
    return None