Code Example #1
    def clean(self):
        self._clean_captcha()
        cleaned_data = super(NewsItemForm, self).clean()

        # Reverse-geocode if we need to.
        if not cleaned_data['location_name']:
            if cleaned_data['location']:
                from ebpub.geocoder.reverse import reverse_geocode
                from ebpub.geocoder.reverse import ReverseGeocodeError
                try:
                    block, distance = reverse_geocode(cleaned_data['location'])
                    cleaned_data['location_name'] = block.pretty_name
                except ReverseGeocodeError:
                    logger.info(
                        "Saving NewsItem with no location_name because reverse-geocoding %(location)s failed"
                        % cleaned_data)

        # Note, any NewsItem fields that aren't listed in self._meta.fields
        # have to be manually saved here, because that's the list that's
        # normally consulted when setting attributes on the instance.
        # ... And yes, clean() is normally responsible for setting
        # attributes on the bound instance.
        for key in forms.fields_for_model(self._meta.model):
            if key in cleaned_data.keys():
                setattr(self.instance, key, cleaned_data[key])

        return cleaned_data
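
Every example on this page repeats one core pattern: when an item has a point geometry but no human-readable location_name, reverse-geocode the point and use the matched block's pretty_name. A minimal sketch of that pattern in isolation, assuming an installed ebpub with block data loaded (the helper name is hypothetical):

from ebpub.geocoder.reverse import reverse_geocode, ReverseGeocodeError

def name_for_point(point, default=u''):
    # Hypothetical helper: return a display name for a GEOS point,
    # or ``default`` when no block is close enough to match.
    try:
        block, distance = reverse_geocode(point)
        return block.pretty_name
    except ReverseGeocodeError:
        return default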
Code Example #2
    def geocode_if_needed(self,
                          point,
                          location_name,
                          address_text='',
                          **kwargs):
        """
        If either ``point`` or ``location_name`` is not set, try to
        geocode / reverse-geocode as needed to derive one from the
        other.  Returns (point, location_name).

        If neither one is set, try to parse addresses out of
        ``address_text`` and derive both.

        Either value may be None if it can't be determined.

        Any other keyword args are passed to ``full_geocode()``.
        """
        if not point:
            text = convert_entities(location_name or address_text)
            self.logger.debug("...Falling back on geocoding from '%s...'" %
                              text[:50])
            addrs = parse_addresses(text)
            for addr, unused in addrs:
                try:
                    result = self.geocode(addr, **kwargs)
                    if result is not None:
                        point = result['point']
                        self.logger.debug("internally geocoded %r" % addr)
                        # TODO: what if it's a Place?
                        if not location_name:
                            location_name = result['address']
                        break
                except:
                    self.logger.exception(
                        'uncaught geocoder exception on %r\n' % addr)
                    continue

        if point and not location_name:
            # Fall back to reverse-geocoding.
            from ebpub.geocoder import reverse
            try:
                block, distance = reverse.reverse_geocode(point)
                self.logger.debug(" Reverse-geocoded point to %r" %
                                  block.pretty_name)
                location_name = block.pretty_name
            except reverse.ReverseGeocodeError:
                location_name = None

        return (point, location_name)
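
A hedged usage sketch for geocode_if_needed: self is assumed to be a scraper instance that provides this method along with logger and geocode(), and the argument values are illustrative:

# Inside a scraper method (sketch only):
point, name = self.geocode_if_needed(
    None,                                # no geometry yet
    u'123 Main St.',                     # location_name to geocode
    address_text=u'Fire reported at 123 Main St.')
if point is None and name is None:
    # Neither value could be derived; a scraper would typically
    # skip the record here.
    self.logger.info("no usable location")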
Code Example #3
def fix_newsitem_loc_name(item, dry_run=True):
    # Fall back to reverse-geocoding.
    from ebpub.geocoder import reverse
    fixed = False
    try:
        block, distance = reverse.reverse_geocode(item.location)
        print " Reverse-geocoded point to %r" % block.pretty_name
        item.location_name = block.pretty_name
        fixed = True
    except reverse.ReverseGeocodeError:
        print " Failed to reverse geocode %s for %s" % (item.location.wkt, item)
        item.location_name = u''
    if fixed and not dry_run:
        print "Saving %s" % item
        item.save()
    if not fixed:
        print "Couldn't fix %s" % item
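
A sketch of how this repair helper might be driven, assuming NewsItem comes from ebpub.db.models and stores an empty string for missing names, as the except branch above does:

from ebpub.db.models import NewsItem

# Dry run over items that have a geometry but no display name.
for item in NewsItem.objects.filter(location_name=u'').exclude(location=None):
    fix_newsitem_loc_name(item, dry_run=True)  # pass dry_run=False to save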
Code Example #4
def fix_newsitem_loc_name(item, dry_run=True):
    # Fall back to reverse-geocoding.
    from ebpub.geocoder import reverse
    fixed = False
    try:
        block, distance = reverse.reverse_geocode(item.location)
        print " Reverse-geocoded point to %r" % block.pretty_name
        item.location_name = block.pretty_name
        fixed = True
    except reverse.ReverseGeocodeError:
        print " Failed to reverse geocode %s for %s" % (item.location.wkt,
                                                        item)
        item.location_name = u''
    if fixed and not dry_run:
        print "Saving %s" % item
        item.save()
    if not fixed:
        print "Couldn't fix %s" % item
Code Example #5
    def clean_list_record(self, record):
        # clean up a record dict
        # Item date, in timezone of the photo owner.
        # Not sure how to determine what that is, so we'll leave it.
        cleaned = {}
        cleaned['item_date'] = datetime.datetime.strptime(
            record['datetaken'], '%Y-%m-%d %H:%M:%S')
        cleaned['item_date'] = cleaned['item_date'].date()
        # Posted date, UTC timestamp.
        pub_date = datetime.datetime.fromtimestamp(float(record['dateupload']),
                                                   utc)
        cleaned['pub_date'] = pub_date.astimezone(local_tz)

        description = record['description']['_content']
        cleaned['description'] = convert_entities(description.strip())

        cleaned['title'] = convert_entities(record['title'])
        x, y = record['longitude'], record['latitude']
        cleaned['location'] = Point((float(x), float(y)))

        # Possibly we could figure out flickr's geo API and resolve
        # the photo's place_id and/or woeid to the place name?  But
        # those are probably not specific enough; reverse-geocode
        # instead.
        try:
            block, distance = reverse_geocode(cleaned['location'])
            cleaned['location_name'] = block.pretty_name
        except ReverseGeocodeError:
            raise SkipRecord("Could not geocode location %s, %s" % (x, y))

        # Don't think any of the urls returned by the API's "extras"
        # correspond to the page? not sure.
        cleaned['url'] = 'http://www.flickr.com/photos/%(owner)s/%(id)s' % record

        attributes = {}
        attributes['sourcename'] = 'Flickr'
        #attributes['photo_id'] = record['id']
        attributes['user_id'] = record['owner']
        attributes['username'] = record['ownername']
        # Thumbnail. 'Small square' photos are 75x75.
        attributes['photo_href'] = record['url_sq']
        cleaned['_attributes'] = attributes
        return cleaned
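
Note the coordinate order in cleaned['location']: assuming Point is GeoDjango's django.contrib.gis.geos.Point, as elsewhere in OpenBlock, it takes (x, y), i.e. (longitude, latitude), the reverse of the 'lat, lon' order most feeds report. A tiny sketch of the convention with illustrative values:

from django.contrib.gis.geos import Point

lon, lat = -71.06, 42.36              # illustrative coordinates
pt = Point((float(lon), float(lat)))  # (x, y) == (lon, lat), NOT (lat, lon)
assert (pt.x, pt.y) == (lon, lat)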
Code Example #6
    def clean_list_record(self, record):
        # clean up a record dict
        # Item date, in timezone of the photo owner.
        # Not sure how to determine what that is, so we'll leave it.
        cleaned = {}
        cleaned['item_date'] = datetime.datetime.strptime(record['datetaken'],
                                                          '%Y-%m-%d %H:%M:%S')
        cleaned['item_date'] = cleaned['item_date'].date()
        # Posted date, UTC timestamp.
        pub_date = datetime.datetime.fromtimestamp(
            float(record['dateupload']), utc)
        cleaned['pub_date'] = pub_date.astimezone(local_tz)

        description = record['description']['_content']
        cleaned['description'] = convert_entities(description.strip())

        cleaned['title'] = convert_entities(record['title'])
        x, y = record['longitude'], record['latitude']
        cleaned['location'] = Point((float(x), float(y)))

        # Possibly we could figure out flickr's geo API and resolve
        # the photo's place_id and/or woeid to the place name?  But
        # those are probably not specific enough; reverse-geocode
        # instead.
        try:
            block, distance = reverse_geocode(cleaned['location'])
            cleaned['location_name'] = block.pretty_name
        except ReverseGeocodeError:
            raise SkipRecord("Could not geocode location %s, %s" % (x, y))

        # Don't think any of the urls returned by the API's "extras"
        # correspond to the page? not sure.
        cleaned['url'] = 'http://www.flickr.com/photos/%(owner)s/%(id)s' % record

        attributes = {}
        attributes['sourcename'] = 'Flickr'
        #attributes['photo_id'] = record['id']
        attributes['user_id'] = record['owner']
        attributes['username'] = record['ownername']
        # Thumbnail. 'Small square' photos are 75x75.
        attributes['photo_href'] = record['url_m']
        cleaned['_attributes'] = attributes
        return cleaned
Code Example #7
File: base.py Project: DotNetWebs/openblock
    def geocode_if_needed(self, point, location_name, address_text='',
                          **kwargs):
        """
        If either ``point`` or ``location_name`` is not set, try to
        geocode / reverse-geocode as needed to derive one from the
        other.  Returns (point, location_name).

        If neither one is set, try to parse addresses out of
        ``address_text`` and derive both.

        Either value may be None if it can't be determined.

        Any other keyword args are passed to ``full_geocode()``.
        """
        if not point:
            text = convert_entities(location_name or address_text)
            self.logger.debug("...Falling back on geocoding from '%s...'" % text[:50])
            addrs = parse_addresses(text)
            for addr, unused in addrs:
                try:
                    result = self.geocode(addr, **kwargs)
                    if result is not None:
                        point = result['point']
                        self.logger.debug("internally geocoded %r" % addr)
                        # TODO: what if it's a Place?
                        if not location_name:
                            location_name = result['address']
                        break
                except:
                    self.logger.exception('uncaught geocoder exception on %r\n' % addr)
                    continue

        if point and not location_name:
            # Fall back to reverse-geocoding.
            from ebpub.geocoder import reverse
            try:
                block, distance = reverse.reverse_geocode(point)
                self.logger.debug(" Reverse-geocoded point to %r" % block.pretty_name)
                location_name = block.pretty_name
            except reverse.ReverseGeocodeError:
                location_name = None

        return (point, location_name)
Code Example #8
File: forms.py Project: slinkp/openblock
    def clean(self):
        self._clean_captcha()
        cleaned_data = super(NewsItemForm, self).clean()
        # Reverse-geocode if we need to - eg. user clicked map
        # but didn't give an address.
        if not cleaned_data.get('location_name'):
            if cleaned_data.get('location'):
                from ebpub.geocoder.reverse import reverse_geocode
                from ebpub.geocoder.reverse import ReverseGeocodeError
                try:
                    block, distance = reverse_geocode(cleaned_data['location'])
                    cleaned_data['location_name'] = block.pretty_name
                except ReverseGeocodeError:
                    logger.info(
                        "Saving NewsItem with no location_name because reverse-geocoding %(location)s failed"
                        % cleaned_data)

        # Geocode if we can, and need to.
        # Should not be necessary, but this is groundwork for #284.
        if not cleaned_data.get('location'):
            if cleaned_data.get('location_name'):
                from ebpub.geocoder.base import full_geocode
                try:
                    geocoded = full_geocode(
                        cleaned_data['location_name'].strip(), guess=True)
                    cleaned_data['location'] = geocoded['result'].location.wkt
                except (IndexError, KeyError, AttributeError):
                    logger.info(
                        "Saving NewsItem with no location because geocoding %(location_name)s failed"
                        % cleaned_data)

        # Note, any NewsItem fields that aren't listed in self._meta.fields
        # have to be manually saved here, because that's the list that's
        # normally consulted when setting attributes on the instance.
        # ... And yes, clean() is normally responsible for setting
        # attributes on the bound instance.
        for key in forms.fields_for_model(self._meta.model):
            if key in cleaned_data.keys():
                setattr(self.instance, key, cleaned_data[key])

        return cleaned_data
Code Example #9
File: forms.py Project: DotNetWebs/openblock
    def clean(self):
        self._clean_captcha()
        cleaned_data = super(NewsItemForm, self).clean()
        # Reverse-geocode if we need to - eg. user clicked map
        # but didn't give an address.
        if not cleaned_data.get('location_name'):
            if cleaned_data.get('location'):
                from ebpub.geocoder.reverse import reverse_geocode
                from ebpub.geocoder.reverse import ReverseGeocodeError
                try:
                    block, distance = reverse_geocode(cleaned_data['location'])
                    cleaned_data['location_name'] = block.pretty_name
                except ReverseGeocodeError:
                    logger.info("Saving NewsItem with no location_name because reverse-geocoding %(location)s failed" % cleaned_data)

        # Geocode if we can, and need to.
        # Should not be necessary, but this is groundwork for #284.
        if not cleaned_data.get('location'):
            if cleaned_data.get('location_name'):
                from ebpub.geocoder.base import full_geocode
                try:
                    geocoded = full_geocode(cleaned_data['location_name'].strip(),
                                            guess=True)
                    cleaned_data['location'] = geocoded['result'].location.wkt
                except (IndexError, KeyError, AttributeError):
                    logger.info("Saving NewsItem with no location because geocoding %(location_name)s failed" % cleaned_data)

        # Note, any NewsItem fields that aren't listed in self._meta.fields
        # have to be manually saved here, because that's the list that's
        # normally consulted when setting attributes on the instance.
        # ... And yes, clean() is normally responsible for setting
        # attributes on the bound instance.
        for key in forms.fields_for_model(self._meta.model):
            if key in cleaned_data.keys():
                setattr(self.instance, key, cleaned_data[key])

        return cleaned_data
Code Example #10
    def clean(self):
        self._clean_captcha()
        cleaned_data = super(NewsItemForm, self).clean()

        # Reverse-geocode if we need to.
        if not cleaned_data['location_name']:
            if cleaned_data['location']:
                from ebpub.geocoder.reverse import reverse_geocode
                from ebpub.geocoder.reverse import ReverseGeocodeError
                try:
                    block, distance = reverse_geocode(cleaned_data['location'])
                    cleaned_data['location_name'] = block.pretty_name
                except ReverseGeocodeError:
                    logger.info("Saving NewsItem with no location_name because reverse-geocoding %(location)s failed" % cleaned_data)

        # Note, any NewsItem fields that aren't listed in self._meta.fields
        # have to be manually saved here, because that's the list that's
        # normally consulted when setting attributes on the instance.
        # ... And yes, clean() is normally responsible for setting
        # attributes on the bound instance.
        for key in forms.fields_for_model(self._meta.model):
            if key in cleaned_data.keys():
                setattr(self.instance, key, cleaned_data[key])

        return cleaned_data
Code Example #11
File: georeportv2.py Project: peudadayusuf/openblock
    def _update_service_request(self, sreq):
        service_request_id = self._get_request_field(sreq, 'service_request_id')

        if not service_request_id:
            log.info("Skipping request with no request id (may be in progress)!")
            return


        # pull out the location first, if we can't do this, we don't want it.
        try:
            point = Point(float(sreq.find('long').text), 
                          float(sreq.find('lat').text),
                          srid=4326)
        except: 
            log.debug("Skipping request with invalid location (%s)" % service_request_id)
            return
        if self.bounds is not None:
            if not self.bounds.intersects(point):
                log.debug("Skipping request at %s, outside bounds" % point)
                return
        try:
            ni = NewsItem.objects.filter(schema=self.schema).by_attribute(self.service_request_id_field, 
                                                                          service_request_id).all()[0]
            log.info('updating existing request %s' % service_request_id)
        except IndexError:
            # create the NewsItem
            ni = NewsItem(schema=self.schema)
            log.info('created new service request %s' % service_request_id)

        ni.title = self._get_request_field(sreq, 'service_name')
        ni.description = self._get_request_field(sreq, 'description')
        ni.location = point
        ni.location_name = self._get_request_field(sreq, 'address')
        # try to reverse-geocode this point
        if not ni.location_name:
            try:
                block, distance = reverse_geocode(ni.location)
                ni.location_name = block.pretty_name
            except:
                log.debug("Failed to reverse geocode item %s" % service_request_id)

        # try to pull the requested_datetime into pubdate/itemdate
        # default to now.
        try: 
            ni.pub_date = pyrfc3339.parse(sreq.find('requested_datetime').text)
        except:
            ni.pub_date = datetime.datetime.utcnow()
            log.info("Filling in current time for pub_date on item with no requested_datetime (%s)" % service_request_id)
        ni.item_date = datetime.date(ni.pub_date.year, ni.pub_date.month, ni.pub_date.day)

        if self.html_url_template:
            ni.url = self.html_url_template.replace('{id}', service_request_id)
            log.info('Assigning html url "%s" to %s' % (ni.url, service_request_id))

        ni.save()

        ni.attributes['service_request_id'] = service_request_id

        # varchar fields
        for fieldname in ('request_id', 'service_code', 'address_id',
                          'media_url', 'status_notes', 'service_notice'):
            val = self._get_request_field(sreq, fieldname)
            if val != '':
                if len(val) < 4096:
                    ni.attributes[fieldname] = val
                else: 
                    log.info("truncating value for %s (%s)" % (fieldname, val))
                    ni.attributes[fieldname] = val[0:4096]

        # text fields
        for fieldname in ('service_notice',):  # note the comma: one field, not characters
            val = self._get_request_field(sreq, fieldname)
            if val != '':
                ni.attributes[fieldname] = val

        
        # datetime fields
        for fieldname in ('expected_datetime', 'requested_datetime'):
            val = self._get_request_field(sreq, fieldname)
            if val == '':
                continue

            # try to parse it
            try:
                ni.attributes[fieldname] = pyrfc3339.parse(val) 
            except ValueError: 
                # invalid date, just omit
                log.info('Omitting invalid datetime field %s = %s' % (fieldname, val))
                pass
        
        # lookups 
        for fieldname in ('service_name', 'agency_responsible', 'status'):
            val = self._get_request_field(sreq, fieldname)
            if val == '':
                # fall back to an 'Unknown' lookup for empty values
                val = 'Unknown'
            ni.attributes[fieldname] = self._lookup_for(fieldname, val)
Code Example #12
    def update(self, searchTerm, searchOffset):
        youtubeAPI = YouTubeAPI()
        numentries = 50  # How many results we want the API to return
        logger.info("Starting YouTube_Scraper")
        response = youtubeAPI.runQuery(searchTerm, numentries, searchOffset)
        seencount = addcount = updatecount = 0
        if response:
            for entry in response:
                seencount += 1
                count = 0
                while count != 9:
                    if 'ns' + str(count) + ':title' in entry:
                        if entry['ns' + str(count) + ':title'] != '':
                            title = entry['ns' + str(count) + ':title']
                            count += 1
                        else:
                            logger.info("Skipping, as title is empty.")
                            count += 1  # advance; otherwise this loop never ends
                    else:
                        count += 1
                try:
                    newsItem = NewsItem.objects.get(title=title,
                                                    schema__id=self.schema.id)
                    status = "updated"
                except NewsItem.DoesNotExist:
                    newsItem = NewsItem()
                    status = "added"
                except NewsItem.MultipleObjectsReturned:
                    logger.warn("Multiple entries matched title %r, event titles are not unique?" % title)
                    continue
                try:
                    newsItem.schema = self.schema
                    count = 0
                    while count != 9:
                        if 'ns' + str(count) + ':description' in entry:
                            if entry['ns' + str(count) + ':description'] != '':
                                newsItem.description = entry['ns' + str(count) + ':description']
                                break
                            else:
                                logger.info("Skipping %r as description is empty." % (title))
                                count += 1  # advance; otherwise this loop never ends
                        else:
                            count += 1
                    newsItem.url = entry['ns0:link']
                    count = 0
                    while count != 9:
                        if 'ns' + str(count) + ':title' in entry:
                            if entry['ns' + str(count) + ':title'] != '':
                                newsItem.title = entry['ns' + str(count) + ':title']
                                count += 1
                            else:
                                logger.info("Skipping, as title is empty.")
                                count += 1  # advance; otherwise this loop never ends
                        else:
                            count += 1
                    # newsItem.item_date = datetime.datetime.now()
                    count = 0
                    while count != 9:
                        if 'ns' + str(count) + ':published' in entry:
                            yt_timedate = string.split(entry['ns' + str(count) + ':published'], 'T')
                            break
                        else:
                            count += 1
                    date = yt_timedate[0]
                    time = string.split(yt_timedate[1], 'Z')
                    formatted = date
                    # date + " " + time[0] + "000"  # Used to include timestamps
                    newsItem.pub_date = datetime.datetime.now()
                    newsItem.item_date = formatted.encode("utf-8")
                    _short_title = newsItem.title[:30] + '...'
                    # newsItem.location_name = 'Kent'
                    count = 0
                    while count != 9:
                        if 'ns' + str(count) + ':pos' in entry:
                            long_lat = string.split(entry['ns' + str(count) + ':pos'])
                            break
                        else:
                            count += 1
                    newsItem.location = Point(float(long_lat[1]), float(long_lat[0]))
                    x, y = float(long_lat[0]), float(long_lat[1])
                    if not intersects_metro_bbox(newsItem.location):
                        # Try the coordinates in the opposite order.
                        reversed_loc = Point((float(x), float(y)))
                        if intersects_metro_bbox(reversed_loc):
                            logger.info("Got points in apparently reverse order, flipping them")
                            newsItem.location = reversed_loc
                        else:
                            logger.info("Skipping %r as %s,%s is out of bounds" % (_short_title, y, x))
                            continue
                    if not newsItem.location_name:
                        # Fall back to reverse-geocoding.
                        from ebpub.geocoder import reverse
                        try:
                            block, distance = reverse.reverse_geocode(newsItem.location)
                            logger.debug(" Reverse-geocoded point to %r" % block.pretty_name)
                            newsItem.location_name = block.pretty_name
                            newsItem.block = block
                        except reverse.ReverseGeocodeError:
                            logger.info(" Skip, failed to reverse geocode %s for %r" % (newsItem.location.wkt, _short_title))
                            continue

                    attributes_ = {}
                    attributes_['photo_href'] = entry['ns0:thumb']
                    attributes_['videoID'] = entry['ns0:video_id']
                    attributes_['searchTerm'] = searchTerm

                    newsItem.save()
                    newsItem.attributes = attributes_
                    newsItem.save()

                    if status == 'added':
                        addcount += 1
                    else:
                        updatecount += 1
                    logger.info("%s: %s" % (status, newsItem.title))
                except Exception as e:
                    logger.exception("unexpected error: %s" % e)
        logger.info("YouTube_Scraper finished: %d added, %d updated of %s total" % (addcount, updatecount, seencount))
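
The repeated "while count != 9" loops above all probe for a namespaced key ('ns0:title' through 'ns8:title', and so on) because the feed library emits unpredictable prefixes. Example #14 below solves the same problem with an ns_get() closure; a comparable standalone helper could replace these loops, sketched here (hypothetical, assuming entry supports dict-style get()):

def ns_value(entry, field, max_ns=9):
    # Return the first non-empty 'ns<N>:<field>' value, or None.
    for n in range(max_ns):
        val = entry.get('ns%d:%s' % (n, field))
        if val:
            return val
    return None

# e.g. title = ns_value(entry, 'title'); skip the entry if it is None.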
Code Example #13
File: retrieval.py Project: mesrut/openblock
    def update(self):
        logger.info("Starting LocalNewsScraper update %s" % self.url)

        try:
            schema = Schema.objects.get(slug=self.schema_slug)
        except Schema.DoesNotExist:
            logger.error( "Schema (%s): DoesNotExist" % self.schema_slug)
            return 1

        response, content = self.http.request(self.url)
        if response.fromcache:
            logger.info("Feed is unchanged since last update (cached)")
            return

        f = feedparser.parse(content)
        addcount = updatecount = 0
        for entry in f.entries:
            title = convert_entities(entry.title)
            description = convert_entities(entry.description)

            if entry.id.startswith('http'):
                item_url = entry.id
            else:
                item_url = entry.link
            try:
                item = NewsItem.objects.get(schema__id=schema.id,
                                            title=title,
                                            description=description)
                #url=item_url)
                status = 'updated'
            except NewsItem.DoesNotExist:
                item = NewsItem()
                status = 'added'
            except NewsItem.MultipleObjectsReturned:
                # Seen some where we get the same story with multiple URLs. Why?
                logger.warn("Multiple entries matched title %r and description %r. Expected unique!" % (title, description))
                continue
            try:
                item.title = title
                item.schema = schema
                item.description = description
                item.url = item_url
                # Support both georss and xcal for getting the location name.
                # TODO: should also support ev:location per http://web.resource.org/rss/1.0/modules/event/
                item.location_name = entry.get('xCal_x-calconnect-street') or entry.get('x-calconnect-street') or entry.get('georss_featurename') or entry.get('featurename')
                item.item_date = datetime.datetime(*entry.updated_parsed[:6])
                item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
                _short_title = item.title[:30] + '...'

                # feedparser bug: depending on which parser it magically uses,
                # we either get the xml namespace in the key name, or we don't.
                point = entry.get('georss_point') or entry.get('point')
                x, y = None, None
                if point:
                    # GeoRSS puts latitude (Y) first.
                    y, x = point.split(' ')
                else:
                    if item.location_name:
                        text = item.location_name
                    else:
                        # Geocode whatever we can find.
                        text = item.title + ' ' + item.description
                    logger.debug("...Falling back on geocoding from %r..." % text[:50])
                    addrs = parse_addresses(text)
                    for addr, unused in addrs:
                        try:
                            result = SmartGeocoder().geocode(addr)
                            point = result['point']
                            logger.debug("internally geocoded %r" % addr)
                            x, y = point.x, point.y
                            if not item.location_name:
                                item.location_name = result['address']
                            item.block = result['block']
                            break
                        except GeocodingException:
                            logger.debug("Geocoding exception on %r:" % text)
                            log_exception(level=logging.DEBUG)
                            continue
                        except:
                            logger.error('uncaught geocoder exception on %r\n' % addr)
                            log_exception()
                    if None in (x, y):
                        logger.debug("Skip, couldn't geocode any addresses in item '%s...'"
                                     % _short_title)
                        continue
                item.location = Point((float(x), float(y)))
                if not intersects_metro_bbox(item.location):
                    reversed_loc = Point((float(y), float(x)))
                    if intersects_metro_bbox(reversed_loc):
                        logger.info(
                            "Got points in apparently reverse order, flipping them")
                        item.location = reversed_loc
                    else:
                        logger.info("Skipping %r as %s,%s is out of bounds" %
                                    (_short_title, y, x))
                        continue
                if not item.location_name:
                    # Fall back to reverse-geocoding.
                    from ebpub.geocoder import reverse
                    try:
                        block, distance = reverse.reverse_geocode(item.location)
                        logger.debug(" Reverse-geocoded point to %r" % block.pretty_name)
                        item.location_name = block.pretty_name
                        item.block = block
                    except reverse.ReverseGeocodeError:
                        logger.info(" Skip, failed to reverse geocode %s for %r" % (item.location.wkt, _short_title))
                        continue
                item.save()
                if status == 'added':
                    addcount += 1
                else:
                    updatecount += 1
                logger.info("%s: %s" % (status, _short_title))
            except:
                logger.error("Warning: couldn't save %r. Traceback:" % _short_title)
                log_exception()
        logger.info("Finished LocalNewsScraper update: %d added, %d updated" % (addcount, updatecount))
Code Example #14
File: retrieval.py Project: mesrut/openblock
    def update(self):
        """ Download Calendar RSS feed and update database """
        logger.info("Starting EventsCalendarScraper")
        
        feed = feedparser.parse(self.url)
        seencount = addcount = updatecount = 0
        for entry in feed.entries:

            def ns_get(element):
                # work around feedparser unpredictability.
                namespace, element = element.split(':')
                result = entry.get('%s_%s' % (namespace, element))
                if result is None:
                    result = entry.get(element)
                return result

            seencount += 1
            title = convert_entities(entry.title)
            try:
                item = NewsItem.objects.get(title=title,
                                            schema__id=self.schema.id)
                status = "updated"
            except NewsItem.DoesNotExist:
                item = NewsItem()
                status = "added"
            except NewsItem.MultipleObjectsReturned:
                logger.warn("Multiple entries matched title %r, event titles are not unique?" % title)
                continue
            try:
                item.location_name = '%s %s' % (ns_get('xcal:x-calconnect-venue-name'),
                                                ns_get('xcal:x-calconnect-street'))
                item.location_name = item.location_name.strip()
                item.schema = self.schema
                item.title = title
                item.description = convert_entities(entry.description)
                item.url = entry.link
                start_dt = ns_get('xcal:dtstart')
                start_dt = dateutil.parser.parse(start_dt)
                # Upstream bug: They provide a UTC offset of +0000 which
                # means times in UTC, but they're actually times in
                # US/Eastern, so do *not* fix the zone.
                #start_dt = start_dt.astimezone(local_tz)
                item.item_date = start_dt.date()
                item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
                item.location = Point((float(ns_get('geo:long')),
                                       float(ns_get('geo:lat'))))
                if (item.location.x, item.location.y) == (0.0, 0.0):
                    logger.warn("Skipping %r, bad location 0,0" % item.title)
                    continue

                if not item.location_name:
                    # Fall back to reverse-geocoding.
                    from ebpub.geocoder import reverse
                    try:
                        block, distance = reverse.reverse_geocode(item.location)
                        logger.info(" Reverse-geocoded point to %r" % block.pretty_name)
                        item.location_name = block.pretty_name
                        item.block = block
                    except reverse.ReverseGeocodeError:
                        logger.debug(" Failed to reverse geocode %s for %r" % (item.location.wkt, item.title))
                        item.location_name = u''

                item.save()
                item.attributes['start_time'] = start_dt.time()
                end_dt = ns_get('xcal:dtend') or u''
                if end_dt.strip():
                    end_dt = dateutil.parser.parse(end_dt.strip())
                    #end_dt = end_dt.astimezone(local_tz)
                    item.attributes['end_time'] = end_dt.time()
                if status == 'added':
                    addcount += 1
                else:
                    updatecount += 1
                logger.info("%s: %s" % (status, item.title))
            except Exception:
                logger.error("unexpected error: %s" % sys.exc_info()[1])
                log_exception()
        logger.info("EventsCalendarScraper finished: %d added, %d updated of %s total" % (addcount, updatecount, seencount))
Code Example #15
File: add_events.py Project: slinkp/openblock
def update():
    """ Download Calendar RSS feed and update database """
    logger.info("Starting add_events")
    url = """http://calendar.boston.com/search?acat=&cat=&commit=Search\
&new=n&rss=1&search=true&sort=0&srad=20&srss=50&ssrss=5&st=event\
&st_select=any&svt=text&swhat=&swhen=today&swhere=&trim=1"""
    schema = 'events'

    try:
        schema = Schema.objects.get(slug=schema)
    except Schema.DoesNotExist:
        logger.error("Schema (%s): DoesNotExist" % schema)
        sys.exit(1)

    feed = feedparser.parse(url)
    addcount = updatecount = 0
    for entry in feed.entries:
        title = convert_entities(entry.title).strip()
        # Putting 'event' in the title is redundant, ticket #227
        if title.lower().startswith('event: '):
            title = title[7:]
        try:
            item = NewsItem.objects.get(title=title, schema__id=schema.id)
            status = "updated"
        except NewsItem.DoesNotExist:
            item = NewsItem()
            status = "added"
        except NewsItem.MultipleObjectsReturned:
            logger.warn(
                "Multiple entries matched title %r, event titles are not unique?"
                % title)
            continue
        try:
            item.location_name = entry.get(
                'xcal_x-calconnect-street') or entry.get(
                    'x-calconnect-street') or u''
            item.schema = schema
            item.title = title
            item.description = convert_entities(entry.description)
            item.url = entry.link
            item.item_date = datetime.datetime(*entry.updated_parsed[:6])
            item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
            item.location = Point(
                (float(entry['geo_long']), float(entry['geo_lat'])))
            if (item.location.x, item.location.y) == (0.0, 0.0):
                logger.warn("Skipping %r, bad location 0,0" % item.title)
                continue

            if not item.location_name:
                # Fall back to reverse-geocoding.
                from ebpub.geocoder import reverse
                try:
                    block, distance = reverse.reverse_geocode(item.location)
                    logger.info(" Reverse-geocoded point to %r" %
                                block.pretty_name)
                    item.location_name = block.pretty_name
                except reverse.ReverseGeocodeError:
                    logger.debug(" Failed to reverse geocode %s for %r" %
                                 (item.location.wkt, item.title))
                    item.location_name = u''

            item.save()
            if status == 'added':
                addcount += 1
            else:
                updatecount += 1
            logger.info("%s: %s" % (status, item.title))
        except Exception:
            logger.exception("unexpected error: %s" % sys.exc_info()[1])

    logger.info("add_events finished: %d added, %d updated" %
                (addcount, updatecount))
Code Example #16
File: add_news.py Project: egrommet/openblock
def main(argv=None):
    if argv:
        url = argv[0]
    else:
        url = 'http://search.boston.com/search/api?q=*&sort=-articleprintpublicationdate&subject=massachusetts&scope=bonzai'
    schema = 'local-news'

    try:
        schema = Schema.objects.get(slug=schema)
    except Schema.DoesNotExist:
        print "Schema (%s): DoesNotExist" % schema
        sys.exit(1)

    f = feedparser.parse(url)

    for e in f.entries:
        try:
            item = NewsItem.objects.get(title=e.title, description=e.description)
            print "Already have %r (id %d)" % (item.title, item.id)
        except NewsItem.DoesNotExist:
            item = NewsItem()
        try:
            item.schema = schema
            item.title = convert_entities(e.title)
            item.description = convert_entities(e.description)
            item.url = e.link
            item.location_name = e.get('x-calconnect-street') or e.get('georss_featurename')
            item.item_date = datetime.datetime(*e.updated_parsed[:6])
            item.pub_date = datetime.datetime(*e.updated_parsed[:6])
            if 'point' in e:
                x,y = e.point.split(' ')
            elif 'georss_point' in e:
                x,y = e.georss_point.split(' ')
            else:
                text = item.title + ' ' + item.description
                from geocoder_hack import quick_dirty_fallback_geocode
                x, y = quick_dirty_fallback_geocode(text, parse=True)
                if None in (x, y):
                    print " couldn't geocode '%s...'" % item.title[:30]
                    continue
            item.location = Point((float(y), float(x)))
            if item.location.x == 0.0 and item.location.y == 0.0:
                # There's a lot of these. Maybe attempt to
                # parse and geocode if we haven't already?
                print "Skipping %r as it has bad location 0,0" % item.title
                continue
            if not item.location_name:
                # Fall back to reverse-geocoding.
                from ebpub.geocoder import reverse
                try:
                    block, distance = reverse.reverse_geocode(item.location)
                    print " Reverse-geocoded point to %r" % block.pretty_name
                    item.location_name = block.pretty_name
                except reverse.ReverseGeocodeError:
                    print " Failed to reverse geocode %s for %r" % (item.location.wkt, item.title)
                    item.location_name = u''
            item.save()
            print "Added: %s" % item.title
        except:
            print "Warning: couldn't save %r. Traceback:" % item.title
            import cStringIO, traceback
            f = cStringIO.StringIO()
            traceback.print_exc(file=f)
            msg = f.getvalue()
            print msg
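
The manual cStringIO/traceback block at the end of this example reimplements what the stdlib already provides: Logger.exception() logs a message with the active traceback appended. A sketch of the equivalent, assuming a configured logger as in the other examples:

import logging
logger = logging.getLogger(__name__)

def save_item(item):
    # ``item`` is a NewsItem as in the example above.
    try:
        item.save()
    except Exception:
        # exception() must be called inside an except block; it appends
        # the current traceback to the logged message.
        logger.exception("Warning: couldn't save %r" % item.title)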
Code Example #17
File: add_events.py Project: DotNetWebs/openblock
def update():
    """ Download Calendar RSS feed and update database """
    logger.info("Starting add_events")
    url = """http://calendar.boston.com/search?acat=&cat=&commit=Search\
&new=n&rss=1&search=true&sort=0&srad=20&srss=50&ssrss=5&st=event\
&st_select=any&svt=text&swhat=&swhen=today&swhere=&trim=1"""
    schema = 'events'


    try:
        schema = Schema.objects.get(slug=schema)
    except Schema.DoesNotExist:
        logger.error("Schema (%s): DoesNotExist" % schema)
        sys.exit(1)

    feed = feedparser.parse(url)
    addcount = updatecount = 0
    for entry in feed.entries:
        title = convert_entities(entry.title).strip()
        # Putting 'event' in the title is redundant, ticket #227
        if title.lower().startswith('event: '):
            title = title[7:]
        try:
            item = NewsItem.objects.get(title=title,
                                        schema__id=schema.id)
            status = "updated"
        except NewsItem.DoesNotExist:
            item = NewsItem()
            status = "added"
        except NewsItem.MultipleObjectsReturned:
            logger.warn("Multiple entries matched title %r, event titles are not unique?" % title)
            continue
        try:
            item.location_name = entry.get('xcal_x-calconnect-street') or entry.get('x-calconnect-street') or u''
            item.schema = schema
            item.title = title
            item.description = convert_entities(entry.description)
            item.url = entry.link
            item.item_date = datetime.datetime(*entry.updated_parsed[:6])
            item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
            item.location = Point((float(entry['geo_long']),
                                   float(entry['geo_lat'])))
            if (item.location.x, item.location.y) == (0.0, 0.0):
                logger.warn("Skipping %r, bad location 0,0" % item.title)
                continue

            if not item.location_name:
                # Fall back to reverse-geocoding.
                from ebpub.geocoder import reverse
                try:
                    block, distance = reverse.reverse_geocode(item.location)
                    logger.info(" Reverse-geocoded point to %r" % block.pretty_name)
                    item.location_name = block.pretty_name
                except reverse.ReverseGeocodeError:
                    logger.debug(" Failed to reverse geocode %s for %r" % (item.location.wkt, item.title))
                    item.location_name = u''

            item.save()
            if status == 'added':
                addcount += 1
            else:
                updatecount += 1
            logger.info("%s: %s" % (status, item.title))
        except Exception:
            logger.exception("unexpected error: %s" % sys.exc_info()[1])

    logger.info("add_events finished: %d added, %d updated" % (addcount, updatecount))
Code Example #18
    def update(self):
        logger.info("Starting LocalNewsScraper update %s" % self.url)

        try:
            schema = Schema.objects.get(slug=self.schema_slug)
        except Schema.DoesNotExist:
            logger.error("Schema (%s): DoesNotExist" % self.schema_slug)
            return 1

        response, content = self.http.request(self.url)
        if response.fromcache:
            logger.info("Feed is unchanged since last update (cached)")
            return

        f = feedparser.parse(content)
        addcount = updatecount = 0
        for entry in f.entries:
            title = convert_entities(entry.title)
            description = convert_entities(entry.description)

            if entry.id.startswith('http'):
                item_url = entry.id
            else:
                item_url = entry.link
            try:
                item = NewsItem.objects.get(schema__id=schema.id,
                                            title=title,
                                            description=description)
                #url=item_url)
                status = 'updated'
            except NewsItem.DoesNotExist:
                item = NewsItem()
                status = 'added'
            except NewsItem.MultipleObjectsReturned:
                # Seen some where we get the same story with multiple URLs. Why?
                logger.warn(
                    "Multiple entries matched title %r and description %r. Expected unique!"
                    % (title, description))
                continue
            try:
                item.title = title
                item.schema = schema
                item.description = description
                item.url = item_url
                # Support both georss and xcal for getting the location name.
                # TODO: should also support ev:location per http://web.resource.org/rss/1.0/modules/event/
                item.location_name = entry.get(
                    'xCal_x-calconnect-street') or entry.get(
                        'x-calconnect-street') or entry.get(
                            'georss_featurename') or entry.get('featurename')
                item.item_date = datetime.datetime(*entry.updated_parsed[:6])
                item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
                _short_title = item.title[:30] + '...'

                # feedparser bug: depending on which parser it magically uses,
                # we either get the xml namespace in the key name, or we don't.
                point = entry.get('georss_point') or entry.get('point')
                x, y = None, None
                if point:
                    # GeoRSS puts latitude (Y) first.
                    y, x = point.split(' ')
                else:
                    if item.location_name:
                        text = item.location_name
                    else:
                        # Geocode whatever we can find.
                        text = item.title + ' ' + item.description
                    logger.debug("...Falling back on geocoding from %r..." %
                                 text[:50])
                    addrs = parse_addresses(text)
                    for addr, unused in addrs:
                        try:
                            result = SmartGeocoder().geocode(addr)
                            point = result['point']
                            logger.debug("internally geocoded %r" % addr)
                            x, y = point.x, point.y
                            if not item.location_name:
                                item.location_name = result['address']
                            break
                        except GeocodingException:
                            logger.debug("Geocoding exception on %r:" % text)
                            log_exception(level=logging.DEBUG)
                            continue
                        except:
                            logger.error(
                                'uncaught geocoder exception on %r\n' % addr)
                            log_exception()
                    if None in (x, y):
                        logger.debug(
                            "Skip, couldn't geocode any addresses in item '%s...'"
                            % _short_title)
                        continue
                item.location = Point((float(x), float(y)))
                if not intersects_metro_bbox(item.location):
                    reversed_loc = Point((float(y), float(x)))
                    if intersects_metro_bbox(reversed_loc):
                        logger.info(
                            "Got points in apparently reverse order, flipping them"
                        )
                        item.location = reversed_loc
                    else:
                        logger.info("Skipping %r as %s,%s is out of bounds" %
                                    (_short_title, y, x))
                        continue
                if not item.location_name:
                    # Fall back to reverse-geocoding.
                    from ebpub.geocoder import reverse
                    try:
                        block, distance = reverse.reverse_geocode(
                            item.location)
                        logger.debug(" Reverse-geocoded point to %r" %
                                     block.pretty_name)
                        item.location_name = block.pretty_name
                    except reverse.ReverseGeocodeError:
                        logger.info(
                            " Skip, failed to reverse geocode %s for %r" %
                            (item.location.wkt, _short_title))
                        continue
                item.save()
                if status == 'added':
                    addcount += 1
                else:
                    updatecount += 1
                logger.info("%s: %s" % (status, _short_title))
            except:
                logger.error("Warning: couldn't save %r. Traceback:" %
                             _short_title)
                log_exception()
        logger.info("Finished LocalNewsScraper update: %d added, %d updated" %
                    (addcount, updatecount))
Code Example #19
    def update(self):
        """ Download Calendar RSS feed and update database """
        logger.info("Starting EventsCalendarScraper")

        feed = feedparser.parse(self.url)
        seencount = addcount = updatecount = 0
        for entry in feed.entries:

            def ns_get(element):
                # work around feedparser unpredictability.
                namespace, element = element.split(':')
                result = entry.get('%s_%s' % (namespace, element))
                if result is None:
                    result = entry.get(element)
                return result

            seencount += 1
            title = convert_entities(entry.title)
            try:
                item = NewsItem.objects.get(title=title,
                                            schema__id=self.schema.id)
                status = "updated"
            except NewsItem.DoesNotExist:
                item = NewsItem()
                status = "added"
            except NewsItem.MultipleObjectsReturned:
                logger.warn(
                    "Multiple entries matched title %r, event titles are not unique?"
                    % title)
                continue
            try:
                item.location_name = '%s %s' % (
                    ns_get('xcal:x-calconnect-venue-name'),
                    ns_get('xcal:x-calconnect-street'))
                item.location_name = item.location_name.strip()
                item.schema = self.schema
                item.title = title
                item.description = convert_entities(entry.description)
                item.url = entry.link
                start_dt = ns_get('xcal:dtstart')
                start_dt = dateutil.parser.parse(start_dt)
                # Upstream bug: They provide a UTC offset of +0000 which
                # means times in UTC, but they're actually times in
                # US/Eastern, so do *not* fix the zone.
                #start_dt = start_dt.astimezone(local_tz)
                item.item_date = start_dt.date()
                item.pub_date = datetime.datetime(*entry.updated_parsed[:6])
                item.location = Point(
                    (float(ns_get('geo:long')), float(ns_get('geo:lat'))))
                if (item.location.x, item.location.y) == (0.0, 0.0):
                    logger.warn("Skipping %r, bad location 0,0" % item.title)
                    continue

                if not item.location_name:
                    # Fall back to reverse-geocoding.
                    from ebpub.geocoder import reverse
                    try:
                        block, distance = reverse.reverse_geocode(
                            item.location)
                        logger.info(" Reverse-geocoded point to %r" %
                                    block.pretty_name)
                        item.location_name = block.pretty_name
                    except reverse.ReverseGeocodeError:
                        logger.debug(" Failed to reverse geocode %s for %r" %
                                     (item.location.wkt, item.title))
                        item.location_name = u''

                item.save()
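                # NewsItem.attributes writes through to the database, so the
                # item must already be saved (have a primary key) before
                # these attribute assignments.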
                item.attributes['start_time'] = start_dt.time()
                end_dt = ns_get('xcal:dtend') or u''
                if end_dt.strip():
                    end_dt = dateutil.parser.parse(end_dt.strip())
                    #end_dt = end_dt.astimezone(local_tz)
                    item.attributes['end_time'] = end_dt.time()
                if status == 'added':
                    addcount += 1
                else:
                    updatecount += 1
                logger.info("%s: %s" % (status, item.title))
            except Exception as e:
                logger.exception("unexpected error: %s" % e)
        logger.info(
            "EventsCalendarScraper finished: %d added, %d updated of %s total"
            % (addcount, updatecount, seencount))
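The ns_get closure above works around a feedparser quirk that example #20 also notes: depending on which underlying XML parser feedparser ends up using, a namespaced element such as xcal:dtstart may surface as entry['xcal_dtstart'] or as plain entry['dtstart']. A standalone sketch of the same lookup (the feed URL is illustrative):

import feedparser

def ns_get(entry, qualified_name):
    # Try the namespaced key first, then fall back to the bare element name.
    namespace, element = qualified_name.split(':')
    result = entry.get('%s_%s' % (namespace, element))
    if result is None:
        result = entry.get(element)
    return result

feed = feedparser.parse('http://example.com/events.rss')  # illustrative URL
for entry in feed.entries:
    print ns_get(entry, 'geo:lat'), ns_get(entry, 'geo:long')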
Code example #20
File: add_news.py Project: horshacktest/openblock
def update(argv=None):
    logger.info("Starting add_news")
    if argv:
        url = argv[0]
    else:
        url = 'http://search.boston.com/search/api?q=*&sort=-articleprintpublicationdate&subject=massachusetts&scope=bonzai'
    schema_slug = 'local-news'

    try:
        schema = Schema.objects.get(slug=schema_slug)
    except Schema.DoesNotExist:
        logger.error( "Schema (%s): DoesNotExist" % schema_slug)
        sys.exit(1)

    f = feedparser.parse(url)
    addcount = updatecount = 0
    for entry in f.entries:
        title = convert_entities(entry.title)
        description = convert_entities(entry.description)

        if entry.id.startswith('http'):
            item_url = entry.id
        else:
            item_url = entry.link
        try:
            item = NewsItem.objects.get(schema__id=schema.id,
                                        title=title,
                                        description=description)
            #url=item_url)
            status = 'updated'
        except NewsItem.DoesNotExist:
            item = NewsItem()
            status = 'added'
        except NewsItem.MultipleObjectsReturned:
            # Seen some where we get the same story with multiple URLs. Why?
            logger.warn("Multiple entries matched title %r and description %r. Expected unique!" % (title, description))
            continue
        try:
            item.title = title
            item.schema = schema
            item.description = description
            item.url = item_url
            item.location_name = entry.get('x-calconnect-street') or entry.get('georss_featurename')
            item.item_date = datetime.datetime(*entry.updated_parsed[:6])
            item.pub_date = datetime.datetime(*entry.updated_parsed[:6])

            # feedparser bug: depending on which parser it magically uses,
            # we either get the xml namespace in the key name, or we don't.
            point = entry.get('georss_point') or entry.get('point')
            x, y = None, None
            if point:
                x, y = point.split(' ')
            # Always attempt to geocode an address out of the item text; a
            # successful geocode takes precedence over the feed-supplied point.
            text = item.title + ' ' + item.description
            addrs = parse_addresses(text)
            for addr, unused in addrs:
                try:
                    result = SmartGeocoder().geocode(addr)
                    point = result['point']
                    logger.debug("internally geocoded %r" % addr)
                    # Keep (x, y) in the same (lat, lon) order as the
                    # georss:point branch above; GEOS points are (lon, lat).
                    x, y = point.y, point.x
                    break
                except (GeocodingException, ParsingError):
                    logger.debug("Geocoding exception on %r:" % text,
                                 exc_info=True)
                    continue
                except:
                    logger.exception('uncaught geocoder exception on %r\n' % addr)
            if None in (x, y):
                logger.info("couldn't geocode '%s...'" % item.title[:30])
                continue
            item.location = Point((float(y), float(x)))
            if item.location.x == 0.0 and item.location.y == 0.0:
                # There's a lot of these. Maybe attempt to
                # parse and geocode if we haven't already?
                logger.info("Skipping %r as it has bad location 0,0" % item.title)
                continue
            if not item.location_name:
                # Fall back to reverse-geocoding.
                from ebpub.geocoder import reverse
                try:
                    block, distance = reverse.reverse_geocode(item.location)
                    logger.debug(" Reverse-geocoded point to %r" % block.pretty_name)
                    item.location_name = block.pretty_name
                except reverse.ReverseGeocodeError:
                    logger.debug(" Failed to reverse geocode %s for %r" % (item.location.wkt, item.title))
                    item.location_name = u''
            item.save()
            if status == 'added':
                addcount += 1
            else:
                updatecount += 1
            logger.info("%s: %s" % (status, item.title))
        except:
            logger.exception("Warning: couldn't save %r. Traceback:" % item.title)

    logger.info("Finished add_news: %d added, %d updated" % (addcount, updatecount))
Code example #21
def main(argv=None):
    logger.info("Starting add_news")
    if argv:
        url = argv[0]
    else:
        url = 'http://search.boston.com/search/api?q=*&sort=-articleprintpublicationdate&subject=massachusetts&scope=bonzai'
    schema_slug = 'local-news'

    try:
        schema = Schema.objects.get(slug=schema_slug)
    except Schema.DoesNotExist:
        logger.error("Schema (%s): DoesNotExist" % schema_slug)
        sys.exit(1)

    f = feedparser.parse(url)
    addcount = updatecount = 0
    for entry in f.entries:
        title = convert_entities(entry.title)
        description = convert_entities(entry.description)

        if entry.id.startswith('http'):
            item_url = entry.id
        else:
            item_url = entry.link
        try:
            item = NewsItem.objects.get(schema__id=schema.id,
                                        title=title,
                                        description=description)
            #url=item_url)
            status = 'updated'
        except NewsItem.DoesNotExist:
            item = NewsItem()
            status = 'added'
        except NewsItem.MultipleObjectsReturned:
            # Seen some where we get the same story with multiple URLs. Why?
            logger.warn(
                "Multiple entries matched title %r and description %r. Expected unique!"
                % (title, description))
            continue
        try:
            item.title = title
            item.schema = schema
            item.description = description
            item.url = item_url
            item.location_name = entry.get('x-calconnect-street') or entry.get(
                'georss_featurename')
            item.item_date = datetime.datetime(*entry.updated_parsed[:6])
            item.pub_date = datetime.datetime(*entry.updated_parsed[:6])

            # feedparser bug: depending on which parser it magically uses,
            # we either get the xml namespace in the key name, or we don't.
            point = entry.get('georss_point') or entry.get('point')
            if point:
                x, y = point.split(' ')
            else:
                # Fall back on geocoding.
                text = item.title + ' ' + item.description
                try:
                    x, y = quick_dirty_fallback_geocode(text, parse=True)
                except GeocodingException:
                    logger.debug("Geocoding exception on %r:" % text)
                    log_exception(level=logging.DEBUG)
                    continue
                if None in (x, y):
                    logger.info("couldn't geocode '%s...'" % item.title[:30])
                    continue
            item.location = Point((float(y), float(x)))
            if item.location.x == 0.0 and item.location.y == 0.0:
                # There's a lot of these. Maybe attempt to
                # parse and geocode if we haven't already?
                logger.info("Skipping %r as it has bad location 0,0" %
                            item.title)
                continue
            if not item.location_name:
                # Fall back to reverse-geocoding.
                from ebpub.geocoder import reverse
                try:
                    block, distance = reverse.reverse_geocode(item.location)
                    logger.debug(" Reverse-geocoded point to %r" %
                                 block.pretty_name)
                    item.location_name = block.pretty_name
                    item.block = block
                except reverse.ReverseGeocodeError:
                    logger.debug(" Failed to reverse geocode %s for %r" %
                                 (item.location.wkt, item.title))
                    item.location_name = u''
            item.save()
            if status == 'added':
                addcount += 1
            else:
                updatecount += 1
            logger.info("%s: %s" % (status, item.title))
        except:
            logger.error("Warning: couldn't save %r. Traceback:" % item.title)
            log_exception()
    logger.info("Finished add_news: %d added, %d updated" %
                (addcount, updatecount))
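All three feed scrapers share the same lookup shape: fetch an existing NewsItem by a natural key, create a fresh one on DoesNotExist, and skip the entry when the key is ambiguous. A hypothetical helper distilling that pattern (the name and signature are ours, not openblock's):

import logging

logger = logging.getLogger(__name__)

def get_or_new(model, **natural_key):
    """Return (instance, status), or (None, None) when the key is ambiguous."""
    try:
        return model.objects.get(**natural_key), 'updated'
    except model.DoesNotExist:
        return model(), 'added'
    except model.MultipleObjectsReturned:
        logger.warn("Multiple entries matched %r. Expected unique!", natural_key)
        return None, None

With this in place, each loop body reduces to item, status = get_or_new(NewsItem, schema__id=schema.id, title=title), followed by a continue when item is None.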