def contains_coordinate(self, lat, lon):
    """Return whether the given lat/lon point lies within this PolyModel.

    Returns None when GIS support is disabled, False when no polygon is
    stored, otherwise the result of a geometric containment test.
    """
    if not USE_GIS:
        return None
    if not self.poly:
        return False
    # WKT puts longitude first.
    point = geo_from_str("POINT(%s %s)" % (lon, lat))
    # The polygon may be stored either as WKT text or as a geometry object.
    shape = geo_from_str(self.poly) if isinstance(self.poly, basestring) else self.poly
    return shape.contains(point)
def center(self):
    """Return the Point at the centroid of this object's shape.

    Returns None when no polygon is stored, and None on any failure to
    compute the centroid (e.g. unparseable geometry).
    """
    if not self.poly:
        return None
    try:
        # The polygon may be stored either as WKT text or as a geometry object.
        if isinstance(self.poly, basestring):
            return geo_from_str(self.poly).centroid
        return self.poly.centroid
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; narrowed to Exception, same fallback result.
        return None
def handle_noargs(self, **options): # We need HTTP basic auth, so build a urlopener for this password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() top_level_url = "http://stream.twitter.com/" password_mgr.add_password(None, top_level_url, getattr(settings,'TWITTER_USERNAME',''), getattr(settings,'TWITTER_PASSWORD','')) handler = urllib2.HTTPBasicAuthHandler(password_mgr) opener = urllib2.build_opener(handler) # Open stream f = opener.open("http://stream.twitter.com/1/statuses/sample.json") # Buffered read through the stream (possible since file objects are iterable by line) for line in f: data = json.loads("[%s]"%line) tweet = data[0] # http://dev.twitter.com/doc/get/statuses/public_timeline # for example tweet payload # Only looking for tweets with geolocation data if tweet.has_key('id') and tweet.has_key('geo') and tweet['geo']: strval = "https://twitter.com/%s/status/%s" % ( tweet['user']['screen_name'], tweet['id'], ) latitude = tweet['geo']['coordinates'][0] longitude = tweet['geo']['coordinates'][1] coordstr = "POINT(%s %s)" % (longitude, latitude) # Save data p = TweetPoint( name=u"%s" % strval, geometry=geo_from_str(coordstr) ) p.save() print p.name print coordstr print f.close()
def handle(self, *args, **options):
    """Parse the prepared kazni.txt given as args[0] and import its rows.

    Each input line is 'index;name;date;penalty'.  Rows whose index is
    already stored are skipped; geometry is reused from an identically
    named street when one exists, otherwise it is geocoded via Google.
    """
    # NOTE(review): a Django command's help text belongs on the class
    # (`help = ...` attribute); the original assigned it to a dead local.
    LOG_FILENAME = 'kazni_parser.log'
    logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
    # NOTE(review): hard-coded Google Maps API key -- should come from settings.
    g = geocoders.Google('ABQIAAAAVlqrrVSbz3C1GeepsZahiRQlxAPqwOs55Ezp72xHBnWJQMD8ORTctpF5AovPLRFPNxMhkOQYYTLKpw')
    # Compile once (raw string) instead of re.search with a plain literal per line.
    date_pattern = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{4})\s(\d{1,2}):(\d{2})')
    # Renamed from 'file', which shadowed the builtin; closed in finally below.
    infile = open(args[0], 'r')
    try:
        for line in infile:
            # Split once instead of four times per line.
            fields = line.split(';')
            index = int(fields[0])
            name = fields[1]
            date_parsed = fields[2]
            penalty_parsed = fields[3].strip()
            try:
                penalty = int(penalty_parsed)
            except ValueError:
                # Amounts like '1.234,56': drop the decimal part, strip
                # thousands separators.  (Was a bare except:.)
                penalty = int(penalty_parsed.rpartition(',')[0].replace('.', ''))
            date_re = date_pattern.search(date_parsed)
            date = datetime.datetime(int(date_re.group(3)), int(date_re.group(2)),
                                     int(date_re.group(1)), int(date_re.group(4)),
                                     int(date_re.group(5)))
            logging.debug('Processing: %d, %s' % (index, name))
            # if index already in db, don't import
            if not KazenTocka.objects.filter(index__exact=index):
                # if name already exists in db, use its geometry
                if KazenTocka.objects.filter(name__exact=name).count() != 0:
                    geometry = KazenTocka.objects.filter(name__exact=name)[0].geometry
                    entry = KazenTocka(
                        name=name,
                        index=index,
                        geometry=geometry,
                        date=date,
                        penalty=penalty,
                    )
                    entry.save()
                    logging.debug('Using geometry from from an already inserted street.')
                    logging.debug('Inserted: %d, %s, %s, %s, %s' % (index, name, geometry, date, penalty))
                # if it's not in the db, ask google for geometry
                else:
                    try:
                        # wait a bit so we won't make google angry
                        time.sleep(0.5)
                        place = list(g.geocode(name, exactly_one=False))[0]
                        latitude = place[1][0]
                        longitude = place[1][1]
                        coordstr = "POINT(%s %s)" % (longitude, latitude)
                        # BUG FIX: 'geometry' was never assigned in this
                        # branch, so the 'Inserted' log below raised a
                        # NameError (silently swallowed by the Exception
                        # handler) or logged a stale value from a previous
                        # iteration.  Bind it explicitly here.
                        geometry = geo_from_str(coordstr)
                        entry = KazenTocka(
                            name=name,
                            index=index,
                            geometry=geometry,
                            date=date,
                            penalty=penalty,
                        )
                        entry.save()
                        logging.debug('Using geometry from google')
                        logging.debug('Inserted: %d, %s, %s, %s, %s' % (index, name, geometry, date, penalty))
                    except geocoders.google.GTooManyQueriesError:
                        logging.debug('Google blocked us!')
                        sys.exit(1)
                    except Exception as e:
                        logging.debug(e)
            else:
                logging.debug('Skipping, index already in the db...')
    finally:
        infile.close()