def get_coordinates(self):
    """Geocode the 'address' query parameter of the current request path.

    Returns:
        The geocoder's result for the first 'address' value, or None
        when the query string has no 'address' parameter.
    """
    query = parse_qs(urlparse(self.path).query)
    address_values = query.get('address')
    if address_values is None:
        return None
    return Geocoder().geocode(address_values[0])
示例#2
0
    def search(self,
               keyword=None,
               city=None,
               state=None,
               coordinates=None,
               radius='5mi',
               num=20):

        auth = tweepy.OAuthHandler(k.consumer_key, k.consumer_secret)
        auth.set_access_token(k.access_token, k.access_token_secret)
        api = tweepy.API(auth)

        geo = Geocoder()

        print '\nGathering tweets...\n'

        #kcs
        if keyword is not None and city is not None and state is not None and coordinates is None:
            coordinates = geo.getCoordinates(city, state)
            coordinate_str = '{lat},{lng},'.format(**coordinates) + radius
            tweets = api.search(q=keyword, geocode=coordinate_str, count=num)

        #cs
        if keyword is None and city is not None and state is not None and coordinates is None:
            coordinates = geo.getCoordinates(city, state)
            coordinate_str = '{lat},{lng},'.format(**coordinates) + radius
            tweets = api.search(geocode=coordinate_str, count=num)

        #kl
        if keyword is not None and city is None and state is None and coordinates is not None:
            coordinate_str = '{lat},{lng},'.format(**coordinates) + radius
            tweets = api.search(q=keyword, geocode=coordinate_str, count=num)

        #l
        if keyword is None and city is None and state is None and coordinates is not None:
            coordinate_str = '{lat},{lng},'.format(**coordinates) + radius
            tweets = api.search(geocode=coordinate_str, count=num)

        #k
        if keyword is not None and city is None and state is None and coordinates is None:
            tweets = api.search(q=keyword, count=num)

        #st

        tweets.sort(reverse=True, key=lambda x: x.author.followers_count)

        print '\nAnalyzing data...\n'
        a = SentimentAnalyzer()
        return [{
            'author': t.author.name,
            "tweet": t.text,
            "id": t.id,
            "sentiment": a.analyze(t.text)
        } for t in tweets]
 def _geocode(self, lat, lon):
     """Reverse-geocode a point and attach the resulting region fields
     (block group, neighborhoods, council district, urban village, zip)
     to this instance.

     Returns:
         self, to allow call chaining.
     """
     frame = Geocoder().geocode_point((float(lat), float(lon)))
     # Map each instance attribute to the geocoded column it comes from.
     attribute_columns = (
         ('block_group', cn.BLOCK_GROUP),
         ('neighborhood_long', cn.NBHD_LONG),
         ('neighborhood_short', cn.NBHD_SHORT),
         ('council_district', cn.COUNCIL_DISTRICT),
         ('urban_village', cn.URBAN_VILLAGE),
         ('zipcode', cn.ZIPCODE),
     )
     for attribute, column in attribute_columns:
         setattr(self, attribute, frame[column].item())
     return self
示例#4
0
    def calculate_coords(self):
        """Resolve self.city_name into self.lat / self.lon.

        Does nothing when no city name is set, or when the geocoder
        cannot resolve it.
        """
        if self.city_name is None:
            return

        geocode = Geocoder(self.city_name).get_geocode()
        if geocode is None:
            return

        self.lat, self.lon = geocode["lat"], geocode["lng"]
示例#5
0
def main():
    """Connect to the database with credentials read from the .env file
    and run the geocoder over both connections.

    The connections are closed in a finally block, so they are released
    even when geocode() raises (previously they leaked on error).
    """
    env = dotenv_values()
    db_creds = {
        key: env[key]
        for key in ["dbname", "user", "password", "host", "port"]
    }
    conn = db.connect(**db_creds)
    iter_conn = db.connect(**db_creds)
    try:
        gc = Geocoder(conn, iter_conn, debug=False)
        gc.geocode()
    finally:
        # Always release both connections, even when geocode() fails.
        conn.close()
        iter_conn.close()
示例#6
0
    def _handle_client(self, client, address):
        """Serve one HTTP client connection.

        Reads a request, accepts only GET, geocodes the query-string
        component and responds with an HTTP header plus a JSON body.

        Args:
            client: the connected client socket.
            address: the peer address (unused in the handler body).
        """
        PACKET_SIZE = 1024

        while True:
            print("CLIENT", client)
            data = client.recv(PACKET_SIZE).decode()

            # Peer closed the connection without sending data.
            if not data:
                break

            data_split = data.split(' ')
            request_method = data_split[0]

            # NOTE(review): non-GET requests only log and loop back to
            # recv(); the connection is never answered or closed here.
            if not request_method == "GET":
                print("Unknown HTTP request method: {method}".format(
                    method=request_method))
            else:
                print("Successful method used: {method}".format(
                    method=request_method))

                print("Method: {m}".format(m=request_method))
                print("Request Body: {b}".format(b=data))
                print("Data {data}".format(data=data_split))

                http_code = 200
                geocode_response = []

                try:
                    # data_split[1] is the request target (path + query).
                    geocoder = Geocoder(query_string_data=data_split[1])
                    geocode_response = geocoder.request()

                    if 'error' in geocode_response[0]:
                        http_code = 403
                except Exception as e:
                    print("Geocoder request failed: {e}".format(e=e))
                    geocode_response = [{
                        "error": "error_initializing_geocoders"
                    }]
                    http_code = 500

                response_header = self._generate_headers(http_code)

                response = response_header.encode()

                # BUG FIX: json.dumps() returns str; appending it to the
                # encoded (bytes) header raised TypeError. Encode the
                # JSON body before concatenating.
                response += json.dumps(geocode_response).encode()

                # sendall() retries until everything is written; send()
                # may transmit only part of the payload.
                client.sendall(response)
                client.close()
                break
示例#7
0
 def geocode_city(self, city_state_country):
     """Geocode a 'city, state, country' string with MapQuest Nominatim.

     Returns:
         The bounding box of the first match, in the format
         [bottom, top, left, right], or None when nothing matched.
     """
     client = Geocoder("http://open.mapquestapi.com/nominatim/v1/search?format=json")
     response = client.geocode(city_state_country)

     if (not response):
         print ("no response for city=%s" % (city_state_country))
         return None

     first_match = response[0]
     return first_match[self.BOUNDINGBOX]
def geocode_all(db,
                data_folder="geocoder/data",
                terms_folder="geocoder/terms",
                lines_per_insert=1000):
    """Geocode every Execucao row not yet marked as searched.

    Rows are committed in batches of lines_per_insert; a final commit
    flushes any partial batch.
    """
    print("Loading table...")
    # Loads the whole un-searched set into memory at once; a streaming
    # query would be lighter for very large tables.
    non_geocoded = Execucao.query.filter(Execucao.searched == False).all()
    with Geocoder(data_folder, terms_folder) as geocoder:
        counter = ProgressCounter(len(non_geocoded), print_abs=True)
        pending = 0
        for row in non_geocoded:
            geoent = geocoder.geocode_list(get_geolocable_cells(row))
            if geoent:
                lat, lon, reg = geoent.best_coords()
                if lat:
                    row.point = "POINT(%s %s)" % (lon, lat)
            row.searched = True
            pending += 1
            if pending == lines_per_insert:
                db.session.commit()
                pending = 0
            counter.update()
        if pending:
            db.session.commit()
        counter.end()
示例#9
0
def geocode_all(db, data_folder="geocoder/data",
                terms_folder="geocoder/terms",
                lines_per_insert=1000):
    """Geocode un-searched rows batch by batch until none remain.

    Each batch of at most lines_per_insert rows is fetched, geocoded
    and committed, then the next batch is requested.
    """
    print("Loading table...")
    batch = get_non_geocode(lines_per_insert)
    while batch:
        with Geocoder(data_folder, terms_folder) as geocoder:
            counter = ProgressCounter(len(batch), print_abs=True)
            pending = 0
            for row in batch:
                geoent = geocoder.geocode_list(get_geolocable_cells(row))
                if geoent:
                    lat, lon, reg = geoent.best_coords()
                    if lat:
                        row.point = "POINT(%s %s)" % (lon, lat)
                row.searched = True
                pending += 1
                if pending == lines_per_insert:
                    db.session.commit()
                    pending = 0
                counter.update()
            if pending:
                db.session.commit()
            counter.end()
        batch = get_non_geocode(lines_per_insert)
 def _get_blockgroup(self, df):
     """Attach geocoded region columns to every row of *df*.

     The frame is processed in chunks of 250 rows; each chunk's lat/lon
     pairs are geocoded and the resulting columns are left-merged back
     onto the chunk. Returns the concatenated result with a fresh index.
     """
     geocoder = Geocoder()
     # Column names assigned to the geocoder's output frame.
     result_columns = [
         cn.LAT, cn.LON, cn.DEST_BLOCK_GROUP, cn.NBHD_LONG,
         cn.NBHD_SHORT, cn.COUNCIL_DISTRICT, cn.URBAN_VILLAGE,
         cn.ZIPCODE
     ]
     merged_chunks = []
     for chunk in self._chunker(df, 250):
         geocoded = geocoder.geocode_df(chunk.loc[:, (cn.LAT, cn.LON)])
         geocoded.columns = result_columns
         merged = pd.merge(chunk,
                           geocoded,
                           left_on=[cn.LAT, cn.LON],
                           right_on=[cn.LAT, cn.LON],
                           how='left').drop_duplicates()
         merged_chunks.append(merged)
     return pd.concat(merged_chunks, sort=False).reset_index()
示例#11
0
    def __init__(self, origin_lat: float, origin_lon: float,
                 mapquest_api_key: str) -> None:
        """
        Creates a new monitor.

        Args:
            origin_lat (float): The origin latitude.
            origin_lon (float): The origin longitude.
            mapquest_api_key (str): The MapQuest API key for geocoding.
        """
        self._origin = (origin_lat, origin_lon)
        self._incidents = {}  # type: Dict[str, Incident]
        self._parser = RealTimeParser()
        self._coder = Geocoder(mapquest_api_key)

        # HTTP session with an identifying User-Agent for the feed host.
        self._ses = requests.Session()
        self._ses.headers.update({
            "User-Agent":
            "SFD Feed Watcher "
            "(https://github.com/xyx0826/sea_fires_around_me)"
        })
示例#12
0
 def post(self):
     """Handle a shop search: geocode the 'lnd' form field and render
     the 30 nearest shops sorted by distance.

     Redirects to the search form when no location was submitted, and
     writes 'not found' when the location cannot be geocoded.
     """
     lnd = self.request.get('lnd')
     if not lnd:
         self.redirect('/m/search')
         return

     lnd_coord = Geocoder(apikey=apikey).geocode(location=lnd)
     if not lnd_coord:
         self.response.out.write('not found')
         return

     # Pair every shop with its distance and a map URL.
     shops = []
     for shop in Shop.all():
         shops.append((shop,
                       calc_distance(shop, lnd_coord),
                       mapurl(shop.geo.lat, shop.geo.lon)))
     shops.sort(key=lambda entry: entry[1]['distance'])
     self.render_response('mobile/search_result.html', {
         'lnd': lnd,
         'lnd_map': mapurl(lnd_coord['lat'], lnd_coord['lng']),
         'shops': shops[0:30]
     })
示例#13
0
    def __new_bot(self):
        """Build a WeatherBot wired entirely to stubbed sources.

        Every external dependency (timezone, weather, geocoder, webcam)
        is replaced with a MagicMock returning canned data, so the bot
        can be exercised without network access.
        """
        tz_api = TimezoneApi('bar')
        tz_api.load = MagicMock(return_value=12345)

        weather_source = WeatherSource('foo', tz_api)
        canned_weather = Weather(now=WeatherAtTime(20, 'Clear', ''),
                                 day=WeatherDay(19, 21, 'Mostly Cloudy', ''))
        weather_source.load = MagicMock(return_value=canned_weather)

        coder = Geocoder('foo')
        canned_geocode = {
            'results': [{
                'geometry': {
                    'location': {
                        'lat': 1.2,
                        'lng': 3.4
                    }
                }
            }]
        }
        coder.geocode = MagicMock(return_value=canned_geocode)

        cam_source = WebcamSource('foo')
        cam_source.load = MagicMock(return_value=None)

        return WeatherBot(weather_source, coder, cam_source)
示例#14
0
def arcgis(location, proxies='', timeout=5.0):
    """Geocode *location* via ArcGIS's REST geocoding API.

        >>> g = geocoder.arcgis('380 New York St, Redlands, California')
        >>> g.latlng
        (34.05649072776595, -117.19566584280369)
        >>> g.postal
        '92373'
        ...

    Official Docs
    -------------
    http://resources.arcgis.com/en/help/arcgis-rest-api/
    """
    return Geocoder(Arcgis(location), proxies=proxies, timeout=timeout)
示例#15
0
def ip(location, proxies='', timeout=5.0):
    """Geocode an IP address via MaxMind's services.

        >>> g = geocoder.ip('74.125.226.99')
        >>> g.latlng
        (37.4192, -122.0574)
        >>> g.address
        'Mountain View, California United States'
        ...

    Official Docs
    -------------
    http://www.maxmind.com/en/geolocation_landing
    """
    return Geocoder(Ip(location), proxies=proxies, timeout=timeout)
示例#16
0
def osm(location, proxies='', timeout=5.0):
    """Geocode *location* against OpenStreetMap data via Nominatim.

        >>> g = geocoder.osm('Tacloban City')
        >>> g.latlng
        (11.2430274, 125.0081402)
        >>> g.country
        'Philippines'
        ...

    Official Docs
    -------------
    http://wiki.openstreetmap.org/wiki/Nominatim
    """
    return Geocoder(Osm(location), proxies=proxies, timeout=timeout)
示例#17
0
def mapquest(location, proxies='', timeout=5.0):
    """Geocode *location* via MapQuest's address geocoding API.

        >>> g = geocoder.mapquest('1555 Blake street, Denver')
        >>> g.latlng
        (39.740009, -104.992264)
        >>> g.quality
        'CITY'
        ...

    Official Docs
    -------------
    http://www.mapquestapi.com/geocoding/
    """
    return Geocoder(Mapquest(location), proxies=proxies, timeout=timeout)
示例#18
0
def bing(location, key='', proxies='', timeout=5.0):
    """Geocode *location* via Bing's REST location API.

        >>> key = 'XXXXX'
        >>> g = geocoder.bing('Medina, Washington', key=key)
        >>> g.latlng
        (47.615821838378906, -122.23892211914062)
        >>> g.country
        'United States'
        ...

    Official Docs
    -------------
    http://msdn.microsoft.com/en-us/library/ff701714.aspx
    """
    return Geocoder(Bing(location, key=key), proxies=proxies, timeout=timeout)
示例#19
0
def tomtom(location, key='', proxies='', timeout=5.0):
    """Geocode *location* via TomTom's geocoding API.

        >>> key = 'XXXXX'
        >>> g = geocoder.tomtom('Amsterdam, Netherlands', key=key)
        >>> g.latlng
        (52.373166, 4.89066)
        >>> g.quality
        'city'
        ...

    Official Docs
    -------------
    http://developer.tomtom.com/products/geocoding_api
    """
    return Geocoder(Tomtom(location, key=key), proxies=proxies, timeout=timeout)
示例#20
0
def reverse(latlng, proxies='', timeout=5.0):
    """Reverse-geocode a (lat, lng) pair via Google's geocoding API V3.

        >>> latlng = (37.4192, -122.0574)
        >>> g = geocoder.reverse(latlng)
        >>> g.address
        'Sevryns Road, Mountain View, CA 94043, USA'
        >>> g.postal
        '94043'
        ...

    Official Docs
    -------------
    https://developers.google.com/maps/documentation/geocoding/
    """
    return Geocoder(Reverse(latlng), proxies=proxies, timeout=timeout)
示例#21
0
def nokia(location, app_id='', app_code='', proxies='', timeout=5.0):
    """Geocode *location* via Nokia's HERE geocoder API.

        >>> app_id = 'XXXXX'
        >>> app_code = 'XXXXX'
        >>> g = geocoder.nokia('Keilaniemi, Espoo')
        >>> g.latlng
        (60.1759338, 24.8327808)
        >>> g.country
        'FIN'
        ...

    Official Docs
    -------------
    https://developer.here.com/rest-apis/documentation/geocoder
    """
    provider = Nokia(location, app_id=app_id, app_code=app_code)
    return Geocoder(provider, proxies=proxies, timeout=timeout)
示例#22
0
def geonames(location, username='', proxies='', timeout=5.0):
    """Geocode *location* via the Geonames web-service API.

        >>> username = '******'
        >>> g = geocoder.geonames('Springfield, Virginia', username=username)
        >>> g.latlng
        (38.78928, -77.1872)
        >>> g.country
        'United States'
        >>> g.population
        30484
        ...

    Official Docs
    -------------
    http://www.geonames.org/export/web-services.html
    """
    return Geocoder(Geonames(location, username=username),
                    proxies=proxies, timeout=timeout)
示例#23
0
    def check_input(self, location):
        """Normalize a location given in any supported form to (lat, lng).

        Accepts a free-form address string (geocoded via Google), a
        list/tuple, a dict, or any object exposing a ``latlng``
        attribute (e.g. a Geocoder result). Unrecognized inputs fall
        through to the (0.0, 0.0) default.

        Returns:
            A (lat, lng) pair.
        """
        lat, lng = 0.0, 0.0

        # Free-form address string: geocode it.
        if isinstance(location, str):
            lat, lng = Geocoder(Google(location)).latlng

        # BUG FIX: this branch was a bare `if`, detached from the chain
        # below; the whole dispatch is now one consistent if/elif chain.
        elif isinstance(location, (list, tuple)):
            lat, lng = self.check_for_list(location)

        # Dictionary form (e.g. {'lat': ..., 'lng': ...}).
        elif isinstance(location, dict):
            lat, lng = self.check_for_dict(location)

        # Anything with a latlng attribute (Geocoder result objects).
        elif hasattr(location, 'latlng'):
            lat, lng = location.latlng

        return lat, lng
示例#24
0
def google(location,
           client='',
           secret='',
           proxies='',
           api_key='',
           timeout=5.0):
    """Geocode *location* via Google's geocoding API V3.

        >>> g = geocoder.google('1600 Amphitheatre Pkwy, Mountain View, CA')
        >>> g.latlng
        (37.784173, -122.401557)
        >>> g.country
        'United States'
        ...

    Official Docs
    -------------
    https://developers.google.com/maps/documentation/geocoding/
    """
    return Geocoder(Google(location, client=client, secret=secret,
                           api_key=api_key),
                    proxies=proxies, timeout=timeout)
示例#25
0
class PostalCodeParser:
  # Parses a geonames postal-code dump (tab-separated) and inserts one
  # 'ZIP' record per group of rows sharing the same names into the
  # `geonames` collection, optionally synthesizing missing admin/place
  # parent records via the geocoder.
  # NOTE: Python 2 code (print statements, csvReader.next(), xrange,
  # list-returning filter()).

  # Column layout of the geonames postal-code dump.
  def columnNames(self): return [
    COUNTRY_CODE,
    NAME, # really zipcode
    PLACE_NAME,
    ADMIN1_NAME,
    ADMIN1_CODE,
    ADMIN2_NAME,
    ADMIN2_CODE,
    ADMIN3_NAME,
    ADMIN3_CODE,
    LATITUDE,
    LONGITUDE,
    ACCURACY
  ]

  def __init__(self, filename):
    # quotechar '\x07' (BEL) never appears in the data, effectively
    # disabling quoting for this tab-separated file.
    self.csvReader = unicode_csv_reader(open(filename), delimiter='\t', quotechar = '\x07')
    # Read one row ahead; getNextFeatureSet() starts each group from it.
    self.lastRow = self.csvReader.next()
    self.count = 0
    self.geocoder = Geocoder()
    self.done = None

  def makeFeature(self, row):
    # Build a Feature from one CSV row; warns (but continues) when the
    # row has the wrong number of columns.
    if len(row) != len(self.columnNames()):
      print row
      print("col length mismatch: %d vs %d" % (len(row), len(self.columnNames())))

    values = {}
    for (key, val) in zip(self.columnNames(), row):
      values[key] = val

    return Feature(values)

  # make this a real iterator?
  def getNextFeatureSet(self):
    # Collect consecutive rows whose all_names() match the first row's;
    # the first non-matching row is stashed in self.lastRow for the
    # next call.
    first_feature = self.makeFeature(self.lastRow)
    self.lastRow = None
    features = [first_feature]

    for row in self.csvReader:
      self.count += 1
      if self.count % 1000 == 0:
        print "imported %d zips so far" % self.count

      self.lastRow = row
      feature = self.makeFeature(row)
      if feature.all_names() == first_feature.all_names():
        features.append(feature)
      else:
        break

    # NOTE(review): if the reader is exhausted while the last row still
    # matched, self.lastRow keeps that (already appended) row, so the
    # next call re-processes it before `done` is finally set -- verify
    # whether a duplicate group can be emitted at EOF.
    if self.lastRow is None:
      self.done = True

    return features

  # try to find each of the admin codes
  # if we can't find it, create and return them
  def find_and_create_parents(self, feature):
    def tooFar(candidate):
      # NOTE(review): despite the name, this returns True when the
      # candidate is *within* 20000 km, so filter(tooFar, ...) keeps
      # nearby candidates -- the name reads inverted.
      km = distance_km(feature.latitude(), feature.longitude(), candidate[0]['lat'], candidate[0]['lng'])
      return km < 20000
 
    # try to find CC, then admin1, then admin2, then admin3, then place_name
    # if we find it and the name disagrees, add that
    # if we can't find it, try geocoding for it instead
    cc = feature.country_code()
    parents = [feature.cc_id()]

    def tryGeocoder(query, record):
      # Geocode `query`; on a clean hit, return the hit's feature ids
      # (plus their parents). Otherwise insert `record` as a new
      # geonames document and return its feature id.
      parents = []
      (geocodes, meta) = self.geocoder.geocode(query)
      geocodes = filter(tooFar, geocodes)
      if len(meta['query']) == 0 and len(geocodes) > 0:
        for g in geocodes:
          parents.append(g[0]['featureid'])
          if 'parents' in g[0]:
            parents += g[0]['parents']
      else:
        record['parents'] = list(set(filter(None, record['parents'])))
        geonames.insert(record)
        parents.append(record['featureid'])
      return parents

    fcodes = [ADM1, ADM2, ADM3]
    codes = [ feature.admin1_id(), feature.admin2_id(), feature.admin3_id() ]
    names = [ feature.values[ADMIN1_NAME], feature.values[ADMIN2_NAME], feature.values[ADMIN3_NAME] ]
    
    for i in xrange(0, len(codes)):
      code = codes[i]
      name = names[i]
      if not code:
        continue
      # Look for an existing admin record with this id, country, and
      # the expected parent chain.
      geocodes = [g for g in geonames.find({'featureid': code, 'cc': cc,
        'parents': { '$all': filter(None,codes[0:i]) + [feature.cc_id()] }
      })]
      if len(geocodes):
        parents.append(code)
        # Record found: make sure it also carries this row's name.
        for g in geocodes:
          if normalize(name) not in g['names']:
            geonames.update({'_id': g['_id']}, {'$addToSet': { 'names': normalize(name) }}, safe=True)
      else:
        # Not found: geocode "<admin names so far> <cc>" or create it.
        query = ' '.join(names[0:i+1]) + ' ' + cc
        parents += tryGeocoder(query,
          {
            'cc': feature.country_code(),
            'feature_code': 'A',
            'feature_class': fcodes[i],
            'featureid': code,
            'names': [normalize(name)],
            'lat': feature.latitude(),
            'lng': feature.longitude(),
            'parents': filter(None, parents),
          })
        
    # looking for place name
    query = feature.values[PLACE_NAME] + ' ' + ' '.join(names) + ' ' + cc
    parents += tryGeocoder(query,
      {
          'cc': feature.country_code(),
          'feature_code': 'P',
          'feature_class': 'PPL',
          'featureid': normalize(feature.values[PLACE_NAME]),
          'names': [normalize(feature.values[PLACE_NAME])],
          'lat': feature.latitude(),
          'lng': feature.longitude(),
          'parents': filter(None, parents),
          'displayNames': [{
            'l': 'en',
            's': feature.name(),
            'p': True
          }]
      })

    return list(set(parents))

  def parse(self):
    # Drain the file group by group, inserting one ZIP record per group.
    while not self.done:
      featureSet = self.getNextFeatureSet()

      parents = []

      last_feature = None
      for feature in featureSet:
        # Skip duplicate place names within the group.
        if last_feature and last_feature.values[PLACE_NAME] == feature.values[PLACE_NAME]:
          continue

        if feature.country_code() == 'PT':
          # NOTE(review): bare `next` is a no-op expression in Python --
          # this was probably meant to be `continue`.
          next
        parents = feature.parents()
        all_names = feature.all_names()

        # with better run-time ranking, we could do this instead of synthesizing parents
        # all_names.append(last_feature.values[PLACE_NAME])
        
        if options.create_missing_parents:
          parents += self.find_and_create_parents(feature)

        last_feature = feature

      # save the first feature
      feature = featureSet[0]
      try:
        record = {
          'featureid': '%s-%s' % (feature.country_code(), all_names[0]),
          'cc': feature.country_code(),
          'feature_code': 'Z',
          'feature_class': 'ZIP',
          'names': [normalize(n) for n in all_names],
          'lat': feature.latitude(),
          'lng': feature.longitude(),
          'parents': list(set(parents)),
          'accuracy': feature.accuracy(),
          'displayNames': [{
            'l': 'en',
            's': feature.name(),
            'p': True
          }]
        }
      except:
        # NOTE(review): `values` and `row` are not defined in this
        # scope; reaching this handler would itself raise NameError.
        error(values, row)

      geonames.insert(record)
示例#26
0
 def get_grid_with_address(self, address, width_m, height_m, grid_spacing_m):
     """Return a grid of points centered on the geocoded <address>."""
     lat_lon, details = Geocoder().get_location_by_address(address)
     center_lat = lat_lon[0]
     center_lon = lat_lon[1]
     return self.get_grid_with_center(center_lat, center_lon,
                                      width_m, height_m, grid_spacing_m)
示例#27
0
File: main.py  Project: rupenp/geocoder
import sys
from geocoder import Geocoder

if len(sys.argv) < 4:
    print "[USAGE]: python %s [state abbr filepath] [city filepath] [location text filepath]" % sys.argv[0]
    exit()

state_abbr_filepath = sys.argv[1]
city_filepath = sys.argv[2]
location_text_filepath = sys.argv[3]

gc = Geocoder(state_abbr_filepath, city_filepath)

for line in open(location_text_filepath, 'r'):
    location_text = line.rstrip()
    point = gc.geocode(location_text)
    if point == None:
        print None
    else:
        print point[0], point[1]
示例#28
0
  return d

# Open the input shapefile and extend its schema with columns for the
# geocoder's best match (geonameid, coordinates, distance).
source = fiona.open(sys.argv[1])
output_schema = source.schema.copy()
output_schema['properties']['db_geonameid'] = 'str'
output_schema['properties']['db_lat'] = 'float'
output_schema['properties']['db_lng'] = 'float'
output_schema['properties']['db_distance'] = 'float'

# Write results next to the input as 'fixed-<name>' with the same CRS
# and driver.
output = fiona.open('fixed-' + sys.argv[1], 'w', 
    crs=source.crs,
    driver=source.driver,
    schema=output_schema)

geocoder = Geocoder('demo.twofishes.net')
for f in source:
  # Query string: "<name> <admin1> <country>".
  query = u"%s %s %s" % (
    f['properties']['NAME'],
    f['properties']['ADM1NAME'],
    f['properties']['ADM0NAME']
  )
  print query
  ll = (f['properties']['LATITUDE'], f['properties']['LONGITUDE'])
  llStr = '%s,%s' % ll

  # NOTE(review): this snippet appears truncated -- `hint` is built but
  # never used, and `output` is never written to in the visible code.
  hint = {
    'woeHint': 'TOWN',
    'll': llStr
  }
示例#29
0
class RealTime:
    """
    A real-time 911 dispatch monitor.

    Polls the dispatch feed, tracks open incidents keyed by id, and
    prints a message whenever an incident opens or resolves.
    """
    def __init__(self, origin_lat: float, origin_lon: float,
                 mapquest_api_key: str) -> None:
        """
        Creates a new monitor.

        Args:
            origin_lat (float): The origin latitude.
            origin_lon (float): The origin longitude.
            mapquest_api_key (str): The MapQuest API key for geocoding.
        """
        self._origin = (origin_lat, origin_lon)
        self._incidents = {}  # type: Dict[str, Incident]
        self._parser = RealTimeParser()
        self._coder = Geocoder(mapquest_api_key)

        # HTTP session with an identifying User-Agent for the feed host.
        self._ses = requests.Session()
        self._ses.headers.update({
            "User-Agent":
            "SFD Feed Watcher "
            "(https://github.com/xyx0826/sea_fires_around_me)"
        })

    def _get_two_day_rows(self) -> "List[IncidentRow]":
        """
        Gets all incident rows from today and yesterday
        to deal with midnight jumps.

        Returns:
            List[IncidentRow]: A list of all active incident rows.
        """
        today_resp = self._ses.get(REALTIME_ENDPOINT,
                                   params={"action": "Today"})
        self._parser.feed(today_resp.text)
        rows = self._parser.get_rows()

        prev_day = datetime.today() - timedelta(days=1)
        yday_resp = self._ses.get(
            REALTIME_ENDPOINT,
            params={"incDate": prev_day.strftime("%m/%d/%Y")})
        self._parser.feed(yday_resp.text)
        yday_rows = self._parser.get_rows()
        if len(yday_rows) > 0:
            rows += yday_rows
        return rows

    def _add_incident(self, row: IncidentRow) -> None:
        """
        Adds a new incident and prints a message.

        Args:
            row (IncidentRow): The new incident.
        """
        coords = self._coder.geocode(row.loc)
        incident = Incident(row.id, row.datetime, row.loc, coords, row.typ)
        dist = incident.get_dist_to(self._origin)
        direction = incident.get_direction_to(self._origin)
        print(f"Incident of type {incident.get_type()} is opened "
              f"at {incident.get_addr()}, {dist:.2f} km {direction}.")
        self._incidents[row.id] = incident

    def _update_incident(self, row: IncidentRow) -> None:
        """
        Updates an open incident.

        Args:
            row (IncidentRow): The incident to update.
        """
        incident = self._incidents[row.id]
        for unit in row.units.split(" "):
            incident.add_unit(unit)
        incident.update_type(row.typ)

    def _remove_incident(self, inc_id: str) -> None:
        """
        Removes a resolved incident and prints a message.

        Args:
            inc_id (str): The ID of the resolved incident.
        """
        incident = self._incidents[inc_id]
        addr = incident.get_addr()
        mins = incident.get_time_since().total_seconds() / 60
        print(f"Incident at {addr} is now resolved after {int(mins)} minutes.")
        del self._incidents[inc_id]

    def update(self) -> None:
        """
        Checks if there are incidents to update.
        """
        rows = self._get_two_day_rows()

        # Open incidents we have not seen before.
        for row in rows:
            if row.id not in self._incidents:
                self._add_incident(row)

        # Refresh every incident that is still in the feed.
        for row in rows:
            if row.id in self._incidents:
                self._update_incident(row)

        # Resolve incidents that dropped out of the feed.
        active_ids = {r.id for r in rows}
        for inc_id in list(self._incidents):
            if inc_id not in active_ids:
                self._remove_incident(inc_id)
示例#30
0
def text():
    """Respond to incoming texts with a message.

    Twilio SMS webhook. Dispatches on the message body: HEALTH and EDU
    keywords get canned/educator responses, ADD/REMOVE manage address
    subscriptions, and any other body is treated as a street address to
    geocode. Returns the TwiML response as a string.
    """
    resp = MessagingResponse()

    # get sender phone number, check if they are a current subscriber
    # [2:] drops the leading prefix of the number — presumably "+1"; confirm
    incoming_number = request.values.get('From')[2:]
    if incoming_number in users.keys():
        caller = users[incoming_number]
    else:
        caller = contact.Contact(incoming_number)

    # get body of incoming SMS and format it
    body = request.values.get('Body')
    b = body.upper().strip()

    # check if the body is 'HEALTH', 'EDU', 'ADD', 'REMOVE' or anything else
    if b in keywords['Health']:
        health_msg = message.HealthMsg().make_msg()
        resp.message(health_msg)

        print("{} texted HEALTH".format(incoming_number))
        myLogger.emit(logging.INFO, "HEALTH texted", request.values)

    elif b in keywords['Edu']:
        call_msg = message.CallMsg().make_msg()
        resp.message(call_msg)

        print("{} requested a call from a Health Educator".format(
            incoming_number))
        myLogger.emit(logging.INFO, "Health Educator call requested",
                      request.values)

        # get users last requested address or the address they're subscribed to
        if incoming_number in users.keys():
            edu_addr = caller.last_requested_address
        else:
            edu_addr = [a[0] for a in caller.addresses][0]

        # calculate demos nearby to generate short report for Health Dept
        located = Geocoder().geocode(edu_addr)

        # query Socrata datasets
        # within_circle(location, lat, lng, radius); 155 is presumably
        # meters (Socrata convention) — confirm
        scheduled_demos = soda_client.get(
            "tsqq-qtet",
            where="within_circle(location, {}, {}, 155)".format(
                located['location']['y'], located['location']['x']))
        pipeline_demos = soda_client.get(
            "dyp9-69zf",
            where="within_circle(location, {}, {}, 155)".format(
                located['location']['y'], located['location']['x']))
        past_demos = soda_client.get(
            "rv44-e9di",
            where="within_circle(location, {}, {}, 155)".format(
                located['location']['y'], located['location']['x']))
        upcoming_demos = scheduled_demos + pipeline_demos

        # format 'em
        demo_dates = []
        if len(scheduled_demos) > 0:
            for d in scheduled_demos:
                demo_date = "{}".format(
                    datetime.strptime(
                        (d['demolish_by_date']),
                        '%Y-%m-%dT%H:%M:%S').strftime('%m-%d-%Y'))
                demo_dates.append(demo_date)
        else:
            demo_date = None
            demo_dates.append(demo_date)

        # send log to Loggly
        # NOTE(review): log_extras is built but never used — the emit()
        # call below passes its fields individually (and omits
        # past_demos_count); consider removing or passing it through
        log_extras = {
            "last_address_texted": edu_addr,
            "upcoming_demos_count": len(upcoming_demos),
            "past_demos_count": len(past_demos),
            "next_knockdown_date": demo_dates[0]
        }
        myLogger.emit(logging.INFO,
                      "Health Educator call requested.",
                      request.values,
                      last_address_texted=edu_addr,
                      upcoming_demo_count=len(upcoming_demos),
                      next_knockdown_date=demo_dates[0])

        # send request to Slack
        webhook_url = os.environ['SLACK_WEBHOOK_URL']
        caller_msg = ":phone: `{}` requested a call from a Health Educator \nLast address texted: *{}* \nNumber of upcoming demos: *{}* \nNumber of past demos: *{}* \nNext knock-down date: *{}*".format(
            incoming_number, edu_addr, len(upcoming_demos), len(past_demos),
            demo_dates[0])
        slack_data = {'text': caller_msg}

        response = requests.post(webhook_url,
                                 data=json.dumps(slack_data),
                                 headers={'Content-Type': 'application/json'})

        if response.status_code != 200:
            myLogger.emit(logging.ERROR,
                          "Slack request failed", [],
                          status_code=response.status_code,
                          response=response.text)
            raise ValueError(
                'Request to slack returned an error %s, the response is:\n%s' %
                (response.status_code, response.text))

    elif b in keywords['Add'] and caller.last_requested_address:
        caller.watch(caller.last_requested_address)

        msg = message.SubscribeMsg(caller.last_requested_address)
        success_msg = msg.make_msg()
        resp.message(success_msg)

        # remove from users so we grab a 'fresh' copy of the user with sheet rows
        del users[incoming_number]

        print("{} subscribed to {}".format(incoming_number,
                                           caller.last_requested_address))
        myLogger.emit(logging.INFO,
                      "Caller subscribed to an address",
                      request.values,
                      address=caller.last_requested_address)

    elif b in keywords['Remove']:
        # NOTE(review): unwatch() is invoked while iterating
        # caller.addresses; if unwatch mutates that list this may skip
        # entries — confirm against the Contact implementation
        for address in caller.addresses:
            caller.unwatch(address)

        msg = message.UnsubscribeMsg([a[0] for a in caller.addresses])
        remove_msg = msg.make_msg()
        resp.message(remove_msg)

        print("{} unsubscribed from {} addresses".format(
            incoming_number, len(caller.addresses)))
        myLogger.emit(logging.INFO,
                      "Caller unsubscribed to a number of addresses",
                      request.values,
                      address_count=len(caller.addresses))

    else:
        # Extract "<house number> <street>" from the body if present.
        # NOTE(review): non-raw pattern string — \d/\s/\w escapes raise
        # DeprecationWarning on modern Python; prefer a raw string
        pattern = re.compile('(\d{2,5})\s?(\w{2,})')
        result = pattern.search(body)
        if result:
            send = result.group(1) + ' ' + result.group(2)
        else:
            send = body

        # send it to the geocoder
        located = Geocoder().geocode(send)

        # if it's a valid address, build up a text message with demos nearby
        if located:
            print("Geocoded {} from {}".format(located['address'],
                                               incoming_number))
            myLogger.emit(logging.INFO,
                          "Geocoded an address",
                          request.values,
                          geocode=located["address"])

            msg = message.DemoMsg(located)
            demo_msg = msg.make_msg()
            resp.message(demo_msg)

            # store matched address
            # [:-7] presumably trims a fixed-width suffix (e.g. state/zip)
            # from the geocoder's address string — confirm its format
            caller.last_requested_address = located['address'][:-7]
            users[incoming_number] = caller

        # default message for a bad address
        else:
            default_msg = message.DefaultMsg().make_msg()
            resp.message(default_msg)

            print("Couldn't geocode '{}' from {}; Sent it to Slack".format(
                body, incoming_number))
            myLogger.emit(logging.ERROR,
                          "Geocoding failed",
                          request.values,
                          from_address=body)

            # send it to Slack
            webhook_url = os.environ['SLACK_WEBHOOK_URL']
            err_msg = ":exclamation: demo-alerts can't geocode `{}` from `{}`".format(
                send, incoming_number)
            slack_data = {'text': err_msg}

            response = requests.post(
                webhook_url,
                data=json.dumps(slack_data),
                headers={'Content-Type': 'application/json'})

            if response.status_code != 200:
                myLogger.emit(logging.ERROR,
                              "Slack request failed", [],
                              status_code=response.status_code,
                              response=response.text)
                raise ValueError(
                    'Request to slack returned an error %s, the response is:\n%s'
                    % (response.status_code, response.text))

    # send the text
    return str(resp)
示例#31
0
def main():
    """Convert RocketFuel edge files into Topology Zoo GML graphs.

    Scans ``options.directory`` for ``<asn>:<asn>`` data folders, parses
    each folder's ``edges`` file into a NetworkX graph (optionally
    geocoding node locations via Geonames), and writes one ``.gml`` per
    topology into an ``rf2zoo`` output directory.
    """
    #TODO: make this a callback function for verification
    if options.geocode and not options.geonames_username:
        logger.error("Please enter a Geonames username to use geocoding")
        sys.exit(0)

    directory = options.directory + os.sep
    # The doubled separator is collapsed by normpath below.
    output_path = os.path.normpath(directory + os.sep + "rf2zoo")
    if not os.path.isdir(output_path):
        os.mkdir(output_path)

    logger.info("Writing to %s" % output_path)
    all_folders = glob.glob(directory + "*")

    if options.geocode:
        from geocoder import Geocoder
        geocoder = Geocoder(options.geonames_username, options.skip_cache)

    # The parsing grammar is folder-independent: build it once, outside
    # the loop, instead of rebuilding it per folder.
    colon = Literal(":").suppress()
    comma = Literal(",").suppress()
    arrow = Literal("->").suppress()
    ASN = Word(nums)
    ObsCount = Word(nums)
    # Place names may contain "." (e.g. "St. Louis") and an optional
    # comma-separated region part.
    place_name = Group(
        OneOrMore(Word(alphas + "."))
        + Optional(comma) +
        Optional(Word(alphas)))
    node = Group(ASN + colon + place_name)
    entry = (node + arrow + node + ObsCount)

    for folder in all_folders:
        if not os.path.isdir(folder):
            continue
        #TODO: allow support for specifying the edge file directly

        # Check valid format of x:y where x, y are valid ASNs
        (_, data_folder) = os.path.split(folder)
        if not re.match(r"\d+:\d+", data_folder):
            continue

        # Only do internal (intra-AS) topologies when requested
        if options.intra_as:
            (asn_a, asn_b) = data_folder.split(":")
            if asn_a != asn_b:
                continue

        logger.info("Processing %s" % data_folder)

        G = nx.Graph()

        #TODO: check on bidirectionality
        #TODO: download file if not present
        #TODO: print unparsed lines
        #TODO: push all into dict and use add_nodes_from and add_edges_from

        filename = folder + os.sep + "edges"
        # Context manager guarantees the file is closed even if a line
        # fails to parse (the original open()/close() pair leaked on error).
        with open(filename) as f:
            for line in f:
                processed = entry.parseString(line)
                (src, dst, obs_count) = processed
                src_asn, src_place = src
                dst_asn, dst_place = dst
                src_place = " ".join(src_place)
                dst_place = " ".join(dst_place)
                # Use simple string for ID (not list returned from
                # Pyparsing, which doesn't always hash the same)
                src_id = "%s %s" % (src_place, src_asn)
                dst_id = "%s %s" % (dst_place, dst_asn)
                G.add_node(src_id, ASN=int(src_asn), label=src_place)
                G.add_node(dst_id, ASN=int(dst_asn), label=dst_place)
                G.add_edge(src_id, dst_id, obs_count=int(obs_count))

        # Geocoding (optional): relabel/annotate nodes with coordinates
        if options.geocode:
            G = geocoder.geocode(G)
            geocoder.save_cache()

        G.name = data_folder
        G.graph['Creator'] = 'rocketfuel2zoo'

        # ":" is not filesystem-safe everywhere; use "_" in the file name
        out_file = output_path + os.sep + data_folder.replace(":", "_") + ".gml"
        nx.write_gml(G, out_file)
示例#32
0
 def __init__(self, filename):
   """Open *filename* as a tab-separated stream and prime the first row."""
   # quotechar '\x07' (BEL) is a character that never appears in the data,
   # effectively disabling CSV quoting
   self.csvReader = unicode_csv_reader(open(filename), delimiter='\t', quotechar = '\x07')
   # Read one row ahead so the current record is always available
   self.lastRow = self.csvReader.next()  # Python 2 iterator protocol
   self.count = 0          # number of rows processed so far
   self.geocoder = Geocoder()
   self.done = None        # presumably set truthy at end-of-stream — confirm
示例#33
0
def set_providers_to_geocoder():
    """Reset the module-level ``geocoder`` and register every Provider.

    Providers are added in ascending 'order' so lookup priority follows
    their configured ordering.
    """
    ordered_providers = Provider.objects.order_by('order')
    global geocoder
    geocoder = Geocoder()
    for p in ordered_providers:
        geocoder.add_provider(p)
示例#34
0
# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)

# Save references to each table
address_data = Base.classes.Address_Data
census_data = Base.classes.Census_Data

# Create the locations dataframe with all data, including both statuses
loc_df = pd.read_sql_table("Address_Data", con=engine)
# Create a locations dataframe with reference/seeded/locked locations
# (Status == "S" — presumably "seeded"; confirm against the schema)
ref_loc_df = loc_df[loc_df.Status == "S"]
# Keep only the columns the geocoder needs
ref_loc_df = ref_loc_df.loc[:, ["Latitude", "Longitude", "StreetAddress"]]
# Seed the geocoder with the reference locations
geo = Geocoder(ref_loc_df)

#################################################
# Flask Routes
#################################################


# Route to render index.html template using data from db
@app.route("/")
def index():
    """Returns all street names to populate street name dropdown in HTML for selection"""

    # NOTE(review): this function appears truncated in this file — the query
    # below is built but never executed or returned within the visible span.
    # The SQL strips everything up to the first space (the house number) and
    # de-duplicates, leaving distinct street names.
    # select only StreetAddress and get rid of the number and return only unique values
    select_st = 'select distinct right("StreetAddress", (length("StreetAddress") - position(\' \' in "StreetAddress"))) "StreetAddress"\
        from "Address_Data" a'
示例#35
0
File: app.py  Project: ruinanwang/ratchat
import config
import prompts
import requests
from db_handler import DB
from geocoder import Geocoder
from datetime import timedelta
from flask import Flask, request, session, render_template, url_for
from twilio.twiml.messaging_response import Body, Media, Message, MessagingResponse

# Application singletons: database handle, Flask app, and geocoder.
db = DB()
app = Flask(__name__)
app.secret_key = config.secret_key
app.config.from_object(__name__)
# Sessions expire after five minutes of inactivity
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(minutes=5)
geocoder = Geocoder(config.api_key)


@app.route('/', methods=['GET'])
def root():
    """Render the index page with every stored record."""
    db.execute(config.db_credentials, config.select_all_records)
    return render_template('index.html', data=db.get_all_records())


@app.route('/map', methods=['GET'])
def map():
    """Render the map page with every stored record."""
    db.execute(config.db_credentials, config.select_all_records)
    return render_template('map.html', data=db.get_all_records())

# Micro-benchmark for the Manhattan address Geocoder (Python 2 — bare
# `print` statements). Resolves several spellings of one address, then
# times repeated address_to_latlng lookups with timeit.
import timeit
from geocoder import Geocoder

g = Geocoder('manhattan-addresses.bin', 'manhattan-kdtree.bin')

print g.address_to_latlng('10 WEST 49 STREET')
print g.address_to_latlng('10 WEST 49 ST')
print g.address_to_latlng('10 W 49 ST')

print timeit.timeit("""g.address_to_latlng('99 BROADWAY')""",
                    setup="""from geocoder import Geocoder
g = Geocoder('manhattan-addresses.bin', 'manhattan-kdtree.bin')""")
# The triple-quoted string below is commented-out code: a reverse
# (lat/lng -> address) timing run.
'''
print timeit.timeit("""g.latlng_to_address((40.769083827890107,-73.952477804528087))""",setup="""from geocoder import Geocoder
g = Geocoder('manhattan-addresses.bin', 'manhattan-kdtree.bin')""")
'''
# Near-duplicate of the benchmark script above (Python 2), differing only
# in whitespace/formatting of the timeit call.
import timeit
from geocoder import Geocoder
g = Geocoder('manhattan-addresses.bin', 'manhattan-kdtree.bin')

print g.address_to_latlng('10 WEST 49 STREET')
print g.address_to_latlng('10 WEST 49 ST')
print g.address_to_latlng('10 W 49 ST')


print timeit.timeit("""g.address_to_latlng('99 BROADWAY')""",setup="""from geocoder import Geocoder
g = Geocoder('manhattan-addresses.bin', 'manhattan-kdtree.bin')""")

# Commented-out reverse-lookup timing run (kept as a string literal).
'''
print timeit.timeit("""g.latlng_to_address((40.769083827890107,-73.952477804528087))""",setup="""from geocoder import Geocoder
g = Geocoder('manhattan-addresses.bin', 'manhattan-kdtree.bin')""")
'''
示例#38
0
from geocoder import Geocoder
from query_parser import QueryParser

# Port the chat service is expected to listen on
PORT = 9000
# Canned fallback replies — presumably used for off-topic messages; the
# code that selects from this list is outside the visible span
RESPONSES = [
    "Hrm... How about asking me about the weather?",
    "I don't know about that... Want to know about the weather?",
    "I'm not really into that type of thing. Let's talk about the weather.",
    "You must be off your rocker at the moment. Come back when you want to talk weather."
]

app = Flask(__name__)
CORS(app)

# Per-user message history, keyed by user_id
store = {}
# Shared geocoder used by the chat handler
geocoder = Geocoder()


@app.route('/chat/messages', methods=['GET', 'POST'])
def chat_messages():
    """Handle chat 'join' and 'message' actions.

    Joins get a greeting stored in the per-user history; messages are
    parsed for a location and answered with a weather summary.

    NOTE(review): this handler appears truncated in this file — ``reply``
    is assigned but never returned within the visible span.
    """
    if request.form['action'] == 'join':
        # New user: start their history with a greeting
        store[request.form['user_id']] = {
            "messages": ["Hello %s!" % (request.form['name'])]
        }
        reply = store[request.form['user_id']]['messages'][-1]

    elif request.form['action'] == 'message':
        location = QueryParser(request.form['text']).location()
        if location:
            # Geocode the location, then summarise its forecast
            lat, lng = geocoder.encode(location)
            reply = Forecaster(lat, lng).weather_summary()
示例#39
0
def main():
    network_files = []
    if options.file:
        network_files.append(options.file)

    if options.directory:
        #TODO: make this os.path.join across all zootools to remove need for trailing /
        network_files = glob.glob(options.directory + "*.graphml")

    if len(network_files) == 0:
        logger.warn("No files found. Specify -f file or -d directory")
        sys.exit(0)

    if options.directory:
        path = options.directory
    elif options.file:
        path = os.path.split(options.file)[0]
        # Get full path - don't want to create in root dir
        path = os.path.abspath(path)

    git_version = get_git_version(path)
    topzootools_version = pkg_resources.get_distribution("TopZooTools").version

    if options.output_dir:
        output_path = options.output_dir
    else:
        output_path = path + os.sep + "zoogml"

    if options.geocode:
        output_path += "_geocoded"

    if not os.path.isdir(output_path):
        os.mkdir(output_path)

    # clean up path
    output_path = os.path.normpath(output_path)

    #TODO: check nx write pickle uses cPickle

    #TODO: make this optional
    output_pickle_path = output_path + os.sep + "cache"       
    if not os.path.isdir(output_pickle_path):
        os.mkdir(output_pickle_path)

    geocoder = Geocoder(options.geocode, options.skip_cache)

    #output_path += strftime("%Y%m%d_%H%M%S")
    logger.info("Saving to folder: %s" % output_path)
    # and create cache directory for pickle files
    pickle_dir = path + os.sep + "cache"       
    if not os.path.isdir(pickle_dir):
        os.mkdir(pickle_dir)

        #ToDO: check why sorting
    for source_file in sorted(network_files):
        # Extract name of network from file path
        filename = os.path.split(source_file)[1]
        net_name = os.path.splitext(filename)[0]
        logger.info( "Converting {0}".format(net_name))

        pickle_file = "{0}/{1}.pickle".format(pickle_dir, net_name)
        if (os.path.isfile(pickle_file) and
            os.stat(source_file).st_mtime < os.stat(pickle_file).st_mtime):
            # Pickle file exists, and source_file is older
            graph = nx.read_gpickle(pickle_file)
        else:
            # No pickle file, or is outdated
            graph = nx.read_graphml(source_file)
            nx.write_gpickle(graph, pickle_file)

        graph = convert_to_undirected(graph)
        # Check for self loops
        for n, data in graph.nodes(data=True):
            if n in graph.neighbors(n):
                logger.warn( "Self loop {0} {1}".format(data['label'], n))

        # if all nodes in network are internal, remove redundant internal tag
        # this makes checking if internal too complex, keep tag for all nodes
        """
        if all(data['Internal'] == 1 for n,data in graph.nodes(data=True)):
            # all internal, remove tag
            for n in graph.nodes():
                del graph.node[n]['Internal']
        """

        #*********************************
        # Geocoding (optional)
        #*********************************
        if options.geocode:
            graph = geocoder.geocode(graph)
            geocoder.save_cache()

        #*********************************
        # Remove graphics data
        #*********************************

        # Will be overwritten with name from metadata if present
        network_label = net_name

        # extract edge data
        for src, dst, key, data in graph.edges(data=True, keys=True):
            if data.get("label"):
                label = data['label']
                extracted_data = extract_edge_data(label)
                # Replace the edge data with extracted
                graph.edge[src][dst][key] = extracted_data

        # remove empty note
        if 'Note' in graph.graph and graph.graph['Note'] == '':
            del graph.graph['Note']

        # Strip & as yEd fails on it
        #TODO: use html entitites fn for this
        network_label_clean = network_label.replace("&", "and")

        # Set other graph attributes
        graph.graph['SourceGitVersion'] = git_version
        graph.graph['Creator'] = "Topology Zoo Toolset"
        graph.graph['ToolsetVersion'] = topzootools_version

        graph.graph['label'] = network_label_clean

        #*********************************
        #OUTPUT - Write the graphs to files
        #*********************************

        filename = network_label.strip()
        #filename = filename.title()
        filename = filename.replace(" ", "_")

        pattern = re.compile('[\W_]+')
        filename = pattern.sub('', filename)
        # And also the pickle cache - write first so older timestamp
        pickle_out_file =  "{0}/{1}.pickle".format(output_pickle_path, filename)
        nx.write_gpickle(graph, pickle_out_file)

        gml_file =  "{0}/{1}.gml".format(output_path, filename)
        nx.write_gml(graph, gml_file)
        logger.info("Wrote to %s" % gml_file)