def row_in_feature_layer(row: pd.Series, feature_layer: FeatureLayer) -> bool:
    # Null check
    if pd.isna(row['pin_longitude']) or pd.isna(row['pin_latitude']):
        return False
    # Construct a point at the row's coordinates
    pin = Point({"x": row['pin_longitude'], "y": row['pin_latitude']})
    # construct a geometry filter to check if each point is in a disputed area
    pin_filter = intersects(pin)

    continue_query = True
    retries = 0
    MAX_RETRIES = 9
    # Default to in_disputed_area = True to ensure we never show pins in a disputed area
    in_disputed_area = True
    # Make query to determine whether or not the pin is in the disputed area
    # If the query times out, retry with exponential backoff
    while continue_query:
        try:
            in_disputed_area = len(feature_layer.query(geometry_filter=pin_filter).features) > 0
            continue_query = False
        except Exception as e:
            # send slack message if we exceed retry count
            if retries > MAX_RETRIES:
                body = f'Unable to check if the record with ID {row["source_id"]} is in a disputed region.'
                send_slack_message(body, channel='#dev-logging-etl')
                continue_query = False
            else:
                sleep(1.5**(retries))
                retries += 1

    return in_disputed_area
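
A minimal usage sketch (assuming a pandas DataFrame named pins_df with pin_longitude, pin_latitude and source_id columns, and an already-connected FeatureLayer named disputed_layer; both names are hypothetical):

# pins_df and disputed_layer are illustrative placeholders
pins_df['in_disputed_area'] = pins_df.apply(
    lambda row: row_in_feature_layer(row, disputed_layer), axis=1)
# drop pins that fall inside (or could not be confirmed outside) a disputed area
pins_df = pins_df[~pins_df['in_disputed_area']]
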
def add_request_point(gis, item_id, address_json, ip_address, user_agent,
                      request_time):
    # get feature layer to edit
    layer_item = gis.content.get(item_id)
    feature_layer = layer_item.layers[0]

    # compose a Point object
    pt = Point({
        'x': address_json['longitude'],
        'y': address_json['latitude'],
        'spatialReference': {
            'wkid': 4326
        }
    })

    # compose a Feature object
    request_attributes = {
        'ip_address': ip_address,
        'user_agent': user_agent,
        'request_address':
        f"{address_json['city']}, {address_json['region_name']}, {address_json['country_name']}, {address_json['zip']}",
        'request_time2': request_time.timestamp() * 1000
    }

    ft = Feature(geometry=pt, attributes=request_attributes)

    # Edit the feature layer
    edit_result = feature_layer.edit_features(adds=[ft])
    return edit_result
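
A hedged example call, assuming an authenticated arcgis.gis.GIS connection, a placeholder feature-layer item id, and an address dictionary carrying the keys used above (longitude, latitude, city, region_name, country_name, zip):

from datetime import datetime
from arcgis.gis import GIS

# all identifiers below are illustrative placeholders, not real credentials or item ids
result = add_request_point(
    gis=GIS("https://www.arcgis.com", "username", "password"),
    item_id="<feature-layer-item-id>",
    address_json={"longitude": -71.0596, "latitude": 42.3601,
                  "city": "Boston", "region_name": "MA",
                  "country_name": "United States", "zip": "02201"},
    ip_address="203.0.113.10",
    user_agent="Mozilla/5.0",
    request_time=datetime.utcnow())
print(result["addResults"])
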
Example #3
    def build_node_sdf(n_list):

        # List of Node Dictionaries for Data Frame Creation
        data_list = []

        for node in n_list:

            # Set Initial Values
            node_data = {
                'osm_id':
                str(node['id']),
                'geom':
                Point({
                    "x": node['lon'],
                    "y": node['lat'],
                    "spatialReference": {
                        "wkid": 4326
                    }
                })
            }

            # Push All Tag Values into Node Data
            for k, v in node['tags'].items():
                node_data.update({k: v})

            data_list.append(node_data)

        try:
            df = pd.DataFrame(data_list)
            df.spatial.set_geometry('geom')

            return df

        except Exception as e:
            raise Exception(f'Building Spatial Data Frame Failed: {e}')
Example #4
    def point(self):
        """
        Property to return point geometry if available.
        :return: Point geometry object if possible.
        """
        if self.x and self.y:
            return Point(x=self.x, y=self.y, sr=SpatialReference(wkid=4326))
        else:
            return None
Example #5
def build_node_sdf(n_list, excludedattributes):
    '''
    Function to convert returned OSM point data to Esri SpatialDataFrame.
    Returns an ESRI SpatialDataFrame.
    @param n_list: The list of nodes as returned by the get_osm_elements function
    @param excludedattributes: The attributes excluded in the configuration file osmconfig.json
    '''

    # Dictionary For Geometries & IDs
    geo_dict = {"geo": []}
    val_dict = {'osm_id': [], 'timestamp': []}

    # Dictionary For Incoming Tags
    for n in n_list:
        n_tags = n['tags'].keys()
        for tag in n_tags:
            if tag not in val_dict.keys() and tag not in excludedattributes:
                tagname = tag
                val_dict[tagname] = []

    print('Constructing points...')
    p = 0
    pbar = createpbar(len(n_list))
    # Build Lists
    for n in n_list:
        try:
            p = updatepbar(p, pbar)
            # Populate Tags
            for tag in [
                    key for key in val_dict.keys()
                    if key not in ['osm_id', 'timestamp']
                    and key not in excludedattributes
            ]:
                val_dict[tag].append(n['tags'].get(str(tag), ''))

            # Populate Geometries & IDs
            point = Point({
                "x": n['lon'],
                "y": n['lat'],
                "spatialReference": {
                    "wkid": 4326
                }
            })
            geo_dict['geo'].append(point)
            val_dict['osm_id'].append(str(n['id']))
            val_dict['timestamp'].append(
                dt.strptime(n['timestamp'], '%Y-%m-%dT%H:%M:%SZ'))

        except Exception as ex:
            print('Node ID {0} Raised Exception: {1}'.format(n['id'], str(ex)))

    try:
        val_dict = {k: v for k, v in val_dict.items() if v is not None}
        return SpatialDataFrame(val_dict, geometry=geo_dict['geo'])

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
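
A small illustrative call, assuming the helpers referenced above (createpbar, updatepbar, dt, SpatialDataFrame) are importable from the same module and that ArcPy is available; the node below is hand-built in the shape the function expects:

sample_nodes = [{
    'id': 240949599,                       # OSM node id (placeholder)
    'lon': 13.3777,
    'lat': 52.5163,
    'timestamp': '2020-01-01T12:00:00Z',
    'tags': {'amenity': 'cafe', 'name': 'Example Cafe'}
}]

# no attributes excluded for this quick check
sdf = build_node_sdf(sample_nodes, excludedattributes=[])
print(sdf.head())
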
Example #6
    def create_points(self, latitudes, longitudes):
        if (len(latitudes) != len(longitudes)):
            raise ValueError("Coordinate arrays must have equal length!")

        return [
            Point({
                'x': longitude,
                'y': latitude
            }) for (longitude, latitude) in zip(longitudes, latitudes)
        ]
Example #7
    def reverse_geocode(self, locations):
        '''Get address from coordinate pairs'''

        addresses = []
        for location in locations:
            unknown_pt = Point(location)
            address = reverse_geocode(unknown_pt)
            addresses.append(address)

        print(addresses)
        return addresses
Example #8
    def _project_points_from_wgs84_to_web_mercator(self, wgs84_points):
        major_axis = 6378137
        major_shift = pi * major_axis
        return [
            Point({
                'x': wgs84_point.x * major_shift / 180.0,
                'y': (log(tan((90.0 + wgs84_point.y) * pi / 360.0)) /
                      (pi / 180.0)) * major_shift / 180.0
            }) for wgs84_point in wgs84_points
        ]
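
The expression above is the standard spherical Web Mercator forward projection. As a quick standalone sanity check of the same arithmetic, outside the class:

from math import pi, log, tan

def wgs84_to_web_mercator(lon, lat):
    # same spherical-earth formula as above (semi-major axis 6378137 m)
    major_shift = pi * 6378137
    x = lon * major_shift / 180.0
    y = (log(tan((90.0 + lat) * pi / 360.0)) / (pi / 180.0)) * major_shift / 180.0
    return x, y

# the origin maps to (0, 0); longitude 180 maps to x of roughly 20037508.34 m
print(wgs84_to_web_mercator(0.0, 0.0))
print(wgs84_to_web_mercator(180.0, 0.0))
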
def snap_to_line(self, polyline_geometry):
    """
    Returns a new point snapped to the closest location along the input line geometry.
    :param polyline_geometry: Required arcgis.geometry.Polyline
        ArcGIS Polyline geometry the Point will be snapped to.
    :return: arcgis.geometry.Point
        ArcGIS Point geometry coincident with the nearest location along the input
        ArcGIS Polyline object
    """
    if not isinstance(self, Point):
        raise Exception('Snap to line can only be performed on a Point geometry object.')
    if polyline_geometry.type.lower() != 'polyline':
        raise Exception('Snapping target must be a single ArcGIS Polyline geometry object.')
    if self.spatial_reference is None:
        raise Warning('The spatial reference for the point to be snapped to a line is not defined.')
    if polyline_geometry.spatial_reference is None:
        raise Warning('The spatial reference of the line being snapped to is not defined.')
    if (self.spatial_reference != polyline_geometry.spatial_reference and
            self.spatial_reference.wkid != polyline_geometry.spatial_reference.wkid and
            self.spatial_reference.latestWkid != polyline_geometry.spatial_reference.wkid and
            self.spatial_reference.wkid != polyline_geometry.spatial_reference.latestWkid and
            self.spatial_reference.latestWkid != polyline_geometry.spatial_reference.latestWkid):
        raise Exception('The spatial reference for the point and the line are not the same.')

    if HASARCPY:
        polyline_geometry = polyline_geometry.as_arcpy
        return Point(self.as_arcpy.snapToLine(in_point=polyline_geometry))

    elif HASSHAPELY:
        polyline_geometry = polyline_geometry.as_shapely
        point_geometry = self.as_shapely
        snap_point = polyline_geometry.interpolate(polyline_geometry.project(point_geometry))
        snap_point = Point({'x': snap_point.x, 'y': snap_point.y, 'spatialReference': self.spatial_reference})
        return snap_point

    else:
        raise Exception('Either arcpy or Shapely is required to perform snap_to_line')
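
A hedged usage sketch calling the function directly (it also works as a Point method, since it takes self as its first argument); this assumes the module-level HASARCPY/HASSHAPELY flags used above are defined and that either ArcPy or Shapely is installed:

from arcgis.geometry import Point, Polyline

pt = Point({'x': -122.41, 'y': 37.78, 'spatialReference': {'wkid': 4326}})
line = Polyline({'paths': [[[-122.45, 37.75], [-122.35, 37.80]]],
                 'spatialReference': {'wkid': 4326}})

snapped = snap_to_line(pt, line)
print(snapped.x, snapped.y)
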
Example #10
def check_time():

    # https://ps-cc.maps.arcgis.com/home/webmap/viewer.html?webmap=5589213f373c4e31a3cd2a8e7dc2b4d4
    # Boston Logan: lat=42.3656&long=-71.0096
    # Portland: lat=43.6465&long=-70.3097
    # Manchester: lat=42.9297&long=-71.4352

    # get input parameters
    long = request.args.get("long")
    lat = request.args.get("lat")

    if lat is None:
        lat = 42.3656
    if long is None:
        long = -71.0096

    # search for the feature layer
    gis = GIS()
    itemid = "119a14d182b443b8b56340433d47d7e1"
    layer = gis.content.get(itemid).layers[0]

    # query feature layer using location
    filter = filters.intersects(
        Point({
            "x": long,
            "y": lat,
            "spatialReference": {
                "wkid": 4326
            }
        }))
    results = layer.query(geometry_filter=filter,
                          out_fields="FromBreak,ToBreak",
                          return_geometry=False)

    # no results means it's outside driving distance
    if len(results) == 0:
        response_msg = "This location is more than a 60 minute drive from Dover, NH."

    # return information about smallest buffer
    else:
        closest_result = sorted(results,
                                key=lambda f: f.attributes["FromBreak"])[0]
        response_msg = "This location is a {0} - {1} minute drive from Dover, NH.".format(
            closest_result.attributes["FromBreak"],
            closest_result.attributes["ToBreak"])

    return jsonify({"msg": response_msg}), 200
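
If this handler is registered on a Flask route (the path below is a placeholder), it can be exercised with a simple GET carrying the lat/long query parameters read above; both default to Boston Logan when omitted:

import requests

# "/check_time" is an assumed route name; the coordinates are the Portland example
resp = requests.get("http://localhost:5000/check_time",
                    params={"lat": 43.6465, "long": -70.3097})
print(resp.status_code, resp.json()["msg"])
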
Example #11
    def main(self):
        self.df = self.assignFieldsDF()
        for n in range(self.rango[0], self.rango[1]):
            if n % 10 == 0: print(n)
            self.df = self.extractAddress(self.df, n)

        data = self.df[self.df["x"] != 0]
        list_cli = list(set(data[self.fieldIndice]))
        data = self.df[self.df[self.fieldIndice].isin(list_cli)]

        data = data[data["x"] != 0]

        data = data[["ID", self.fieldIndice, self.fieldAddress, 'x', 'y']]

        geometry = [Point(xy) for xy in zip(data.x, data.y)]
        crs = {'init': 'epsg:4326'}
        geo_df = GeoDataFrame(data, crs=crs, geometry=geometry)

        geo_df.to_file(driver='ESRI Shapefile', filename=self.output)
Example #12
    def getCrimesMapByFelonyType(self, dataframe, symbol):
        xCoordList: List[float] = dataframe['x']
        yCoordList: List[float] = dataframe['y']
        if len(xCoordList) == 0 or len(xCoordList) != len(yCoordList):
            raise DatasetEmptyError

        pointList = [
            Point({
                "x": xCoordList[index],
                "y": yCoordList[index]
            }) for index in range(len(xCoordList))
        ]
        featureSet = self.__getPointFeatureSet(pointList)

        crime_map = self.__getAtlantaMapWithNeighborhoodMap()
        crime_map.add_layer(
            FeatureCollection.from_featureset(featureSet, symbol=symbol))

        crime_map.draw(featureSet, symbol=symbol)

        return crime_map
Example #13
def build_node_sdf(n_list):

    # Dictionary For Geometries & IDs
    geo_dict = {"geo": []}
    val_dict = {'osm_id': []}

    # Dictionary For Incoming Tags
    for n in n_list:
        n_tags = n['tags'].keys()
        for tag in n_tags:
            if tag not in val_dict.keys():
                val_dict[tag] = []

    # Build Lists
    for n in n_list:
        try:
            # Populate Tags
            for tag in [key for key in val_dict.keys() if key != 'osm_id']:
                val_dict[tag].append(str(n['tags'].get(tag, 'Null')))

            # Populate Geometries & IDs
            point = Point({
                "x": n['lon'],
                "y": n['lat'],
                "spatialReference": {
                    "wkid": 4326
                }
            })
            geo_dict['geo'].append(point)
            val_dict['osm_id'].append(str(n['id']))

        except Exception as ex:
            print('Node ID {0} Raised Exception: {1}'.format(n['id'], str(ex)))

    try:
        return SpatialDataFrame(val_dict, geometry=geo_dict['geo'])

    except TypeError:
        raise Exception('Ensure ArcPy is Included in Python Interpreter')
Example #14
mpio_layer = gis.content.get(municipio_id).layers[0]

# Process data
errad_summarize = erradicacion_df.groupby(['nombremunicipio'])['hectareas'].sum()

updates = []
for row in erradicacion_lyr.query(where="nombremunicipio = '-'"):

    # get attributes
    oid = row.attributes['OBJECTID']
    x = row.geometry["x"]
    y = row.geometry["y"]

    # Create spatial filter
    point = Point({"x": x, "y": y})
    sr = SpatialReference({"wkid": 4326})
    geom_filter = filters.intersects(point, sr)

    # Query data using a point (Identify)
    mpio = mpio_layer.query(
        out_fields="coddane, nombremunicipio, nombredepartamento",
        geometry_filter=geom_filter,
        return_geometry=False)

    # Create update record
    nombre_mpio = mpio.features[0].attributes["nombremunicipio"]
    nombre_depto = mpio.features[0].attributes["nombredepartamento"]
    coddane = mpio.features[0].attributes["coddane"]
    update = {
        "attributes": {
Example #15
    def _xy_to_geometry(x, y, sr):
        """converts x/y coordinates to Point object"""
        return Point({'spatialReference': sr, 'x': x, 'y': y})
Example #16
            long, lat = transform(mi_south, wgs84,
                                  geocode_result[0]['location']['x'],
                                  geocode_result[0]['location']['y'])

            # Round Lat/Long
            lat = round(lat, 6)
            long = round(long, 6)

            # Convert Lat/Long to USNG
            m = mgrs.MGRS()
            usng_raw = m.toMGRS(lat, long)
            u = str(usng_raw.decode('utf-8'))
            usng = u[0:3] + ' ' + u[3:5] + ' ' + u[5:10] + ' ' + u[10:15]

            # Construct point feature
            geocode_xy = Point({'x': x, 'y': y})

            # Feature layer query to find box alarm areas
            fset_boxalarmareas = fl_boxalarmareas.query(
                geometry_filter=filters.intersects(geocode_xy))

            # Assign box alarm variables
            boxalarm_fire = None
            boxalarm_medical = None
            boxalarm_wildland = None

            # Loop to populate Box Alarm Variables
            for boxalarmarea in fset_boxalarmareas:
                if boxalarmarea.attributes['BoxAlarmType'] == 'FIRE':
                    boxalarm_fire = boxalarmarea.attributes['BoxAlarmNumber']
                elif boxalarmarea.attributes['BoxAlarmType'] == 'MEDICAL':
Example #17
# run with ArcGIS API for Python 3

# Find an address using ArcGIS World Geocoding Service

from arcgis.gis import *
from arcgis.geocoding import geocode, reverse_geocode
from arcgis.geometry import Point

# Log into ArcGIS Online as an anonymous user
dev_gis = GIS()

# Geocode a street address
geocode_result = geocode(address="200 Lincoln Ave Salinas CA",
                         as_featureset=True)
print(geocode_result)

# Reverse geocode a coordinate
print("")
location = {
    'Y': 36.67526264843514,
    'X': -121.65731271093892,
    'spatialReference': {
        'wkid': 4326
    }
}
unknown_pt = Point(location)

address = reverse_geocode(location=unknown_pt)
print(address)
Example #18
def calc_drivetime():

    # Boston Logan: lat=42.3656&long=-71.0096&token=
    # Portland: lat=43.6465&long=-70.3097&token=
    # Manchester: lat=42.9297&long=-71.4352&token=

    # get input parameters
    long = request.args.get("long")
    lat = request.args.get("lat")
    token = request.args.get("token")
    if lat is None or long is None or token is None:
        return jsonify({"error": "Missing input parameter"}), 400

    # get referer header
    referer = request.headers.get("referer")

    # referer should be sent, but for demo:
    if referer is None:
        referer = "http://localhost"

    # make input geometry
    search_geom = Point({
        "x": long,
        "y": lat,
        "spatialReference": {
            "wkid": 4326
        }
    })

    # Dover, NH geometry
    dover_geom = Point({
        "x": -70.8737,
        "y": 43.1979,
        "spatialReference": {
            "wkid": 4326
        }
    })

    # authenticate using token

    # This works if you have a token generated with referer = 'http':
    # gis = GIS(url="https://ps-cc.maps.arcgis.com/", username=None, password=None, token=token)

    # This works for any token as long as referer matches
    gis = GIS(url="https://ps-cc.maps.arcgis.com/")
    gis._con._referer = referer
    gis._con._token = token

    # do routing
    try:
        route_service_url = gis.properties.helperServices.route.url
        route_layer = RouteLayer(route_service_url, gis=gis)
        stops = '{0},{1}; {2},{3}'.format(search_geom.x, search_geom.y,
                                          dover_geom.x, dover_geom.y)
        result = route_layer.solve(stops=stops,
                                   return_directions=False,
                                   return_routes=True,
                                   output_lines='esriNAOutputLineNone',
                                   return_barriers=False,
                                   return_polygon_barriers=False,
                                   return_polyline_barriers=False)

        travel_time = result['routes']['features'][0]['attributes'][
            'Total_TravelTime']
        response_msg = "This location is a {0} minute drive from Dover, NH.".format(
            travel_time)

        return jsonify({"msg": response_msg}), 200

    except:
        return jsonify({"error": "An error occurred"}), 500
Example #19
    def point_for_row(x, y, z, sr):
        return Point({'x': x, 'y': y, 'z': z, "spatialReference": sr})
Example #20
        "wind_angle": "",
        "gust_strength": "",
        "gust_angle": "",
        "wind_timeutc": ""
        '''

for measure in measuresDict["values"]:
    attr = dict()
    attr["id"] = measure["_id"]
    attr["altitude"] = measure["altitude"]
    attr["temperature"] = measure["temperature"]
    attr["humidity"] = measure["humidity"]
    attr["pressure"] = measure["pressure"]
    attr["rain_60min"] = measure["rain_60min"]
    attr["rain_24h"] = measure["rain_24h"]
    attr["rain_live"] = measure["rain_live"]
    attr["rain_timeutc"] = measure["rain_timeutc"]
    attr["wind_strength"] = measure["wind_strength"]
    attr["wind_angle"] = measure["wind_angle"]
    attr["gust_strength"] = measure["gust_strength"]
    attr["gust_angle"] = measure["gust_angle"]
    attr["wind_timeutc"] = measure["wind_timeutc"]
    lat = measure["Y"]
    lon = measure["X"]
    pt = Point({"x": lon, "y": lat, "spatialReference": {"wkid": 4326}})
    feature = Feature(pt, attr)
    featuresToAdd.append(feature)
#add all the points
#test
#netAtmoFl.manager.truncate()
netAtmoFl.edit_features(adds=featuresToAdd)
Example #21
def get_add_new_closest_dataframe(
        origins: [str, pd.DataFrame],
        origin_id_field: str,
        destinations: [str, pd.DataFrame],
        destination_id_field: str,
        closest_table: [str, pd.DataFrame],
        new_destination: Point,
        gis: arcgis.gis.GIS = None,
        origin_weighting_points: [str, pd.DataFrame] = None) -> pd.DataFrame:
    """
    Calculate the impact of a location being added to the retail landscape.
    :param origins: Polygons in a Spatially Enabled Dataframe or string path to Feature Class delineating starting
        locations for closest analysis.
    :param origin_id_field: Field or column name used to uniquely identify each origin.
    :param destinations: Spatially Enabled Dataframe or string path to Feature Class containing all destinations.
    :param destination_id_field: Field or column name used to uniquely identify each destination location.
    :param closest_table: Path to CSV, table, or Dataframe containing solution for nearest locations.
    :param new_destination: Geometry of new location being added to the retail landscape.
    :param gis: Optional GIS object; if provided, online resources are used to solve for the closest
        destinations, otherwise local network analysis resources are used.
    :param origin_weighting_points: Points potentially used to calculate a centroid based on population density
        represented by the weighting points instead of simply the geometric centroid.
    :return: Data frame with rebalanced closest table only for affected origins.
    """
    # read in the existing closest table solution
    closest_orig_df = closest_table if isinstance(
        closest_table, pd.DataFrame) else pd.read_csv(closest_table)

    # get a list of the destination columns from the existing closest table
    dest_cols = [
        col for col in closest_orig_df.columns
        if col.startswith('destination_id')
    ]

    # get a count of the nth number of locations solved for
    dest_count = len(dest_cols)

    # load the original origins into a dataframe and format it for analysis
    origin_df_poly = get_dataframe(origins)
    origin_df = proximity.prep_sdf_for_nearest(origin_df_poly, origin_id_field,
                                               origin_weighting_points)

    # load the original destinations into a dataframe and format it for analysis
    dest_df = get_dataframe(destinations)
    dest_df = proximity.prep_sdf_for_nearest(dest_df, destination_id_field)

    # create new destination dataframe for analysis
    new_id = _get_min_uid(
        origin_df, 'ID'
    )  # creates lowest numbered id available, or 1000 higher than top value
    new_df = pd.DataFrame([[new_id, new_id, new_destination]],
                          columns=['ID', 'Name', 'SHAPE'])
    new_df.spatial.set_geometry('SHAPE')

    # ensure the dataframes are in the same spatial reference and then get the id of the origin the new point resides in
    coincidence_new_dest = new_destination.project_as(origin_df.spatial.sr)
    for row in origin_df_poly.itertuples(name='dest'):
        geom = row.SHAPE
        if geom.contains(coincidence_new_dest):
            dest_id = getattr(row, origin_id_field)

    # get the destination ids of the existing nth closest destinations
    dest_subset_ids = get_destination_id_list_from_near_df_for_origin_id(
        closest_orig_df, dest_id)

    # by cross referencing from the destination ids, get the origin ids allocated to the exiting locations
    subset_origin_ids = pd.concat([
        closest_orig_df[closest_orig_df[dest_col].isin(dest_subset_ids)]
        ['origin_id'] for dest_col in dest_cols
    ]).unique()

    # get a subset dataframe of the origins allocated to the closest nth locations
    subset_origin_df = origin_df[origin_df['ID'].astype('int64').isin(
        subset_origin_ids)].copy()

    # add the new location to the destination dataframe
    dest_analysis_df = pd.concat([dest_df, new_df], sort=False)
    dest_analysis_df.spatial.set_geometry('SHAPE')
    dest_analysis_df.reset_index(inplace=True, drop=True)

    # if a GIS is provided, use online resources to solve for the closest destination to the affected area
    if gis is not None:

        # solve for the closest destination to the affected area
        closest_subset_df = proximity.closest_dataframe_from_origins_destinations(
            subset_origin_df,
            'ID',
            dest_analysis_df,
            'ID',
            gis=gis,
            destination_count=dest_count)

    # otherwise, use local resources
    else:

        # solve for the closest destination to the affected area
        closest_subset_df = proximity.closest_dataframe_from_origins_destinations(
            subset_origin_df,
            'ID',
            dest_analysis_df,
            'ID',
            network_dataset=data.usa_network_dataset,
            destination_count=dest_count)

    return closest_subset_df
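
A hedged example call, sketching how a candidate site might be evaluated; all paths, field names, and coordinates below are placeholders:

from arcgis.geometry import Point

new_site = Point({'x': -117.1956, 'y': 34.0564,
                  'spatialReference': {'wkid': 4326}})

affected_df = get_add_new_closest_dataframe(
    origins='./data/block_groups.gdb/origins',    # polygon feature class (placeholder path)
    origin_id_field='GEOID',
    destinations='./data/retail.gdb/stores',      # existing destinations (placeholder path)
    destination_id_field='LOCNUM',
    closest_table='./data/closest_solution.csv',  # prior nearest-location solution
    new_destination=new_site)
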
Example #22
            classes = visual_recognition.classify(images_file,
                                                  parameters=json.dumps({
                                                      'classifier_ids':
                                                      ['##########'],
                                                      'threshold':
                                                      0.876
                                                  }))

        #print(json.dumps(classes, indent=2))
        data = json.loads(json.dumps(classes, indent=2))
        if len(data['images'][0]['classifiers'][0]['classes']) != 0:

            print(json.dumps(classes, indent=2))

            f = open(
                "./images/" +
                filename.replace("photo", "coor").replace("jpg", "txt"), "r")
            d = float(f.read()) * 0.000000301

            gis = GIS(username="******", password="******")
            p = Point({"x": -73.471977 + d, "y": 40.703342})
            a = {"pothole_layer": "pothole"}
            f = Feature(p, a)
            fs = FeatureSet([f])
            lyr = FeatureLayer(
                "https://services8.arcgis.com/x660UqfqVJlbWB0Y/arcgis/rest/services/pothole_layers/FeatureServer/0",
                gis=gis)
            lyr.edit_features(adds=fs)

        #delete photo and txt
Example #23
def main(mytimer: func.TimerRequest) -> None:
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()

    if mytimer.past_due:
        logging.info('The timer is past due!')

    # The code below parses the data from Netatmo and shares the info in measuresDict.
    # First we need to authenticate, which gives back an access_token used by the later requests.
    payload = {'grant_type': 'refresh_token',
            'client_id': privatepass.getClientId(),
            'client_secret': privatepass.getClientSecret(),
            'refresh_token' : privatepass.getRefreshToken(),
            'scope': 'read_station'}
    try:
        response = requests.post("https://api.netatmo.com/oauth2/token", data=payload)
        response.raise_for_status()
        access_token=response.json()["access_token"]
        refresh_token=response.json()["refresh_token"]
        scope=response.json()["scope"]
        
    except requests.exceptions.HTTPError as error:
        print(error.response.status_code, error.response.text)



    '''
    Netatmo data depends on the extent queried: the more you zoom in, the more stations you get back.

    https://dev.netatmo.com/en-US/resources/technical/guides/ratelimits
    Per user limits:
    50 requests every 10 seconds
    > One global request, then multiple little ones on specific sub-areas, while staying under the API limit.
    '''

    # first global request

    payload = {'access_token': access_token,
            'lat_ne':52.677040100097656,
            'lon_ne': 13.662185668945312,
            'lat_sw' : 52.374916076660156,
            'lon_sw':13.194580078125
                # filter weird/wrong data
                ,'filter': 'true'
            }
    try:
        response = requests.post("https://api.netatmo.com/api/getpublicdata", data=payload)
        response.raise_for_status()
        resultJson=response.json()
        parseData(resultJson)
        
    except requests.exceptions.HTTPError as error:
        print(error.response.status_code, error.response.text)



    base_lat_ne = 52.677040100097656
    base_lon_ne = 13.662185668945312
    base_lat_sw = 52.374916076660156
    base_lon_sw = 13.194580078125


    # calc each subextent size
    lon_step = (base_lon_ne - base_lon_sw)/4
    lat_step = (base_lat_ne - base_lat_sw)/4

    currentStep=0

    # we cut the extent in x/x and go through each sub-extent
    lat_sw = base_lat_sw
    while(lat_sw < base_lat_ne):
        lat_ne = lat_sw + lat_step
        #reset the lon_sw
        lon_sw = base_lon_sw
        while(lon_sw < base_lon_ne):
            lon_ne = lon_sw + lon_step
            payload = {'access_token': access_token,
                'lat_sw' : lat_sw,
                'lon_sw':lon_sw,
                'lat_ne':lat_ne,
                'lon_ne': lon_ne,
                    # filter weird/wrong data
                    'filter': 'true'
                }
            try:
                currentStep=currentStep+1
                #print(str(lat_ne)  + "   " + str(lon_ne))
                response = requests.post("https://api.netatmo.com/api/getpublicdata", data=payload)
                response.raise_for_status()
                resultJson=response.json()
                # parse the data
                parseData(resultJson)
            except requests.exceptions.HTTPError as error:
                print(error.response.status_code, error.response.text)
            lon_sw = lon_ne
        lat_sw = lat_ne



    # last part - json can be dumped in a file for test purpose or geoevent server integration
    #with open('dataNetAtmo.json', 'w') as outfile:  
    #    json.dump(measuresDict, outfile)

    # or we can get each object and push it as a feature !

    # connect to to the gis
    # get the feature layer
    gis = GIS("https://esrich.maps.arcgis.com", "cede_esrich", privatepass.getPass())       
    netAtmoFl =  gis.content.get('0078c29282174460b57ce7ca72262549').layers[0]        

    featuresToAdd = []
    ''' sample value
            _id": "70:ee:50:3f:4d:26",
            "X": 13.5000311,
            "Y": 52.5020974,
            "altitude": 37,
            "temperature": 10.4,
            "humidity": 71,
            "pressure": 1018.1,
            "rain_60min": "",
            "rain_24h": "",
            "rain_live": "",
            "rain_timeutc": "",
            "wind_strength": "",
            "wind_angle": "",
            "gust_strength": "",
            "gust_angle": "",
            "wind_timeutc": ""
            '''

    for measure in measuresDict["values"]:
        attr = dict()
        attr["id"] = measure["_id"]
        attr["altitude"] = measure["altitude"]
        attr["temperature"] = measure["temperature"]
        attr["humidity"] = measure["humidity"]
        attr["pressure"] = measure["pressure"]
        attr["rain_60min"] = measure["rain_60min"]
        attr["rain_24h"] = measure["rain_24h"]
        attr["rain_live"] = measure["rain_live"]
        attr["rain_timeutc"] = measure["rain_timeutc"]
        attr["wind_strength"] = measure["wind_strength"]
        attr["wind_angle"] = measure["wind_angle"]
        attr["gust_strength"] = measure["gust_strength"]
        attr["gust_angle"] = measure["gust_angle"]
        attr["wind_timeutc"] = measure["wind_timeutc"]
        lat = measure["Y"]
        lon = measure["X"]
        pt = Point({"x" : lon, "y" : lat, "spatialReference" : {"wkid" : 4326}})
        feature = Feature(pt,attr)
        featuresToAdd.append(feature)
    #add all the points  
    #test

    netAtmoFl.edit_features(adds=featuresToAdd)


    logging.info('Python timer trigger function ran at %s', utc_timestamp)