Beispiel #1
0
def download_from_arcmap(url, out_dir):
    """Download every feature of the ArcGIS layer at *url* and write it to
    *out_dir* as a single GeoJSON FeatureCollection file.

    Args:
        url: ArcGIS REST feature-layer endpoint.
        out_dir: Directory the .geojson file is written into.

    Returns:
        The generated file name without its ".geojson" extension.
    """
    split = urlsplit(url)

    # NOTE(review): ":" in the timestamp makes this name invalid on Windows
    # file systems — confirm the target platform before changing the format,
    # since callers receive this name as the return value.
    filename_str = today.strftime("%Y-%m-%d_%H:%M:%S") + split.path.replace(
        "/", ".") + ".geojson"

    d = EsriDumper(url, timeout=300)

    feature_count = d.get_feature_count()
    print(str(feature_count) + " total Features")

    # Collect all features, advancing the progress bar one feature at a time.
    all_features = []
    with tqdm(total=feature_count) as pbar:
        for feature in d:
            all_features.append(feature)
            pbar.update(1)

    # Wrap the feature list in a FeatureCollection envelope; concatenation is
    # valid JSON because json.dumps(all_features) emits the features array.
    start_string = """{"type": "FeatureCollection","features":"""
    end_string = "}"
    file_txt = start_string + json.dumps(all_features) + end_string

    with open(out_dir + "/" + filename_str, 'w') as outfile:
        outfile.write(file_txt)
        # BUG FIX: the original also called json.dumps(d.get_metadata()) here
        # and discarded the result — dead code, removed.

    return filename_str.replace(".geojson", "")
Beispiel #2
0
    def test_oid_enumeration_when_statistics_doesnt_work(self):
        """Features are still dumped via OID enumeration when the
        outStatistics endpoint returns unusable data."""
        # Raw strings: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-mi-kent/us-mi-kent-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-mi-kent/us-mi-kent-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*outStatistics=.*',
            'us-mi-kent/us-mi-kent-statistics.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-mi-kent/us-mi-kent-ids-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-mi-kent/us-mi-kent-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(15, len(data))
Beispiel #3
0
    def test_coerces_floats_to_integer(self):
        """All 43 features are dumped with float OIDs coerced to integers."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-mo-columbia/us-mo-columbia-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-mo-columbia/us-mo-columbia-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-mo-columbia/us-mo-columbia-ids-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-mo-columbia/us-mo-columbia-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(43, len(data))
Beispiel #4
0
    def test_proxy_requests(self):
        """Every request is routed through the configured proxy URL prefix."""
        fixtures = (
            (r'http://proxy/\?http://example\.com\?f=json',
             'GET', 'us-mo-columbia/us-mo-columbia-metadata.json'),
            (r'http://proxy/\?http://example\.com/.*returnCountOnly=true.*',
             'GET', 'us-mo-columbia/us-mo-columbia-count-only.json'),
            (r'http://proxy/\?http://example\.com/.*returnIdsOnly=true.*',
             'GET', 'us-mo-columbia/us-mo-columbia-ids-only.json'),
            (r'http://proxy/\?http://example\.com/.*query.*',
             'POST', 'us-mo-columbia/us-mo-columbia-0.json'),
        )
        for pattern, http_method, fixture in fixtures:
            self.add_fixture_response(pattern, fixture, method=http_method)

        dump = EsriDumper(self.fake_url, proxy='http://proxy?')
        data = list(dump)

        self.assertEqual(43, len(data))
Beispiel #5
0
    def test_handles_exception(self):
        """A generic exception on the feature request surfaces as an
        EsriDownloadError with a connection-failure message."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-mo-columbia/us-mo-columbia-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-mo-columbia/us-mo-columbia-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-mo-columbia/us-mo-columbia-ids-only.json',
            method='GET',
        )
        self.responses.add(
            method='POST',
            url=re.compile('.*query.*'),
            body=Exception(),
        )

        dump = EsriDumper(self.fake_url)
        # assertRaisesRegexp is a long-deprecated alias of assertRaisesRegex.
        with self.assertRaisesRegex(EsriDownloadError, "Could not connect to URL"):
            list(dump)
Beispiel #6
0
    def test_object_id_enumeration(self):
        """Features are fetched by enumerating object IDs when that path is
        available."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-ca-carson/us-ca-carson-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-ca-carson/us-ca-carson-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-ca-carson/us-ca-carson-ids-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-ca-carson/us-ca-carson-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(6, len(data))
Beispiel #7
0
    def test_statistics_pagination(self):
        """Pagination driven by the outStatistics endpoint yields the single
        expected feature."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-ms-madison/us-ms-madison-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-ms-madison/us-ms-madison-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*outStatistics=.*',
            'us-ms-madison/us-ms-madison-outStatistics.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-ms-madison/us-ms-madison-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(1, len(data))
Beispiel #8
0
def main(event, context):
    """Entry point (AWS-Lambda-style signature): dump the animal-care
    feature layer, convert its epoch date fields, and upload the rows to S3
    as "broward_pets.csv".  *event* and *context* are unused.
    """
    d = EsriDumper(
        "https://services.arcgis.com/JMAJrTsHNLrSsWf5/arcgis/rest/services/Animal_Care_Intake_and_Outcome_Data/FeatureServer/0"
    )

    # Every date-bearing property, converted in the same order as before.
    date_fields = (
        "OutcomeWeightDate",
        "Extra11",
        "LatestUpdate",
        "IntakeWeightDate",
        "IntakeDate",
        "OutcomeDate",
        "WeightDate",
    )

    pet_list = []
    for feature in d:
        props = feature["properties"]
        # Only rows with a positive OutcomeWeightDate are kept.
        if props["OutcomeWeightDate"] > 0:
            for field in date_fields:
                props[field] = to_datetime(props[field])
            pet_list.append(props)

    save_S3_sheet(pet_list, "broward_pets.csv")
Beispiel #9
0
    def test_advanced_query_pagination_incorrect_outfield_name(self):
        """Pagination still succeeds when the statistics response uses an
        unexpected out-field name."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-ca-tuolumne/us-ca-tuolumne-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-ca-tuolumne/us-ca-tuolumne-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*outStatistics=.*',
            'us-ca-tuolumne/us-ca-tuolumne-statistics.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-ca-tuolumne/us-ca-tuolumne-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(15, len(data))
Beispiel #10
0
    def test_handles_timeout_error(self):
        """A socket timeout during the feature request surfaces as an
        EsriDownloadError with a timeout message."""
        import socket

        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-mo-columbia/us-mo-columbia-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-mo-columbia/us-mo-columbia-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-mo-columbia/us-mo-columbia-ids-only.json',
            method='GET',
        )
        self.responses.add(
            method='POST',
            url=re.compile('.*query.*'),
            body=socket.timeout(),
        )

        dump = EsriDumper(self.fake_url)
        # assertRaisesRegexp is a long-deprecated alias of assertRaisesRegex.
        with self.assertRaisesRegex(EsriDownloadError, "Timeout when connecting to URL"):
            list(dump)
def allGeoDataForEachBlock(countyInfoList, existingBlockData):
    """Attach TIGERweb census-block geometry to each record in
    *existingBlockData*, downloading one county at a time.

    Args:
        countyInfoList: county dicts with at least 'NAME' and 'county'
            (county FIPS code) keys.
        existingBlockData: block dicts keyed by 'state', 'county', 'tract'
            and 'block' FIPS codes; matched records gain a 'geometry' key.

    Returns:
        The list of matched block dicts, or None when existingBlockData is
        empty.
    """
    if (len(existingBlockData) > 0):
        print('*** Getting geo info on all blocks ***')
        # Assumes all blocks belong to one state — TODO confirm with callers.
        stateFIPSCode = existingBlockData[0]['state']

        startTimeForProcessingState = time.localtime()
        fullBlockListWithGeo = []
        for county in countyInfoList:
            print('Getting all geo info in {0}'.format(county['NAME']))
            startTimeForProcessingCounty = time.localtime()
            countyFIPSCode = county['county']

            blockGeometries = EsriDumper(
                url=
                'https://tigerweb.geo.census.gov/arcgis/rest/services/Census2010/tigerWMS_Census2010/MapServer/14',
                extra_query_args={
                    'where':
                    'STATE=\'{0}\' AND COUNTY=\'{1}\''.format(
                        stateFIPSCode, countyFIPSCode),
                    'orderByFields':
                    'TRACT, BLKGRP, BLOCK'
                },
                timeout=120
            )  # extending timeout because there were some long load times
            # https://github.com/openaddresses/pyesridump

            for blockGeometry in blockGeometries:
                blockGeoProperties = blockGeometry['properties']
                blockGeoStateFIPS = blockGeoProperties['STATE']
                blockGeoCountyFIPS = blockGeoProperties['COUNTY']
                blockGeoTractFIPS = blockGeoProperties['TRACT']
                blockGeoBlockFIPS = blockGeoProperties['BLOCK']

                matchingBlockData = next(
                    (item for item in existingBlockData
                     if item['state'] == blockGeoStateFIPS and item['county']
                     == blockGeoCountyFIPS and item['tract'] ==
                     blockGeoTractFIPS and item['block'] == blockGeoBlockFIPS),
                    None)
                # BUG FIX: next() returns None when no block matches; the
                # original then raised TypeError on the subscript below.
                # Skip geometries with no matching block record instead.
                if matchingBlockData is None:
                    continue
                matchingBlockData['geometry'] = blockGeometry['geometry']
                fullBlockListWithGeo.append(matchingBlockData)

            endTimeForProcessingCounty = time.localtime()
            elapsedSecondsForProcessingCounty = (
                time.mktime(endTimeForProcessingCounty) -
                time.mktime(startTimeForProcessingCounty))
            print('   {0} took {1} seconds'.format(
                county['NAME'], elapsedSecondsForProcessingCounty))

        endTimeForProcessingState = time.localtime()
        elapsedMinutesForProcessingState = (
            time.mktime(endTimeForProcessingState) -
            time.mktime(startTimeForProcessingState)) / 60
        print(
            'It took {0} total minutes to get all the requested block geo data'
            .format(elapsedMinutesForProcessingState))
        return fullBlockListWithGeo
    else:
        return None
Beispiel #12
0
    def test_geo_queries_when_oid_enumeration_times_out(self):
        """When the OID-enumeration request times out, the dumper falls back
        to geometry (bounding-box) queries."""
        print("Test OID queries")
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-il-cook/metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-il-cook/count-only.json',
            method='GET',
        )

        # Dang, there are too many OIDs to return. The request times out.
        import socket
        self.responses.add(
            method='GET',
            url=re.compile('.*returnIdsOnly=true.*'),
            body=socket.timeout(),
        )

        # We expect to see geometry queries now: one full page followed by
        # four partial pages as the extent is split into smaller boxes.
        self.add_fixture_response(
            '.*geometry=.*',
            'us-il-cook/page-full.json',
            method='GET',
        )
        for _ in range(4):
            self.add_fixture_response(
                '.*geometry=.*',
                'us-il-cook/page-partial.json',
                method='GET',
            )

        dump = EsriDumper(self.fake_url, max_retries=0)
        data = list(dump)

        # Note that this count is entirely fake because of the deduping happening
        # This test is only designed to make sure we're splitting into smaller
        # bounding boxes.
        self.assertEqual(2, len(data))
Beispiel #13
0
def all_geo_data_for_each_county(existing_county_data):
    """Attach TIGERweb county geometry to each record in
    *existing_county_data*.

    Args:
        existing_county_data: county dicts with 'state', 'county' (FIPS
            codes) and 'NAME' keys; matched records gain a 'geometry' key.

    Returns:
        The list of matched county dicts, or None when the input is empty.
    """
    if len(existing_county_data) > 0:
        print('*** Getting geo info on all counties ***')
        # Assumes all counties belong to one state — TODO confirm.
        state_fips_code = existing_county_data[0]['state']

        start_time_for_processing_state = time.localtime()
        full_county_list_with_geo = []

        # Build "STATE='..' AND (NAME='a' OR NAME='b' ...)" with a join.
        # BUG FIX: the original used list.index(county) inside the loop
        # (O(n^2)), and produced a trailing " OR " — a malformed clause —
        # whenever two records shared the same NAME, since index() always
        # returns the first occurrence.
        name_clauses = ' OR '.join('NAME=\'{0}\''.format(county['NAME'])
                                   for county in existing_county_data)
        where_argument = 'STATE=\'{0}\' AND ({1})'.format(
            state_fips_code, name_clauses)

        county_geometries = EsriDumper(
            url=
            'https://tigerweb.geo.census.gov/arcgis/rest/services/Census2010/tigerWMS_Census2010/MapServer/90',
            extra_query_args={
                'where': where_argument,
                'orderByFields': 'COUNTY'
            })
        # https://github.com/openaddresses/pyesridump

        for county_geometry in county_geometries:
            county_geo_properties = county_geometry['properties']
            county_geo_state_fips = county_geo_properties['STATE']
            county_geo_county_fips = county_geo_properties['COUNTY']

            matching_county_data = next(
                (item for item in existing_county_data
                 if item['state'] == county_geo_state_fips
                 and item['county'] == county_geo_county_fips), None)

            # BUG FIX: next() returns None when no record matches; the
            # original then raised TypeError on the subscript below.
            if matching_county_data is None:
                continue
            matching_county_data['geometry'] = county_geometry['geometry']
            full_county_list_with_geo.append(matching_county_data)

        end_time_for_processing_state = time.localtime()
        # Value is in seconds (no /60 here), matching the printed message;
        # the original local was misleadingly named "..._minutes_...".
        elapsed_seconds_for_processing_state = (
            time.mktime(end_time_for_processing_state) -
            time.mktime(start_time_for_processing_state))
        print(
            'It took {0} total seconds to get all the requested county geo data'
            .format(elapsed_seconds_for_processing_state))
        return full_county_list_with_geo
    else:
        return None
def getAllGeoDataForFederalCongressionalDistricts(stateFIPSCode):
    """Return an ExistingDistrict (district number + GeoJSON geometry) for
    every federal congressional district in the state with FIPS code
    *stateFIPSCode*."""
    districtGeometries = EsriDumper(
        url='https://tigerweb.geo.census.gov/arcgis/rest/services/Generalized_ACS2017/Legislative/MapServer/5',
        extra_query_args={'where': 'STATE=\'{0}\''.format(stateFIPSCode)})
    # https://github.com/openaddresses/pyesridump

    return [
        ExistingDistrict(
            districtNumber=geometry['properties']['BASENAME'],
            geoJSONGeometry=geometry['geometry'])
        for geometry in districtGeometries
    ]
Beispiel #15
0
    def test_geo_queries_when_oid_enumeration_doesnt_work(self):
        """When the returned ID list is unusable, the dumper falls back to
        geometry (bounding-box) queries."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-il-cook/metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-il-cook/count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-il-cook/ids-only.json',
            method='GET',
        )
        # One full geometry page followed by four partial pages as the
        # extent is split into smaller bounding boxes.
        self.add_fixture_response(
            '.*geometry=.*',
            'us-il-cook/page-full.json',
            method='GET',
        )
        for _ in range(4):
            self.add_fixture_response(
                '.*geometry=.*',
                'us-il-cook/page-partial.json',
                method='GET',
            )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        # Note that this count is entirely fake because of the deduping happening
        # This test is only designed to make sure we're splitting into smaller
        # bounding boxes.
        self.assertEqual(2, len(data))
def py_esri_dump(feature_server_url, map_geojson_path):
    """Dump every feature from *feature_server_url*, write the feature list
    to *map_geojson_path*, and return the same payload as a JS variable
    declaration suitable for an inline HTML script."""
    print(feature_server_url)
    dumper = EsriDumper(feature_server_url)
    all_features = list(dumper)  # materialize once; reused for file and JS

    # Persist the raw feature list for the map page.
    # (Could be split into separate fetch/write/render helpers; kept as one
    # function to call EsriDumper only once.)
    with open(map_geojson_path, 'w') as geojson_file:
        json.dump(all_features, geojson_file)

    # Inline-script form: a JS variable holding the identical JSON payload.
    return 'var esriGeoJSON = ' + json.dumps(all_features)
def day_schedule(day, truck, loc):
    """Print a PrettyTable of food-truck schedule entries fuzzy-matching
    *day*, *truck* and *loc*.  An empty-string filter matches everything.

    Args:
        day: Day-of-week filter, or '' for all days.
        truck: Truck-name filter, or '' for all trucks.
        loc: Location filter, or '' for all locations.
    """
    x = PrettyTable()
    x.field_names = ["Truck", "Day", "Location"]

    d = EsriDumper(
        'https://services.arcgis.com/sFnw0xNflSi8J0uh/arcgis/rest/services/food_trucks_schedule/FeatureServer/0'
    )

    all_features = list(d)

    def _flags(prop_name, query):
        # 1/0 per feature: fuzzy match against the query, or match-all when
        # the query is empty.  Replaces three copy-pasted blocks.
        if query == '':
            return [1 for _ in all_features]
        return [
            1
            if fuzz.partial_ratio(f["properties"][prop_name], query) > THRESH
            else 0 for f in all_features
        ]

    day_dict = _flags("Day", day)
    truck_dict = _flags("Truck", truck)
    loc_dict = _flags("Loc", loc)

    # A row is selected only when all three filters match.
    selections = np.array(day_dict) & np.array(truck_dict) & np.array(loc_dict)
    for (feature, flag) in zip(all_features, selections):
        if flag:
            x.add_row([
                feature["properties"]["Truck"], feature["properties"]["Day"],
                feature["properties"]["Loc"]
            ])
    print(x)
Beispiel #18
0
    def test_object_id_retries(self):
        """The object-ID request path retries after a timeout and after a
        connection error, then succeeds on the following attempt."""
        import socket

        import requests

        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-ca-carson/us-ca-carson-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-ca-carson/us-ca-carson-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-ca-carson/us-ca-carson-ids-only.json',
            method='GET',
        )

        # Should retry after a timeout
        self.responses.add(
            method='POST',
            url=re.compile('.*query.*'),
            body=socket.timeout(),
        )

        # Should retry after a ConnectionError
        self.responses.add(
            method='POST',
            url=re.compile('.*query.*'),
            body=requests.exceptions.ConnectionError(),
        )

        # Second request should pass
        self.add_fixture_response(
            r'.*query.*',
            'us-ca-carson/us-ca-carson-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(6, len(data))
Beispiel #19
0
    def test_advanced_query_pagination(self):
        """Advanced-query pagination returns the full 1000-feature page."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-esri-test/us-esri-test-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-esri-test/us-esri-test-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-esri-test/us-esri-test-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(1000, len(data))
Beispiel #20
0
def get_parcel_geometry(request):
    """Django view: return the GeoJSON feature for the parcel whose PIN14
    matches the ``pin`` GET parameter.

    Responses:
        200 with the feature JSON on success;
        404 when no parcel matches;
        500 on any other failure (also reported to the error client);
        400 when ``pin`` is missing.
    """
    pin = request.GET.get('pin')

    if pin:
        # NOTE(security review): `pin` is untrusted user input interpolated
        # into the WHERE clause; the upstream ArcGIS service parses it as
        # SQL-like syntax.  Consider validating that pin is purely numeric.
        query_args = {
            'f': 'json',
            'outSR': 4326,
            'where': 'PIN14={}'.format(pin)
        }
        dumper = EsriDumper(
            'http://cookviewer1.cookcountyil.gov/arcgis/rest/services/cookVwrDynmc/MapServer/44',
            extra_query_args=query_args)

        try:
            geometry = list(dumper)[0]
            response = HttpResponse(json.dumps(geometry),
                                    content_type='application/json')
        except (EsriDownloadError, StopIteration, IndexError):
            # No feature came back for this PIN (unused `as e` binding
            # removed).
            resp = {
                'status': 'error',
                'message': "PIN '{}' could not be found".format(pin)
            }
            response = HttpResponseNotFound(json.dumps(resp),
                                            content_type='application/json')
        except Exception as e:
            # Unexpected failure: report it, then return a 500 payload.
            client.captureException()
            resp = {
                'status': 'error',
                # Typo fix: "occured" -> "occurred".
                'message': "Unknown error occurred '{}'".format(str(e))
            }
            response = HttpResponse(json.dumps(resp),
                                    content_type='application/json')
            response.status_code = 500
    else:
        resp = {'status': 'error', 'message': "'pin' is a required parameter"}
        response = HttpResponse(json.dumps(resp),
                                content_type='application/json')
        response.status_code = 400

    return response
Beispiel #21
0
    def test_oid_enumeration_when_wrong_min_max_is_given(self):
        """OID enumeration recovers when the statistics endpoint reports an
        incorrect min/max object-ID range."""
        # Raw string: '\?' in a plain string literal is an invalid escape
        # sequence (SyntaxWarning on modern Python).
        self.add_fixture_response(
            r'.*/\?f=json.*',
            'us-fl-polk/us-fl-polk-metadata.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnCountOnly=true.*',
            'us-fl-polk/us-fl-polk-count-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*outStatistics=.*',
            'us-fl-polk/us-fl-polk-statistics.json',
            method='GET',
        )
        # The ids-only fixture is registered twice — presumably one per
        # request after the bad min/max forces a second ID fetch; confirm
        # against the dumper's retry logic before collapsing.
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-fl-polk/us-fl-polk-ids-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*returnIdsOnly=true.*',
            'us-fl-polk/us-fl-polk-ids-only.json',
            method='GET',
        )
        self.add_fixture_response(
            r'.*query.*',
            'us-fl-polk/us-fl-polk-0.json',
            method='POST',
        )

        dump = EsriDumper(self.fake_url)
        data = list(dump)

        self.assertEqual(10, len(data))
Beispiel #22
0
"""
Download GeoJSON data for Cincinnati's neighbourhoods.
Not currently working; meant to replicate the behaviour of:
esri2geojson https://services.arcgis.com/JyZag7oO4NteHGiq/arcgis/rest/services/CPD_Neighborhoods/FeatureServer/0 maps/CPD_Neighborhoods.json
"""

import json
from esridump.dumper import EsriDumper

DATA = EsriDumper(
    'https://services.arcgis.com/JyZag7oO4NteHGiq/arcgis/rest/services/CPD_Neighborhoods/FeatureServer/0'
)

print(json.dumps(DATA))
Beispiel #23
0
import json
import math
import sys
import logging

from esridump.dumper import EsriDumper

logging.basicConfig(level=logging.DEBUG)

outfile_name = sys.argv[1] if len(sys.argv) > 1 else 'medellin.geojson'

d = EsriDumper(
    'https://www.medellin.gov.co/arcgis/rest/services/ServiciosPlaneacion/Pot_base1/MapServer/0'
)

outfile = open(outfile_name, 'w')

outfile.write('{"type":"FeatureCollection","features":[\n')

features = iter(d)

try:
    feature = next(features)

    while True:
        if not feature.get('geometry') or feature['geometry']['coordinates'][
                0] == 'NaN' or (
                    type(feature['geometry']['coordinates'][0]) is float
                    and math.isnan(feature['geometry']['coordinates'][0])):
            feature = next(features)
            continue
Beispiel #24
0
 def __init__(self, url: str, **kwargs):
     """Initialize the layer wrapper around an EsriDumper.

     Args:
         url: Feature-layer endpoint, passed to the parent constructor.
         **kwargs: "crs" selects the output spatial reference (default
             4326); the whole kwargs dict (including "crs", which is read
             but not removed) is also forwarded to EsriDumper.
     """
     super(Layer, self).__init__(url)
     self.crs = kwargs.get("crs") or 4326
     # NOTE(review): self.url is presumably set by the parent __init__ from
     # the url argument — confirm against the base class.
     self.layer = EsriDumper(self.url, outSR=str(self.crs), **kwargs)
  var geoJsonLayer = L.geoJson(esriGeoJSON).addTo(map);"""
html_script_leaflet_geotype_geometry_collection = """
  var geoJsonLayer = L.geoJson(esriGeoJSON).addTo(map);"""
html_script_leaflet_geotype_error = """
  var geoJsonLayer = L.geoJson(esriGeoJSON).addTo(map);"""

# call leaflet map function, close off function call
html_script_leaflet_map_call = """
};
makeMap();
</script>\n"""

# create first part of html script leaflet output
# make this into a function!!
# also, second EsriDumper call....combine with first? seems like one call, output data, whatever functions for manipulating data
d = EsriDumper(feature_server_url)


# get feature geometry type of geojson data
def geojson_geometry_type(d):
    """Return the JSON-encoded geometry type (e.g. '"Point"') of the first
    feature yielded by *d*, or None when *d* yields nothing.

    Only the first feature is inspected — assumes the layer's features all
    share one geometry type; TODO confirm for mixed-type layers.
    """
    for feature in d:
        # Original computed json.dumps twice into a dead local; once is
        # enough, and the empty-iterable None return is now explicit.
        return json.dumps(feature["geometry"]["type"])
    return None


geojson_geometry_type_feature = geojson_geometry_type(d)
""" If a string has single or double quotes around it, remove them. Make sure the pair of quotes match. If a matching pair of quotes is not found, return the string unchanged. """

Beispiel #26
0
import json
import math
import sys
import logging

from esridump.dumper import EsriDumper

logging.basicConfig(level=logging.DEBUG)

outfile_name = sys.argv[1] if len(sys.argv) > 1 else 'bogota.geojson'

d = EsriDumper('http://serviciosgis.catastrobogota.gov.co/arcgis/rest/services/Mapa_Referencia/Mapa_Referencia/MapServer/33')

outfile = open(outfile_name, 'w')

outfile.write('{"type":"FeatureCollection","features":[\n')

features = iter(d)

try:
    feature = next(features)

    while True:
        if not feature.get('geometry') or feature['geometry']['coordinates'][0] == 'NaN' or (type(feature['geometry']['coordinates'][0]) is float and math.isnan(feature['geometry']['coordinates'][0])):
            feature = next(features)
            continue
        props = feature['properties']
        interior = props['PDoCInteri']
        if interior and interior.strip():
            street = props['PDoTexto']
            if street and street.strip() and not street.strip()[0].isdigit():
Beispiel #27
0
def all_geo_data_for_each_block(county_info_list, existing_block_data):
    """Attach TIGERweb census-block geometry to each record in
    *existing_block_data*, downloading one county at a time.

    Args:
        county_info_list: county dicts with at least 'NAME' and 'county'
            (county FIPS code) keys.
        existing_block_data: block dicts keyed by 'state', 'county',
            'tract' and 'block' FIPS codes; matched records gain a
            'geometry' key.

    Returns:
        The list of matched block dicts, or None when existing_block_data
        is empty.
    """
    if len(existing_block_data) > 0:
        print('*** Getting geo info on all blocks ***')
        # Assumes all blocks belong to one state — TODO confirm with callers.
        state_fips_code = existing_block_data[0]['state']

        start_time_for_processing_state = time.localtime()
        full_block_list_with_geo = []
        for county in county_info_list:
            print('Getting all geo info in {0}'.format(county['NAME']))
            start_time_for_processing_county = time.localtime()
            county_fips_code = county['county']

            block_geometries = EsriDumper(
                url=
                'https://tigerweb.geo.census.gov/arcgis/rest/services/Census2010/tigerWMS_Census2010/MapServer/14',
                extra_query_args={
                    'where':
                    'STATE=\'{0}\' AND COUNTY=\'{1}\''.format(
                        state_fips_code, county_fips_code),
                    'orderByFields':
                    'TRACT, BLKGRP, BLOCK'
                },
                timeout=120
            )  # extending timeout because there were some long load times
            # https://github.com/openaddresses/pyesridump

            for block_geometry in block_geometries:
                block_geo_properties = block_geometry['properties']
                block_geo_state_fips = block_geo_properties['STATE']
                block_geo_county_fips = block_geo_properties['COUNTY']
                block_geo_tract_fips = block_geo_properties['TRACT']
                block_geo_block_fips = block_geo_properties['BLOCK']

                matching_block_data = next(
                    (item for item in existing_block_data
                     if item['state'] == block_geo_state_fips
                     and item['county'] == block_geo_county_fips
                     and item['tract'] == block_geo_tract_fips
                     and item['block'] == block_geo_block_fips), None)

                # BUG FIX: next() returns None when no block matches; the
                # original then raised TypeError on the subscript below.
                # Skip geometries with no matching block record instead.
                if matching_block_data is None:
                    continue
                matching_block_data['geometry'] = block_geometry['geometry']
                full_block_list_with_geo.append(matching_block_data)

            end_time_for_processing_county = time.localtime()
            elapsed_seconds_for_processing_county = (
                time.mktime(end_time_for_processing_county) -
                time.mktime(start_time_for_processing_county))
            print('   {0} took {1} seconds'.format(
                county['NAME'], elapsed_seconds_for_processing_county))

        end_time_for_processing_state = time.localtime()
        elapsed_minutes_for_processing_state = (
            time.mktime(end_time_for_processing_state) -
            time.mktime(start_time_for_processing_state)) / 60
        print(
            'It took {0} total minutes to get all the requested block geo data'
            .format(elapsed_minutes_for_processing_state))
        return full_block_list_with_geo

    else:
        return None