#!/usr/bin/env python3
# Written for Python 3.6
from __future__ import print_function

from datetime import date
import re, json, base64

import boto3

from common import request_input, yes_no_to_bool

# Walk user through choosing layers (and versions of layers)
# Allow them to choose to create a deployment from another deployment or to choose all latest or choose individual versions
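# For context: request_input(prompt, default) from common.py (not shown on
# this page) presumably prompts on stdin and falls back to the default on
# empty input, roughly:
#
#     def request_input(prompt, default):
#         answer = input('{} [{}] '.format(prompt, default))
#         return answer if answer else str(default)
#
# and yes_no_to_bool(answer, default) maps a 'y'/'n' answer to True/False.
# Both signatures are inferred from the call sites in these scripts.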

deployment_name = request_input('What is the name of this deployment?', date.today().strftime('vector-tiles-%Y-%m-%d'))
method = request_input('Create new deployment from previous deployment (p), arbitrary layers (l) or with all latest layers (a)?', 'p')

print('Connecting to and analysing S3 bucket')

terria_aws = boto3.session.Session(profile_name='terria')

# Get all layers currently on the server
s3 = terria_aws.resource('s3')
s3c = terria_aws.client('s3')
bucket = s3.Bucket('vector-tile-server')
matches = (re.search(r'config/(.*)-v(\d+)\.json$', obj.key) for obj in bucket.objects.filter(Prefix='config/'))
keys = [m.groups() for m in matches if m]  # Skip keys that aren't layer configs

# Dictionary of all versions of each layer
all_versions = {}
for key, version in keys:
    all_versions[key] = all_versions.get(key, []) + [int(version)]
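
# Not part of the original excerpt: a minimal sketch of how the 'all latest
# layers' method (a) might resolve all_versions into concrete layer versions.
# The chosen_versions name is hypothetical.
if method == 'a':
    chosen_versions = {layer: max(versions) for layer, versions in all_versions.items()}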
Example #2
# Imports this excerpt needs; list_props, unique_with_prop and temp_dir are
# module-level helpers defined elsewhere in the original script.
import os
import json
import subprocess
from collections import OrderedDict

from common import request_input, yes_no_to_bool

def process_shapefile(shapefile):
    layername = request_input(
        'What is the name of this layer?',
        os.path.basename(shapefile)[:-4])  # Chop off .shp
    attribute_list = list_props(shapefile, layername)
    print('Attributes in shapefile: {}'.format(', '.join(attribute_list)))
    generate_tiles_to = int(
        request_input('What zoom level should tiles be generated to?', 12))
    has_fid = yes_no_to_bool(request_input('Is there an FID attribute?', 'n'),
                             False)
    if has_fid:
        fid_attribute = request_input(
            'Which attribute should be used as an FID?', 'FID')
    server_url = request_input(
        'Where is the vector tile server hosted?',
        'http://staging.vector-tiles.terria.io/{}/{{z}}/{{x}}/{{y}}.pbf'.
        format(layername))
    description = request_input('What is the description of this region map?',
                                '')
    regionMapping_entries = OrderedDict()
    regionMapping_entry_name = request_input(
        'Name another regionMapping.json entry (leave blank to finish)', '')
    while regionMapping_entry_name != '':
        o = OrderedDict()
        o['layerName'] = ''  # Set in addRegionMap.js
        o['server'] = ''
        o['regionProp'] = request_input(
            'Which attribute should be used as the region property?', '')
        # Test this property
        if not unique_with_prop(shapefile, layername, o['regionProp']):
            o['disambigProp'] = request_input(
                'The given region property does not uniquely define every region. Which attribute should be used to disambiguate region matching?',
                '')
            o['disambigRegionId'] = request_input(
                'Which regionMapping definition does this disambiguation property come from?',
                '')
        else:
            print('The given region property uniquely defines each region.')
        o['aliases'] = request_input(
            'What aliases should this be available under? Separate aliases with a comma and space',
            '').split(', ')
        o['description'] = description

        regionMapping_entries[regionMapping_entry_name] = o
        regionMapping_entry_name = request_input(
            'Name another regionMapping.json entry (leave blank to finish)',
            '')
    cf = os.path.join(temp_dir, '{}.json'.format(layername))
    with open(cf, 'w') as f:
        json.dump(
            {
                'layerName': layername,
                'shapefile': os.path.join('..', shapefile),
                'generateTilesTo': generate_tiles_to,
                'addFID': not has_fid,
                'uniqueIDProp': fid_attribute if has_fid else 'FID',
                'server': server_url,
                'serverSubdomains': [],
                'regionMappingEntries': regionMapping_entries
            }, f)
    print('Generating tiles and config for {}'.format(layername))
    return layername, subprocess.Popen(['node', 'addRegionMap.js', cf])
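
# Hypothetical sketches of the two helpers used above; the real list_props and
# unique_with_prop are defined elsewhere in the original script, and these
# stand-ins assume the fiona package.
import fiona

def list_props(shapefile, layername):
    # All attribute (property) names declared in the shapefile's schema
    with fiona.open(shapefile, layer=layername) as src:
        return list(src.schema['properties'])

def unique_with_prop(shapefile, layername, prop):
    # True when every feature carries a distinct value for prop
    with fiona.open(shapefile, layer=layername) as src:
        values = [feature['properties'][prop] for feature in src]
    return len(values) == len(set(values))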
Example #3
# Imports this excerpt needs; process_shapefile is Example #2 above and
# temp_dir is a module-level constant in the original script.
import errno
import os
import re
import sys

import boto

from common import request_input

if __name__ == '__main__':
    # Create folders if they don't exist
    for directory in ['temp', temp_dir[:-1], 'data', 'config', 'epsg4326_shapefiles', 'output_files']:
        try:
            os.mkdir(directory)
        except OSError as exc:
            # Ignore "already exists"; re-raise anything else
            if exc.errno == errno.EEXIST and os.path.isdir(directory):
                pass
            else:
                raise
    print('Ensure you have the following directories locally: temp, python_temp, data, config, output_files')
    shapefiles = sys.argv[1:] or request_input('Which shapefiles do you want to add? Separate shapefiles with a comma and space', '').split(', ')
    procs = [process_shapefile(shp) for shp in shapefiles]
    for layer, proc in procs:
        proc.wait()
        print('Tile and config generation finished for {}'.format(layer))
    if any(proc.returncode != 0 for _, proc in procs):
        print('Processing of at least 1 shapefile failed')
    else:
        # Local processing done
        # Now send to s3
        conn = boto.connect_s3()
        bucket = conn.get_bucket('vector-tile-server')
        for layer, _ in procs:
            # Get the highest version of this layer and add 1 (defaults to 0 for a new layer)
            maxversion = max([int(re.search(r'v(\d+)\.json$', key.key).group(1)) for key in bucket.list(prefix='config/{}'.format(layer))] + [0])
            print('Uploading {}-v{} to S3'.format(layer, maxversion + 1))
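# Typical invocation (the script's filename is an assumption; it isn't shown
# on this page):
#   python add_shapefiles.py counties.shp states.shp
# Run with no arguments to be prompted for a comma-and-space-separated list.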

# The Python 2.7 originals, using boto rather than boto3: the deployment
# script from the top of this page, then the shapefile-adding script
# excerpted in Examples #2 and #3.
# Written for Python 2.7
from __future__ import print_function

from datetime import date
import re, json, base64

import boto
import boto.cloudformation
from boto.s3.key import Key
from common import request_input, yes_no_to_bool

# Walk user through choosing layers (and versions of layers)
# Allow them to choose to create a deployment from another deployment or to choose all latest or choose individual versions

name = request_input('What is the name of this deployment?', date.today().strftime('vector-tiles-%Y-%m-%d'))
method = request_input('Create new deployment from previous deployment (p), arbitrary layers (l) or with all latest layers (a)?', 'p')

print('Connecting to and analysing S3 bucket')

# Get all layers currently on the server
conn = boto.connect_s3()
bucket = conn.get_bucket('vector-tile-server')
matches = (re.search(r'config/(.*)-v(\d+)\.json$', key.key) for key in bucket.list(prefix='config/'))
keys = [m.groups() for m in matches if m]  # Skip keys that aren't layer configs

# Dictionary of all versions of each layer
all_versions = {}
for key, version in keys:
    all_versions[key] = all_versions.get(key, []) + [int(version)]

# The shapefile-adding script additionally needs these imports:
import os, errno, subprocess, sys
from collections import OrderedDict

def process_shapefile(shapefile):
    layername = os.path.basename(shapefile)[:-4] # Chop off .shp
    attribute_list = list_props(shapefile, layername)
    print('Attributes in shapefile: {}'.format(', '.join(attribute_list)))
    generate_tiles_to = int(request_input('What zoom level should tiles be generated to?', 12))
    has_fid = yes_no_to_bool(request_input('Is there an FID attribute?', 'n'), False)
    if has_fid:
        fid_attribute = request_input('Which attribute should be used as an FID?', 'FID')
    server_url = request_input('Where is the vector tile server hosted?', 'http://staging.vector-tiles.terria.io/{}/{{z}}/{{x}}/{{y}}.pbf'.format(layername))
    description = request_input('What is the description of this region map?','')
    regionMapping_entries = OrderedDict()
    regionMapping_entry_name = request_input('Name another regionMapping.json entry (leave blank to finish)', '')
    while regionMapping_entry_name != '':
        o = OrderedDict()
        o['layerName'] = '' # Set in addRegionMap.js
        o['server'] = ''
        o['regionProp'] = request_input('Which attribute should be used as the region property?', '')
        # Test this property
        if not unique_with_prop(shapefile, layername, o['regionProp']):
            o['disambigProp'] = request_input('The given region property does not uniquely define every region. Which attribute should be used to disambiguate region matching?','')
            o['disambigRegionId'] = request_input('Which regionMapping definition does this disambiguation property come from?', '')
        else:
            print('The given region property uniquely defines each region.')
        o['aliases'] = request_input('What aliases should this be available under? Separate aliases with a comma and space', '').split(', ')
        o['description'] = description
        
        regionMapping_entries[regionMapping_entry_name] = o
        regionMapping_entry_name = request_input('Name another regionMapping.json entry (leave blank to finish)', '')
    cf = os.path.join(temp_dir, '{}.json'.format(layername))
    with open(cf, 'w') as f:
        json.dump({
            'layerName': layername,
            'shapefile': os.path.join('..', shapefile),
            'generateTilesTo': generate_tiles_to,
            'addFID': not has_fid,
            'uniqueIDProp': fid_attribute if has_fid else 'FID',
            'server': server_url,
            'serverSubdomains': [],
            'regionMappingEntries': regionMapping_entries
        }, f)
    print('Generating tiles and config for {}'.format(layername))
    return layername, subprocess.Popen(['node', 'addRegionMap.js', cf])


if __name__ == '__main__':
    # Create folders if they don't exist
    for directory in ['temp', temp_dir[:-1], 'data', 'config', 'epsg4326_shapefiles', 'output_files']:
        try:
            os.mkdir(directory)
        except OSError as exc:
            # Ignore "already exists"; re-raise anything else
            if exc.errno == errno.EEXIST and os.path.isdir(directory):
                pass
            else:
                raise
    print('Ensure you have the following directories locally: temp, python_temp, data, config, output_files')
    shapefiles = sys.argv[1:] or request_input('Which shapefiles do you want to add? Separate shapefiles with a comma and space', '').split(', ')
    procs = [process_shapefile(shp) for shp in shapefiles]
    for layer, proc in procs:
        proc.wait()
        print('Tile and config generation finished for {}'.format(layer))
    if any(proc.returncode != 0 for _, proc in procs):
        print('Processing of at least 1 shapefile failed')
    else:
        # Local processing done
        # Now send to s3
        conn = boto.connect_s3()
        bucket = conn.get_bucket('vector-tile-server')
        for layer, _ in procs:
            # Get the highest version of this layer and add 1
            # Get the highest version of this layer and add 1 (defaults to 0 for a new layer)
            maxversion = max([int(re.search(r'v(\d+)\.json$', key.key).group(1)) for key in bucket.list(prefix='config/{}'.format(layer))] + [0])
            print('Uploading {}-v{} to S3'.format(layer, maxversion + 1))
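            # The excerpt ends before the upload itself; a minimal sketch of
            # what presumably follows, using the boto Key API imported above
            # (the local file layout is an assumption):
            k = Key(bucket)
            k.key = 'config/{}-v{}.json'.format(layer, maxversion + 1)
            k.set_contents_from_filename(os.path.join('config', '{}.json'.format(layer)))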