Example 1
def process_shapefile(shapefile):
    layername = os.path.basename(shapefile)[:-4] # Chop off .shp
    attribute_list = list_props(shapefile, layername)
    print('Attributes in shapefile: {}'.format(', '.join(attribute_list)))
    generate_tiles_to = int(request_input('What zoom level should tiles be generated to?', 12))
    has_fid = yes_no_to_bool(request_input('Is there an FID attribute?', 'n'), False)
    if has_fid:
        fid_attribute = request_input('Which attribute should be used as an FID?', 'FID')
    server_url = request_input('Where is the vector tile server hosted?', 'http://staging.vector-tiles.terria.io/{}/{{z}}/{{x}}/{{y}}.pbf'.format(layername))
    description = request_input('What is the description of this region map?','')
    regionMapping_entries = OrderedDict()
    regionMapping_entry_name = request_input('Name another regionMapping.json entry (leave blank to finish)', '')
    while regionMapping_entry_name != '':
        o = OrderedDict()
        o['layerName'] = '' # Set in addRegionMap.js
        o['server'] = ''
        o['regionProp'] = request_input('Which attribute should be used as the region property?', '')
        # Test this property
        if not unique_with_prop(shapefile, layername, o['regionProp']):
            o['disambigProp'] = request_input('The given region property does not uniquely define every region. Which attribute should be used to disambiguate region matching?','')
            o['disambigRegionId'] = request_input('Which regionMapping definition does this disambiguation property come from?', '')
        else:
            print('The given region property uniquely defines each region.')
        o['aliases'] = request_input('What aliases should this be available under? Separate aliases with a comma and space', '').split(', ')
        o['description'] = description
        
        regionMapping_entries[regionMapping_entry_name] = o
        regionMapping_entry_name = request_input('Name another regionMapping.json entry (leave blank to finish)', '')
    cf = os.path.join(temp_dir, '{}.json'.format(layername))
    with open(cf, 'w') as f:
        json.dump({
            'layerName': layername,
            'shapefile': os.path.join('..', shapefile),
            'generateTilesTo': generate_tiles_to,
            'addFID': not has_fid,
            'uniqueIDProp': fid_attribute if has_fid else 'FID',
            'server': server_url,
            'serverSubdomains': [],
            'regionMappingEntries': regionMapping_entries
        }, f)
    print('Generating tiles and config for {}'.format(layername))
    return layername, subprocess.Popen(['node', 'addRegionMap.js', cf])
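The helpers request_input and yes_no_to_bool used throughout this example are not shown. A minimal sketch of what they could look like, assuming request_input falls back to its default when the user just presses Enter (the exact behaviour in the original script may differ):

def request_input(prompt, default):
    # Hypothetical helper: prompt on stdin and return the default on empty input.
    answer = input('{} [{}]: '.format(prompt, default)).strip()
    return answer if answer != '' else default

def yes_no_to_bool(answer, default):
    # Hypothetical helper: map a y/n answer to a bool, falling back to the default.
    answer = answer.strip().lower()
    if answer in ('y', 'yes'):
        return True
    if answer in ('n', 'no'):
        return False
    return default
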
Example 2
    remove = [layer.strip() for layer in request_input('Which layers do you want to remove? Separate layers with a comma:', '').split(',') if layer.strip() != '']
    for layer in remove:
        del old_data[layer]
    add = [layer_str.strip().split(':') for layer_str in request_input('Which layers do you want to add/change versions? Format layers like layer_name:version and separate layers with a comma:', '').split(',') if layer_str.strip() != '']
    for layer, version in add:
        old_data[layer] = int(version)
    deployment_data = old_data
elif method == 'l':
    print('The following layers are available:')
    print('\n'.join('{:40}  {:7}'.format(*t) for t in [('Layer name', 'Version')] + list(latest_layers.items())))
    deployment_data = dict([(layer_str.split(':')[0], int(layer_str.split(':')[1])) for layer_str in filter(None, request_input('Which layers do you want to add/change versions? Format layers like layer_name:version and separate layers with a comma and space:', '').split(', '))])

key = 'deployments/{}.json'.format(deployment_name)
obj = s3c.put_object(
    Bucket=bucket.name,
    Key=key,
    Body=json.dumps({"data": deployment_data}).encode('utf-8')
)
if yes_no_to_bool(request_input('Deployment file {} uploaded to S3. Start an EC2 with this deployment configuration?'.format(key), 'y'), False):
    # Retrieve user-data and template from S3
    server_versions = sorted([re.search(r'server-(.*).tar.gz$', key.key).group(1) for key in bucket.objects.filter(Prefix='server-')], key=lambda s: [int(n) for n in s.split('.')], reverse=True)
    server_version = request_input('Out of {}, which server version do you want to use?'.format(', '.join(server_versions)), server_versions[0])

    userdata = open('user-data').read().replace('{~STACK NAME~}', deployment_name).replace('{~SERVER VERSION~}', server_version)
    template = open('aws-template.json').read().replace('{~BASE64 USER DATA~}', base64.b64encode(userdata.encode('utf-8')).decode('utf-8'))

    cfn = terria_aws.client('cloudformation', region_name='ap-southeast-2')
    cfn.create_stack(StackName=deployment_name, TemplateBody=template, Capabilities=['CAPABILITY_IAM'])
    print('Stack {} created'.format(deployment_name))
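This example writes the deployment file with boto3 and assumes a session, an S3 client and a bucket were created earlier in the script; the names terria_aws, s3c and bucket are not defined in the snippet. A plausible setup, purely as an assumption (the profile and bucket names below are placeholders), would be:

import boto3

terria_aws = boto3.session.Session(profile_name='terria')  # assumed profile name
s3c = terria_aws.client('s3')
bucket = terria_aws.resource('s3').Bucket('example-deployment-bucket')  # assumed bucket name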

Example 3
def process_shapefile(shapefile):
    layername = request_input(
        'What is the name of this layer?',
        os.path.basename(shapefile)[:-4])  # Chop off .shp
    attribute_list = list_props(shapefile, layername)
    print('Attributes in shapefile: {}'.format(', '.join(attribute_list)))
    generate_tiles_to = int(
        request_input('What zoom level should tiles be generated to?', 12))
    has_fid = yes_no_to_bool(request_input('Is there an FID attribute?', 'n'),
                             False)
    if has_fid:
        fid_attribute = request_input(
            'Which attribute should be used as an FID?', 'FID')
    server_url = request_input(
        'Where is the vector tile server hosted?',
        'http://staging.vector-tiles.terria.io/{}/{{z}}/{{x}}/{{y}}.pbf'.
        format(layername))
    description = request_input('What is the description of this region map?',
                                '')
    regionMapping_entries = OrderedDict()
    regionMapping_entry_name = request_input(
        'Name another regionMapping.json entry (leave blank to finish)', '')
    while regionMapping_entry_name != '':
        o = OrderedDict()
        o['layerName'] = ''  # Set in addRegionMap.js
        o['server'] = ''
        o['regionProp'] = request_input(
            'Which attribute should be used as the region property?', '')
        # Test this property
        if not unique_with_prop(shapefile, layername, o['regionProp']):
            o['disambigProp'] = request_input(
                'The given region property does not uniquely define every region. Which attribute should be used to disambiguate region matching?',
                '')
            o['disambigRegionId'] = request_input(
                'Which regionMapping definition does this disambiguation property come from?',
                '')
        else:
            print('The given region property uniquely defines each region.')
        o['aliases'] = request_input(
            'What aliases should this be available under? Separate aliases with a comma and space',
            '').split(', ')
        o['description'] = description

        regionMapping_entries[regionMapping_entry_name] = o
        regionMapping_entry_name = request_input(
            'Name another regionMapping.json entry (leave blank to finish)',
            '')
    cf = os.path.join(temp_dir, '{}.json'.format(layername))
    with open(cf, 'w') as f:
        json.dump(
            {
                'layerName': layername,
                'shapefile': os.path.join('..', shapefile),
                'generateTilesTo': generate_tiles_to,
                'addFID': not has_fid,
                'uniqueIDProp': fid_attribute if has_fid else 'FID',
                'server': server_url,
                'serverSubdomains': [],
                'regionMappingEntries': regionMapping_entries
            }, f)
    print('Generating tiles and config for {}'.format(layername))
    return layername, subprocess.Popen(['node', 'addRegionMap.js', cf])
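list_props and unique_with_prop are also left undefined; they inspect the shapefile's attribute table. One way they might be implemented, shown here with fiona as an assumption (the original could just as well use OGR directly):

import fiona

def list_props(shapefile, layername):
    # Hypothetical: return the attribute (property) names defined in the shapefile.
    # layername is unused in this sketch.
    with fiona.open(shapefile) as collection:
        return list(collection.schema['properties'].keys())

def unique_with_prop(shapefile, layername, prop):
    # Hypothetical: True if every feature has a distinct value for prop.
    with fiona.open(shapefile) as collection:
        values = [feature['properties'][prop] for feature in collection]
    return len(values) == len(set(values))
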
Example 4
        print('The following layers are different in the old deployment from the "most current" layers in S3:')
        print('Note: a version of 0 signifies a missing layer either in the old deployment or currently in the S3 bucket')
        print('\n'.join('{:40}  {:5}  {:5}'.format(*t) for t in [('Layer name', 'Old v', 'New v')] + changed_layers))
    remove = [layer for layer in request_input('Which layers do you want to remove? Separate layers with a comma and space', '').split(', ') if layer != '']
    for layer in remove:
        del old_data[layer]
    add = [layer_str.split(':') for layer_str in request_input('Which layers do you want to add/change versions? Format layers like layer_name:version and separate layers with a comma and space:', '').split(', ') if layer_str != '']
    for layer, version in add:
        old_data[layer] = int(version)
    deployment_data = old_data
elif method == 'l':
    print('The following layers are available:')
    print('\n'.join('{:40}  {:7}'.format(*t) for t in [('Layer name', 'Version')] + list(latest_layers.items())))
    deployment_data = dict([(layer_str.split(':')[0], int(layer_str.split(':')[1])) for layer_str in filter(None, request_input('Which layers do you want to add/change versions? Format layers like layer_name:version and separate layers with a comma and space:', '').split(', '))])

k = Key(bucket)
k.key = 'deployments/{}.json'.format(name)
k.set_contents_from_string(json.dumps({"data": deployment_data}))
if yes_no_to_bool(request_input('Deployment file {} uploaded to S3. Start an EC2 with this deployment configuration?'.format(k.key), 'n'), False):
    # Retrieve user-data and template from S3
    server_versions = sorted([re.search(r'server-(.*).tar.gz$', key.key).group(1) for key in bucket.list(prefix='server-')], key=lambda s: [int(n) for n in s.split('.')], reverse=True)
    server_version = request_input('Out of {}, which server version do you want to use?'.format(', '.join(server_versions)), server_versions[0])

    userdata = open('user-data').read().replace('{~STACK NAME~}', name).replace('{~SERVER VERSION~}', server_version)
    template = open('aws-template.json').read().replace('{~BASE64 USER DATA~}', base64.b64encode(userdata.encode('utf-8')).decode('utf-8'))

    cfn = boto.cloudformation.connect_to_region('ap-southeast-2') # Sydney
    cfn.create_stack(name, template_body=template, capabilities=['CAPABILITY_IAM'])
    print('Stack {} created'.format(name))
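
Example 4 is the legacy boto (2.x) variant of Example 2: Key, bucket.list(prefix=...) and boto.cloudformation.connect_to_region come from boto 2 rather than boto3. The setup it assumes would look roughly like this (the bucket name is a placeholder):

import boto
import boto.cloudformation
from boto.s3.key import Key

conn = boto.connect_s3()
bucket = conn.get_bucket('example-deployment-bucket')  # assumed bucket name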