Example #1
import os
import random

import pandas as pd
from mapbox import Datasets


def select_upload_data(dataset_to_update, dataframe):

    try:
        # initialize an empty list to hold the features that we have already uploaded to mapbox
        existing = []

        # set the access token before creating the API session, so Datasets() can pick it up
        os.environ['MAPBOX_ACCESS_TOKEN'] = 'sk.eyJ1IjoiZmlyZS1yaXNrLXNmIiwiYSI6ImNqZGw1dTlwdDA1aXMzM3FrbDZpZnpmczMifQ.JbUPsvx8384WHOAfA9Vy9w'
        datasets = Datasets()

        # get features of the mapbox dataset by calling the API
        features_collection = datasets.list_features(dataset_to_update).json()

        # loop through the features of the mapbox dataset and save them to the list "existing"
        for feature in features_collection['features']:
            existing.append([
                feature["id"],
                feature["properties"]["name"],
            ])

        # convert "existing" to a dataframe
        existing = pd.DataFrame(existing, columns=["id", "address"])

        print("successfully accessed the existing mapbox dataset")
        
    except Exception:
        print("Error: could not access the mapbox dataset. Check that you are accessing your token correctly")

    try:
        # get rows in dataframe that are not yet uploaded
        new_rows = pd.merge(left = dataframe,
                            right = existing,
                            on = "address",
                            how = "left")
        new_rows = new_rows[new_rows.id.isnull()]
        new_rows = new_rows.reset_index(drop=True)

        # create a "start point" for new row ids
        # note: this was not a very smart way to create ids, but I'm stuck with it for now
        start = max(existing["id"].astype("int64"))

        # create new random ids for these new uploads
        new_ids = random.sample(range(start, start * 5), len(new_rows))

        print("successfully selected new upload data")
    
    except Exception:
        print("there was a problem selecting the data to upload")
        
    # make sure these new random ids are not already in the dataset
    
    try:
        assert all(new_id not in existing["id"].astype("int64").values for new_id in new_ids)
    except AssertionError:
        print("Error! Duplicate ids created")
        
    # return the new rows and new ids to upload
    return new_rows, new_ids
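A minimal usage sketch for select_upload_data; the dataset ID and the dataframe contents below are placeholders, not values from the original project:

# hypothetical inputs: a dataset id and a dataframe with an "address" column
df = pd.DataFrame({"address": ["123 Main St", "456 Oak Ave"]})
rows_to_upload, ids_to_upload = select_upload_data("your-dataset-id", df)
print(len(rows_to_upload), "rows still need to be uploaded")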
Example #2
import os

from mapbox import Datasets


def check_upload_result(upload, dataset_to_update):
    try:
        # check that the dataset upload was successful
        os.environ['MAPBOX_ACCESS_TOKEN'] = 'sk.eyJ1IjoiZmlyZS1yaXNrLXNmIiwiYSI6ImNqZGw1dTlwdDA1aXMzM3FrbDZpZnpmczMifQ.JbUPsvx8384WHOAfA9Vy9w'
        datasets = Datasets()
        resp = datasets.list_features(dataset_to_update).json()
        assert len(resp['features']) == len(upload), "Only " + str(len(resp['features'])) + " features are included in the dataset"    
    except Exception:
        print("Error: unable to check the number of uploaded records. To manually check the execution: Mapbox > Studio > Datasets > Menu > View details")
Example #3
import os
os.environ['MAPBOX_ACCESS_TOKEN'] = "pk.eyJ1IjoidmliaHUxOTk0NiIsImEiOiJjam01MjB1YjkwMDZrM3BsMHI4d21reXduIn0.j1yPdjaTrJ1i4Uc_dcT62g"
import pandas as pd
import numpy as np
from mapbox import Datasets
from shapely.geometry import Polygon  # needed for the Polygon() calls below
import matplotlib.pyplot as plt
import json
import sys
import geopandas as gpd
import descartes
from pandas.io.json import json_normalize
import pyvisgraph as vg

datasets = Datasets()
col = datasets.list_features("ck5gcnyor0rcw2nmdwaeaiko9").json()
#attrs = datasets.read_dataset("ck6ub6amu0h6g2nlhtl42fguy").json()
df = pd.read_json(json.dumps(col['features']))
#df.head()

#nycc = json_normalize(df['properties'])
nyc = json_normalize(df['geometry'])
#len(nycc.index) #== len(nyc.index)
#nyc.head()

# build a shapely Polygon for every coordinate ring of every feature
pp = []
for i in nyc.index:
    for ff in nyc['coordinates'][i]:
        pp.append(Polygon(ff))

f = pd.DataFrame(pp)
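Since geopandas and matplotlib are already imported, a quick visual sanity check of the extracted polygons could look like this (a sketch, not part of the original script):

# plot the polygons to verify the dataset geometry was parsed correctly
gdf = gpd.GeoDataFrame(geometry=pp)
gdf.plot()
plt.show()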
Example #4
import sys
import os
import json

import requests
from mapbox import Datasets

if len(sys.argv) != 3:
    print(
        'Please supply a mode <create_source|replace_source|create_tileset> and tile set <plaetze|strassen|viertel|denkmaeler|gebaeude>.'
    )
    sys.exit(1)

mode = sys.argv[1]
tileset = sys.argv[2]
url = 'https://api.mapbox.com/tilesets/v1/'
access_string = '?access_token=' + os.getenv("tileset_token")

if 'source' in mode:
    dataset_id = os.getenv(tileset + '_dataset')
    datasets = Datasets(access_token=os.getenv("dataset_token"))
    collection = datasets.list_features(dataset_id).json()
    features = collection['features']
    ld_features = '\n'.join(json.dumps(feature) for feature in features)
    files = {'file': (tileset + '.geojson', ld_features)}
    sources_url = url + 'sources/strassenlaerm/' + tileset + access_string
    if mode == 'create_source':
        r = requests.post(sources_url, files=files)
    elif mode == 'replace_source':
        r = requests.put(sources_url, files=files)
    else:
        print('Please use a valid mode.')
        sys.exit(1)
elif mode == 'create_tileset':
    with open('recipe-' + tileset + '.json', 'r') as recipe_file:
        r = requests.post(url + 'strassenlaerm.' + tileset + access_string,
                          json=json.loads(recipe_file.read()))
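A hypothetical invocation of the script above, assuming it is saved as manage_tilesets.py: create the tileset source from the dataset first, then build the tileset from its recipe file:

python manage_tilesets.py create_source strassen
python manage_tilesets.py create_tileset strassen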
Example #5
from mapbox import Datasets


def get_features(dataset_id):
    ''' Takes the ID of a dataset and returns a dict with all
        the features in the dataset
    '''
    datasets = Datasets()
    return datasets.list_features(dataset_id).json()
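A short usage sketch (the dataset ID is a placeholder, and a valid MAPBOX_ACCESS_TOKEN must already be set in the environment):

collection = get_features("your-dataset-id")
print(len(collection["features"]), "features found")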
Example #6
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
import os
import json
from mapbox import Datasets
from dotenv import load_dotenv
load_dotenv()

datasets = Datasets(access_token=os.getenv("dataset_token"))
streets_dataset = os.getenv("strassen_dataset")
squares_dataset = os.getenv("plaetze_dataset")
existing_streets = datasets.list_features(streets_dataset).json()['features']
existing_squares = datasets.list_features(squares_dataset).json()['features']
existing_objects = existing_streets + existing_squares
# print(existing_objects)
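For reference, each entry in existing_objects is a GeoJSON Feature dict; the fields that generate_geometry and reverse_part below rely on look roughly like this (all values here are hypothetical):

# hypothetical shape of one street part; the German property names come from the source data
example_part = {
    "type": "Feature",
    "properties": {"type": "street", "name": "Musterstrasse",
                   "beginnt_be": "Platz A", "endet_bei_": "Platz B"},
    "geometry": {"type": "MultiLineString",
                 "coordinates": [[[8.54, 47.37], [8.55, 47.38]]]},
}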


def generate_geometry(street_square, parts, msg):
    def reverse_part(part_to_reverse):
        part_to_reverse['properties']['endet_bei_'], part_to_reverse['properties']['beginnt_be'] = \
            part_to_reverse['properties']['beginnt_be'], part_to_reverse['properties']['endet_bei_']
        part_to_reverse['geometry']['coordinates'][0].reverse()

    if street_square['properties']['type'] == 'street':
        # print(street_square['properties']['name'])
        parts_multi = []
        # go through all parts belonging to an object to simplify geometry
        # for p in parts:
        #     print(p['properties']['beginnt_be'] + '  ->  ' + p['properties']['endet_bei_'] + ': ' + str(p['geometry']['coordinates']))