def mapbox_js():
    """Flask view: stream tweets for a keyword, score their sentiment,
    convert the results to GeoJSON, upload them to Mapbox, and render the map.

    Relies on module-level names: request, TwitterStreamer, formatForML,
    calculate_sentiment, geoConverter, Uploader, render_template and
    MAPBOX_ACCESS_TOKEN.
    """
    # 0. Parse the request URL for the search keyword.
    keywordInput = request.args.get('keyword')

    # 1. Fetch tweets from the Twitter streaming API (authentication is
    #    handled inside TwitterStreamer via config.py).
    hash_tag_list = [keywordInput]
    fetched_tweets_filename = "tweets.json"
    twitter_streamer = TwitterStreamer()
    twitter_streamer.stream_tweets(fetched_tweets_filename, hash_tag_list)

    # Reshape the raw tweets into the format the ML step expects.
    formatForML('tweets.json', 'formattedData.json')

    # 2. Sentiment analysis (Azure), then 3. conversion to GeoJSON.
    calculate_sentiment('formattedData.json', 'predicted.json')
    geoConverter('predicted.json', 'myoutput.geojson')

    # 4. Send the GeoJSON to the Mapbox Uploads API.
    # BUG FIX: the original called pytest's getfixture('myoutput.geojson'),
    # which only exists inside a pytest run and raises NameError here; use a
    # literal destination map id instead.  The unused `sleep`/`randint`
    # imports from that copied snippet are dropped as well.
    service = Uploader()
    mapid = 'uploads-test'
    with open('myoutput.geojson', 'rb') as src:
        upload_resp = service.upload(src, mapid)
    # BUG FIX: `if (upload_resp.status_code)` was always true (any HTTP
    # status is non-zero); only report when the upload was not accepted
    # (201 = created).
    if upload_resp.status_code != 201:
        print(upload_resp.status_code)

    return render_template('map.html', ACCESS_KEY=MAPBOX_ACCESS_TOKEN)
def mapboxUpload(filename):
    """Upload *filename* to Mapbox as the 'uploads-test' tileset.

    Retries while Mapbox is rate-limiting (HTTP 422), then polls the upload
    status a bounded number of times until it reports complete.

    See: https://mapbox-mapbox.readthedocs-hosted.com/en/latest/uploads.html#uploads

    Relies on module-level names: Uploader, mapbox_access_token, sleep.
    """
    service = Uploader()
    service.session.params['access_token'] = mapbox_access_token
    mapid = 'uploads-test'
    with open(filename, 'rb') as src:
        upload_resp = service.upload(src, mapid)

    # 422 == Mapbox is still processing a previous upload; retry a few times.
    if upload_resp.status_code == 422:
        for _ in range(5):
            sleep(5)
            with open(filename, 'rb') as src:
                upload_resp = service.upload(src, mapid)
            if upload_resp.status_code != 422:
                break

    # BUG FIX: the original read upload_resp.json()['id'] unconditionally,
    # which raises (KeyError / JSON error) for any non-success response.
    # Only a 201 response carries the upload id.
    if upload_resp.status_code != 201:
        print("Upload failed with status", upload_resp.status_code)
        return

    upload_id = upload_resp.json()['id']
    for _ in range(5):
        status_resp = service.status(upload_id).json()
        if status_resp['complete']:
            print(status_resp)
            print("Finished uploading tileset " + mapid)
            break
        sleep(5)
def upload_to_mapbox(username, datasetname, geojson):
    """Upload GeoJSON to Mapbox as the tileset ``username.datasetname``.

    Parameters
    ----------
    username : str - Mapbox account name (tileset namespace).
    datasetname : str - tileset name within that namespace.
    geojson : dict, str or binary file-like object containing GeoJSON.

    Returns the ``requests``-style response from the Uploads API.
    """
    # Normalise the input: dict -> JSON text -> bytes stream; the Mapbox
    # SDK expects a readable binary file-like object.
    # FIX: use isinstance() instead of `type(x) == dict/str` (idiomatic and
    # subclass-safe).
    if isinstance(geojson, dict):
        geojson = json.dumps(geojson)
    if isinstance(geojson, str):
        geojson = BytesIO(geojson.encode())
    # see: https://www.mapbox.com/api-documentation/#uploads
    # Acquisition of credentials, staging of data, and upload finalization
    # is done by a single method in the Python SDK.
    service = Uploader()
    dataset_name = '{}.{}'.format(username, datasetname)
    resp = service.upload(geojson, dataset_name)
    return resp
def upload_file(
        data, name, username='******',
        token='sk.eyJ1IjoibWF0dXRlaWdsZXNpYXMiLCJhIjoiY2puODA4bW8xMGV1dzNrcGtiOGp6NXQ5aCJ9.DohKmjn_o6MK1Y4Q5FG8ew'
):
    """Dump *data* to ./upload_data.geojson and upload it to Mapbox as the
    tileset ``username.name``.

    Returns the Uploads-API response so callers can check the status code
    (the original discarded it).

    SECURITY NOTE: a live-looking secret token is hard-coded as a default
    argument; it should be rotated and read from the environment instead.
    (Left in place here because changing the default would change the
    public interface.)
    """
    # Dump into a file for upload; the SDK wants a binary stream.
    with open('./upload_data.geojson', 'w') as outfile:
        json.dump(data, outfile)
    service = Uploader(access_token=token)
    with open('./upload_data.geojson', 'rb') as src:
        # Acquisition of credentials, staging of data, and upload
        # finalization is done by a single method in the Python SDK.
        upload_resp = service.upload(src, username + '.' + name)
    # FIX: return the response instead of dropping it (backward compatible:
    # callers that ignored the implicit None are unaffected).
    return upload_resp
def upload_shapefile(data, dataset_name):
    """Upload *data* (a binary stream) to Mapbox as *dataset_name* and block
    until the tileset build completes.

    Raises MapboxException on a failed upload, a failed status check, or a
    processing error reported by Mapbox.
    """
    service = Uploader()
    upload_resp = service.upload(data, dataset_name)
    if upload_resp.status_code != 201:
        raise MapboxException(
            f"Upload failed with status {upload_resp.status_code}")
    upload_id = upload_resp.json()["id"]
    # Poll until Mapbox reports the tileset build finished.
    # NOTE(review): intentionally unbounded, matching the original; a
    # stalled build will keep this loop alive.
    while True:
        status = service.status(upload_id)
        if status.status_code != 200:
            raise MapboxException(
                f"Status check failed with status {status.status_code}")
        status = status.json()
        if status["complete"]:
            break
        # BUG FIX: status["error"] raised KeyError when the key was absent
        # from the payload; .get() treats a missing key as "no error".
        if status.get("error"):
            raise MapboxException("mapbox error: " + status["error"])
        time.sleep(10)
class Mapbox():
    """Thin Django-facing wrapper around the Mapbox Datasets and Uploads
    SDK clients.

    All handlers return HttpResponse objects; the SDK clients are shared
    class attributes authenticated with the module-level ALL_AT token.
    """
    datasets = Datasets(access_token=ALL_AT)
    uploader = Uploader(access_token=ALL_AT)
    username = '******'

    @classmethod
    def datasets_list(cls, response):
        # List all datasets owned by the account and return the raw JSON.
        resp = cls.datasets.list()
        return HttpResponse(resp.json(), content_type=json_content_type)

    @classmethod
    def datasets_insert(cls, response, dataset_id):
        ## If anything changes, uploaded_to_mapbox must be reset to false.
        # Push every active, not-yet-uploaded Misto row into the Mapbox
        # dataset as a Point feature, then mark it uploaded.
        mista = Misto.objects.filter(active=True, uploaded_to_mapbox=False)
        results = {}
        for misto in mista:
            feature = {
                'id': str(misto.id),
                'type': 'Feature',
                'properties': {
                    'nazev': misto.nazev,
                    'popis': misto.popis
                },
                'geometry': {
                    'type': 'Point',
                    # NOTE(review): GeoJSON coordinate order is
                    # [longitude, latitude]; this assumes misto.y is the
                    # longitude and misto.x the latitude - confirm.
                    'coordinates': [misto.y, misto.x]
                }
            }
            resp = cls.datasets.update_feature(dataset_id, str(misto.id),
                                               feature).json()
            results[misto.id] = resp
            misto.uploaded_to_mapbox = True
            misto.save()
        return HttpResponse(dumps(results), content_type=json_content_type)

    @classmethod
    def upload_tileset_from_dataset(cls, request, tileset_id, dataset_id):
        # Ask Mapbox to build tileset `tileset_id` from an existing dataset.
        # https://github.com/mapbox/mapbox-sdk-py/issues/152#issuecomment-311708422
        # https://www.mapbox.com/api-documentation/?language=Python#create-an-upload
        uri = "mapbox://datasets/{username}/{dataset_id}".format(
            username=cls.username, dataset_id=dataset_id)
        res = cls.uploader.create(uri, tileset_id, name='Msta')
        return HttpResponse(dumps({
            'tileset_id': tileset_id,
            'dataset_id': dataset_id,
            'result': res.json()
        }), content_type=json_content_type)
def upload_shapefile(data, dataset_name):
    """Upload *data* to Mapbox as *dataset_name*, then poll and print the
    build status until it completes, errors, or a status check fails."""
    service = Uploader()
    upload_resp = service.upload(data, dataset_name)
    # Guard clause: anything but 201 means the upload was rejected.
    if upload_resp.status_code != 201:
        print("Upload failed with status", upload_resp.status_code)
        return

    upload_id = upload_resp.json()["id"]
    while True:
        status_resp = service.status(upload_id)
        # Stop polling silently if the status endpoint itself misbehaves.
        if status_resp.status_code != 200:
            break
        print(status_resp.status_code)
        body = status_resp.json()
        if body["complete"]:
            break
        if body["error"]:
            print("mapbox error:", body["error"])
            break
        print(body)
        time.sleep(10)
def upload_as_tileset(self, dataset_filename, map_id=None):
    """Upload a dataset file from DATASETS_PATH to Mapbox as a tileset.

    Parameters
    ----------
    dataset_filename : file name within DATASETS_PATH.
    map_id : destination tileset id; a random uuid hex is generated when
        omitted.
    """
    map_id = uuid.uuid1().hex if not map_id else map_id
    service = Uploader()
    # BUG FIX: open in binary mode - the Mapbox SDK streams raw bytes to S3
    # and a text-mode handle can corrupt or reject the payload.
    with open(os.path.join(DATASETS_PATH, dataset_filename), 'rb') as src:
        upload_resp = service.upload(src, map_id)
    if upload_resp.status_code == 201:
        upload_id = upload_resp.json()['id']
        # Poll for up to 15 * 5 = 75 seconds.
        # (xrange replaced with range; range exists in both Py2 and Py3.)
        for _ in range(15):
            status_resp = service.status(upload_id).json()
            # BUG FIX: the original tested `'complete' == status_resp['complete']`,
            # comparing the *string* 'complete' against the boolean flag, so
            # success was never detected.  Test the flag's truthiness.
            if status_resp['complete']:
                logger.info("Tileset completed for dataset %s",
                            dataset_filename)
                break
            logger.info("Waiting for upload to complete")
            sleep(5)
        else:
            # BUG FIX: this message used to print unconditionally after the
            # loop, even on success; for/else runs it only when the loop
            # exhausted without breaking.
            logger.info(
                "Upload did not complete in the last 75 seconds. Check dashboard."
            )
def update_mapbox_tileset(dataset, lang):
    """Write *dataset* to a language-specific GeoJSON file and upload it to
    Mapbox, replacing the tileset configured in ``secrets``.

    Parameters
    ----------
    dataset : object exposing ``to_json()`` (e.g. a GeoDataFrame).
    lang : language code used in the file name and secret keys.

    Raises
    ------
    Exception when Mapbox rejects the upload or it never completes.
    """
    print('Uploading {} version to Mapbox'.format(lang.upper()))
    MAPBOX_API_KEY = secrets['MAPBOX_API_KEY']
    TILESET_ID = secrets['TILESET_ID_' + lang.upper()]

    def init_upload():
        # Re-open the file on every attempt: the SDK consumes the stream.
        with open('src/hexes_crime_{}.geojson'.format(lang), 'rb') as src:
            return uploader.upload(src, TILESET_ID)

    # Write the geodataframe to a JSON file to be read into Mapbox.
    with open('src/hexes_crime_{}.geojson'.format(lang), 'w',
              encoding='utf-8') as out:
        out.write(dataset.to_json())

    uploader = Uploader(MAPBOX_API_KEY)
    upload_resp = init_upload()

    # 422 == a previous upload is still being processed; retry.
    # BUG FIX: both retry loops below were unbounded (`while True`) and
    # could hang the caller forever; they are now bounded and fail loudly.
    if upload_resp.status_code == 422:
        print('Update unsuccessful. Retrying...')
        for _ in range(60):
            time.sleep(5)
            upload_resp = init_upload()
            if upload_resp.status_code != 422:
                break

    # If the upload was accepted, poll until the tileset build completes.
    if upload_resp.status_code == 201:
        upload_id = upload_resp.json()['id']
        for _ in range(120):
            status_resp = uploader.status(upload_id).json()
            if status_resp['complete']:
                print('Mapbox tileset update successful.')
                break
            time.sleep(5)
        else:
            raise Exception('Mapbox upload did not complete in time.')
    else:
        raise Exception('Unable to connect to Mapbox.')
def handle(self, *args, **options):
    """Django management command: push all usable school coordinates to
    Mapbox as a tileset.

    Reads the tileset id/name from the first MapBoxDataset record, builds a
    GeoJSON FeatureCollection from School rows with non-placeholder
    coordinates, and uploads it via the Mapbox Uploads API.
    """
    try:
        # Try to load the tileset information from the Django database
        # (first record).
        print("Trying to load the tileset from Django database...")
        mapbox_tileset = MapBoxDataset.objects.first()
        tileset_id = mapbox_tileset.tileset_id
        tileset_name = mapbox_tileset.name
        print("Found tileset:", tileset_id)
    except AttributeError:
        # BUG FIX: .first() returns None on an empty table, so the failure
        # mode is AttributeError on `None.tileset_id` - the original caught
        # IndexError, which .first() never raises.
        print(
            "Could not find a tileset. Please create a tileset in Django database before this process."
        )
        tileset_id = None

    # BUG FIX: the original used `pass` here despite the comment saying
    # "Stopping", so execution continued with no tileset and crashed later
    # on the undefined tileset_name.  Actually stop.
    if not tileset_id:
        return

    # Initializing uploader service.
    service = Uploader(access_token=settings.MAPBOX_TOKEN)

    # Build the FeatureCollection from all schools with real coordinates.
    schools = School.objects.all()
    allfeatures = {"features": [], "type": "FeatureCollection"}
    for school in schools:
        # Skip rows with missing or (0, 0) placeholder coordinates.
        if school.lat and school.long:
            if school.lat != 0 or school.long != 0:
                feature = {
                    'type': 'Feature',
                    'geometry': {
                        'type': "Point",
                        # GeoJSON coordinate order is [longitude, latitude].
                        'coordinates':
                        [float(school.long), float(school.lat)]
                    },
                    'properties': {
                        'name': school.name,
                        'id': school.id
                    }
                }
                allfeatures["features"].append(feature)

    # NOTE(review): file_storage is instantiated but never used below;
    # kept in case get_storage_class() has configuration side effects.
    file_storage = get_storage_class()()
    with tempfile.TemporaryDirectory() as tempdir:
        fname = os.path.join(tempdir, 'schools.geojson')
        with open(fname, 'w') as f:
            f.write(json.dumps(allfeatures))
        with open(fname, 'rb') as f:
            upload_resp = service.upload(f, tileset_id, name=tileset_name)
        if upload_resp.status_code == 201:
            print("Upload successful.")
        else:
            print("Upload failed with code:", upload_resp.status_code)
def refresh():
    """Kick off a Mapbox tileset build ('messes') from the messes dataset
    and return the Uploads-API response payload."""
    dataset_uri = 'mapbox://datasets/timothycrosley/{}'.format(MESSES_DATASET)
    service = Uploader()
    response = service.create(dataset_uri, 'messes')
    return response.json()
def GOES17_Tile_Creation(self):
    """Build a georeferenced RGB GeoTIFF from a GOES-17 netCDF scan, warp
    it to lat/lon, tile it (raster via rio-mbtiles or vector via
    tippecanoe), and upload the result to Mapbox.

    Reads self.data (opened netCDF dataset), self.R_band/G_band/B_band
    (normalised 0..1 arrays), self.mbtileFormat, self.tileMaxZoom and
    self.mb_access_token.
    """
    # Open the file.
    try:
        C = self.data
    except:
        # NOTE(review): bare except kept as-is; attribute access rarely
        # raises here, so this mainly guards an unset self.data.
        print("Can't open file:", self.fileName)
        return None
    delete_old_tiles = True

    # Scan's start time, converted to a datetime object and then to the
    # YYYYmmdd_HHMM tag used in the tileset name.
    scan_start = datetime.strptime(C.time_coverage_start,
                                   '%Y-%m-%dT%H:%M:%S.%fZ')
    scan_start = scan_start.strftime("%Y%m%d_%H%M")
    # scan_mid = int(np.ma.round(C.variables['t'][0], decimals=0))
    # DATE = datetime(2000, 1, 1, 12) + timedelta(seconds=scan_mid)

    # Satellite geometry from the imager projection metadata.
    sat_h = C['goes_imager_projection'].perspective_point_height
    sat_lon = C['goes_imager_projection'].longitude_of_projection_origin
    sat_sweep = C['goes_imager_projection'].sweep_angle_axis
    major_ax = C['goes_imager_projection'].semi_major_axis
    minor_ax = C['goes_imager_projection'].semi_minor_axis

    # The projection x and y coordinates equal the scanning angle (in
    # radians) multiplied by the satellite height
    # (https://proj.org/operations/projections/geos.html).
    x = C['x'].values * sat_h
    y = C['y'].values * sat_h

    # Map object with pyproj; convert scan coordinates to lat/lon.
    p = Proj(proj='geos', h=sat_h, lon_0=sat_lon, sweep=sat_sweep)
    XX, YY = np.meshgrid(x, y)
    lons, lats = p(XX, YY, inverse=True)

    nx = C['x'].size
    ny = C['y'].size
    xmin, ymin, xmax, ymax = [x.min(), y.min(), x.max(), y.max()]
    xres = (xmax - xmin) / float(nx)
    yres = (ymax - ymin) / float(ny)

    # The satellite seems to be off in the x-dir by 4 pixels and 2 pixels
    # in the y-dir; adjustment currently disabled (factor 0).
    #x_adjustment_factor = xres * 4
    #y_adjustment_factor = yres * 2
    x_adjustment_factor = xres * 0
    y_adjustment_factor = yres * 0
    geotransform = ((xmin + x_adjustment_factor), xres, 0,
                    (ymax - y_adjustment_factor), 0, -yres)

    # Scale the 0..1 band arrays to 8-bit for the GeoTIFF.
    R_band = self.R_band * 255
    G_band = self.G_band * 255
    B_band = self.B_band * 255

    # Get the path name of the directory for our GEOTIF and MBTILES.
    dir_path = os.path.dirname(os.path.realpath(__file__))
    print("Your Files Are Saving Here: " + dir_path)

    # The file created here will be overwritten every time.  The geotiff is
    # fed into rio mbtiles.
    dst_ds = gdal.GetDriverByName('GTiff').Create(
        os.path.join(dir_path, 'GOES17.tif'), nx, ny, 3, gdal.GDT_Byte)
    #dst_ds = gdal.Open('GOES17.tif', nx, ny, 3, gdal.GDT_Byte)

    # FROM: https://github.com/lanceberc/GOES/blob/master/GOES_GDAL.ipynb
    # The important part of this is to use the +over in the projection.
    # It accounts for the fact that the image crosses -180 W.
    goes17_proj = f"+proj=geos -ellps=GRS80 +f=.00335281068119356027 +sweep=x +no_defs +lon_0={sat_lon} " \
                  f"+h={sat_h} +x_0=0 +y_0=0 +a={major_ax} +b={minor_ax} +units=m +over +lon_wrap=-180"

    # HUGE HELP! FROM: https://github.com/lanceberc/GOES/blob/master/GOES_GDAL.ipynb
    # The EPSG definition of Mercator doesn't allow longitudes that extend
    # past -180 or 180, which makes working in the Pacific difficult.
    # Define our own centralized on the anti-meridian to make working with
    # GOES-17 (which crosses the anti-meridian) continuous.
    #proj_anti_mercator = "+proj=merc +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs +over +lon_0=-180"
    proj_anti_mercator = "+proj=merc +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs +over +lon_0=0 +lon_wrap=-180"

    warpOptions = gdal.WarpOptions(
        format="GTiff",
        width=nx,
        height=ny,
        outputBoundsSRS="EPSG:4326",  # WGS84 - allows use of lat/lon outputBounds
        outputBounds=[-161.0, 15.0, -103.0, 50.0],  # lat/long of ll, ur corners
        dstSRS="EPSG:4326",  # GOES-17 full-disk crosses the anti-meridian
        warpOptions=["SOURCE_EXTRA=500"],  # magic from The Internet for off-earth pixels
        multithread=True,  # not sure if this buys anything on my setup
    )

    dst_ds.SetGeoTransform(geotransform)  # specify coords
    # srs.ImportFromEPSG(3857)  # WGS84 x/y
    # srs.ImportFromEPSG(4326)  # WGS84 lat/long
    dst_ds.GetRasterBand(1).WriteArray(R_band)  # write r-band to the raster
    dst_ds.GetRasterBand(2).WriteArray(G_band)  # write g-band to the raster
    dst_ds.GetRasterBand(3).WriteArray(B_band)  # write b-band to the raster
    srs = osr.SpatialReference()  # establish encoding
    srs.ImportFromProj4(goes17_proj)  # get the projection of the GOES-17
    dst_ds.SetProjection(srs.ExportToWkt())  # match our geotiff to GOES-17
    gdal.Warp(os.path.join(dir_path, 'GOES17_warped.tif'), dst_ds,
              options=warpOptions)
    dst_ds.FlushCache()  # write to disk
    dst_ds = None  # clear data

    # rio is installed by conda by installing "rasterio".  You still need
    # to `pip install rio-mbtiles`, which installs the command-line exe.
    # Need to tell python where the rio executable lives; the path differs
    # between the dev (macOS/conda) and production (Linux) hosts.
    # rio_path = check_output('which rio')
    rio_path = "/opt/anaconda3/envs/django_python/bin/rio"
    if "Linux" in platform.platform(terse=True):
        rio_path = "/var/www/venv/django_python/bin/rio"

    # The path of our file created by the RGB bands above.
    input_path = os.path.join(dir_path, "GOES17_warped.tif")
    tile_source = f'GOES17_{scan_start}.mbtiles'
    tile_id = f'GOES17_{scan_start}'

    # If the user wants raster tiles, use rio to create the mbtiles.
    if self.mbtileFormat != 'polygon':
        call([rio_path, 'mbtiles', '--format', 'PNG', '--overwrite',
              '--zoom-levels', f'1..{self.tileMaxZoom}', input_path,
              tile_source])
    # If the user wants vector tiles, use rasterio/fiona to create the
    # geojson, then tippecanoe to create the vector tiles from it.
    else:
        polygonize(input_path)
        json_source = os.path.join(dir_path, "GOES17_Poly.json")
        call(['tippecanoe', '--force', '-l', 'goes17_fire', '--coalesce',
              '-P', '-D9', '-z6', '--grid-low-zooms', '-o', tile_source,
              json_source])

    uploader = Uploader(access_token=self.mb_access_token)
    # List existing tilesets so stale ones can be removed first.
    tile_list = requests.get(
        f'https://api.mapbox.com/tilesets/v1/smotley?access_token={self.mb_access_token}')
    tile_list = json.loads(tile_list.text)
    if delete_old_tiles:
        delete_tilesets(tile_list, self.mb_access_token)
    with open(tile_source, 'rb') as src:
        # test = requests.delete(f'https://api.mapbox.com/tilesets/v1/smotley.GOES17_TC_Fire?access_token={mb_access_token}')
        print("UPLOADING TILE " + tile_source)
        upload_resp = uploader.upload(src, tile_id)
        if upload_resp.status_code == 201:
            print("DONE UPLOADING TILE " + tile_id)
            # Dict containing RGB colorTuple that combines the FireTemp and TrueColor
        else:
            print("UPLOAD FAILED")
    os.remove(tile_source)  # delete the local .mbtiles file
    return
# --- GOES ABI radiance -> Mapbox pipeline: module setup ---
from osgeo.gdalnumeric import *
from osgeo.gdalconst import *
import os, sys
from datetime import datetime, timedelta
import pytz
import netCDF4 as nc
from subprocess import call
from mapbox import Uploader
import uuid
import botocore
import boto3
import re

# Windows path to the gdaladdo helper (builds raster overviews).
gdaladdoFile = 'C:/Users/smotley/AppData/Local/Continuum/anaconda3/Library/bin/gdaladdo.exe'
# The Mapbox secret token must be present in the environment; the Uploader
# client is shared module-wide.
mb_access_token = os.environ['MAPBOX_TOKEN']
uploader = Uploader(access_token=mb_access_token)

def main():
    """Entry point: fetch the most recent hour of CONUS data."""
    hours_of_data = 1
    last_file = False  # Do you want just the latest file, or all files since
    bbox = [-134.12207, 52.628703, -60.9284436, 21.146938]  # namNest Bounds
    gribData(bbox, last_file, hours_of_data)

def gribData(bbox, grab_last_file, hours_of_data):
    """Locate and process GOES ABI radiance files for `bbox`.

    NOTE(review): this definition appears truncated at the end of this
    chunk; only the setup shown below is visible.
    """
    dir_path = os.path.dirname(os.path.realpath(__file__))
    sat_dir = os.path.join(dir_path, 'satellite')
    #product = 'ABI-L1b-RadF' #Full Disk
    product = 'ABI-L1b-RadC' #CONUS
    # ABI band selection.  Band 1 is effectively the blue channel of
    # visible light seen by the satellite; on its own it helps identify
    # aerosols (dust, haze, smoke) suspended in the atmosphere, which show
    # as milky, featureless areas that disappear in the 'Red' band (ABI 2)
    # because red and blue light scatter differently.
    band_num = 2
import os
import sys
from mapbox import Uploader

USER = '******'

if __name__ == '__main__':
    # CLI: stage_upload.py <path> <mosaic_id> <date>
    path = sys.argv[1]
    mosaic_id = sys.argv[2]
    date = sys.argv[3]
    basepath, fname = os.path.split(path)
    u = Uploader()
    tileset = '{}.{}-{}'.format(USER, mosaic_id, date)
    # BUG FIX: the original passed `open(path)` directly to u.stage(),
    # leaking the file handle and reading in text mode; use a context
    # manager and binary mode (the SDK streams raw bytes).
    with open(path, 'rb') as src:
        url = u.stage(src)
    # Create the tileset build from the staged URL and print the job info.
    job = u.create(url, tileset, name=fname).json()
    print(job)
import os
from time import sleep
from mapbox import Uploader

# Uploads-API client.  NOTE(review): no token is set explicitly here;
# presumably MAPBOX_ACCESS_TOKEN is in the environment - confirm.
service = Uploader()
dir = os.path.dirname(__file__)

# Tilesets to (re)upload: Mapbox map id and local .mbtiles path for each.
tilesets = [
    {
        'mapid': 'habitat-areas-2017',
        'path': '/app/tiles/habitat-areas-2017.mbtiles'
    },
    {
        'mapid': 'landscape_regions-2017',
        'path': '/app/tiles/landscape_regions-2017.mbtiles'
    },
]

# Kick off one upload per tileset, remembering each upload id for polling.
for tileset in tilesets:
    with open(tileset['path'], 'rb') as src:
        print('Uploading:')
        upload_resp = service.upload(src, tileset['mapid'])
        tileset['upload_id'] = upload_resp.json()['id']
        print(upload_resp.json())

def uploads_complete(tileset_list):
    # Collect the completion status of every pending upload.
    # NOTE(review): this definition is truncated at the end of this chunk.
    statuses = []
    for tileset in tileset_list:
        statuses.append(
from mapbox import Uploader
from time import sleep
from random import randint
import os
import sys

# CLI: the last argument is the file to upload.
filename = sys.argv[-1]

service = Uploader()
# BUG FIX: the original line used '==' (a comparison with no effect), so
# the access token was never actually set on the session; use assignment.
service.session.params['access_token'] = os.environ['MAPBOX_ACCESS_TOKEN']

basename = os.path.basename(filename)
# Mapbox map ids are limited to 32 characters.
mapid = basename[:32]
with open(filename, 'rb') as src:
    upload_resp = service.upload(src, mapid)
# 422 == Mapbox is still processing a previous upload; retry a few times.
if upload_resp.status_code == 422:
    for i in range(5):
        sleep(5)
        with open(filename, 'rb') as src:
            upload_resp = service.upload(src, mapid)
        if upload_resp.status_code != 422:
            break
#!/usr/bin/env python
# python script to upload geojson for ZWEDC into mapbox, where it would
# convert them as tileset
# tin 2019.01.16
# https://github.com/mapbox/mapbox-sdk-py/blob/master/docs/uploads.md
# need mapbox python api, from parent dir of above link.
# https://www.mapbox.com/api-documentation/maps/#uploads
# need to have mapbox secret token to generate temp aws key for stage file
# in S3, see area51
# pip install mapbox
# maybe do it inside virtual env? or just have it in bofh as root...
# less hassle :)

from mapbox import Uploader
from time import sleep
from random import randint

service = Uploader()
# BUG FIX: the original used pytest's getfixture('uploads_dest_id'), which
# was copied from the SDK's doctests, only exists inside a pytest run, and
# raises NameError in a plain script; use the destination map id directly.
mapid = 'uploads-test'

# and found the cli sdk is way way simpler, will use that instead.
# https://github.com/mapbox/mapbox-cli-py#upload
#!./venv/bin/python
from io import BytesIO
from configparser import ConfigParser
from mapbox import Uploader
from geojson import Point, Feature, FeatureCollection, dumps
import yaml

# Mapbox access token comes from mapbox.ini ([mapbox] access-token).
config = ConfigParser()
config.read('mapbox.ini')

# SECURITY/DEPRECATION FIX: yaml.load() without an explicit Loader can
# construct arbitrary Python objects and is removed in PyYAML 6; the sites
# file is plain data, so use safe_load.
sites = yaml.safe_load(open('sites.yaml', encoding='utf-8').read())

# One Point feature per site, annotated for the map popups
# (campaign name, "N) location" label, index, date, body text).
points = FeatureCollection([
    Feature(geometry=Point((float(s["longitude"]), float(s["latitude"]))),
            properties=dict(campaign=c['campaign'],
                            location="%i) %s" % (i + 1, s['location']),
                            idx=str(i + 1),
                            date=s['date'],
                            text=s['text']))
    for c in sites for i, s in enumerate(c['sites'])
])

# Upload the serialized FeatureCollection straight from memory.
service = Uploader(access_token=config['mapbox']['access-token'])
upl = service.upload(BytesIO(dumps(points).encode('utf-8')),
                     'nzherald.new-zealand-wars-sites-v15',
                     name="New Zealand Wars Sites v15")
print(upl.json())
# from mapbox import Geocoder
# >>> geocoder = Geocoder(access_token="pk.YOUR_ACCESS_TOKEN")
from mapbox import Uploader
from time import sleep
from random import randint

service = Uploader()
# BUG FIX: this snippet was lifted from the SDK's doctests; getfixture()
# is a pytest-only helper and raises NameError in a plain script.  Use the
# destination map id directly.
mapid = 'uploads-test'
with open('tests/twopoints.geojson', 'rb') as src:
    upload_resp = service.upload(src, mapid)
# 422 == Mapbox is still processing a previous upload; retry a few times.
if upload_resp.status_code == 422:
    for i in range(5):
        sleep(5)
        with open('tests/twopoints.geojson', 'rb') as src:
            upload_resp = service.upload(src, mapid)
        if upload_resp.status_code != 422:
            break
upload_resp.status_code  # doctest leftover: no effect outside a REPL
upload_id = upload_resp.json()['id']
# Poll the build status for up to ~25 seconds.
for i in range(5):
    status_resp = service.status(upload_id).json()
    if status_resp['complete']:
        break
    sleep(5)
mapid in status_resp['tileset']  # doctest leftover: no effect outside a REPL
try: k = Key(mybucket) k.key = data['key'] os.chdir(data_dir) k.set_contents_from_filename(act_out_file_name + '.zip') k.set_contents_from_filename(seg_out_file_name + '.zip') logging.info('successfully staged zip files on AWS S3 bucket') except Exception as e: logging.exception('error uploading files to S3 staging bucket') pass os.chdir(data_dir) from mapbox import Uploader service = Uploader(access_token=token) try: with open(seg_out_file_name + '.zip', 'r') as src: upload_resp_seg = service.upload(src, 'ADV_all_segments') with open(act_out_file_name + '.zip', 'r') as src: upload_resp_act = service.upload(src, 'ADV_all_activities') upload_id_act = upload_resp_act.json()['id'] upload_id_seg = upload_resp_seg.json()['id'] status_resp_act = service.status(upload_id_act).json() status_resp_seg = service.status(upload_id_seg).json() logging.info('successfully uploaded files to Mapbox account') except Exception as e:
#Example python process_images.py my_mbx_username input/base.tfw input output from glob import glob import os import sys from mapbox import Uploader username = sys.argv[1] world = sys.argv[2] inDir = sys.argv[3] outDir = sys.argv[4] service = Uploader() tifs = '%s/*.tif' % (inDir) for absPath in glob(tifs): fullfile = os.path.basename(absPath) filename = os.path.splitext(fullfile)[0] worldCom = 'cp %s %s/%s.tfw' % (world, inDir, filename) print worldCom os.system(worldCom) outCom = 'gdal_translate -co tiled=yes -co compress=LZW -co BLOCKXSIZE=256 -co BLOCKYSIZE=256 -of Gtiff -a_srs EPSG:3857 -a_nodata 0 %s %s/%s' % ( absPath, outDir, fullfile) print outCom os.system(outCom) with open('%s/%s' % (outDir, fullfile), 'r') as src: # Acquisition of credentials, staging of data, and upload # finalization is done by a single method in the Python SDK. tilesetName = '%s.%s' % (username, filename) print 'uploading %s, %s' % (tilesetName, fullfile)
'go': go, } }) airtable.process_records( table='Restaurants', params={ # "fields": ["Restaurant", "Status", "Longitude", "Latitude" "[TA] ID", "[TA] Reviews", "[TA] Rating", ], # "filterByFormula": "{Today's Trip}", # "pageSize": 100, }, operation=add_feature, ) FeatureCollection = { 'type': 'FeatureCollection', 'features': features, } filename = 'features.json' with open(filename, 'w', encoding='utf-8') as file: json.dump(FeatureCollection, file, ensure_ascii=False, indent=4) from mapbox import Uploader from settings.secret import MAPBOX uploader = Uploader(access_token=MAPBOX['SECRET_KEY']) with open(filename, 'rb') as file: upload_response = uploader.upload(file, 'test-kadoresto') print(upload_response.json())
for prop in properties: feature['properties'][prop] = row[prop] # add this feature (aka, converted dataframe row) to the list of features inside our dict geojson['features'].append(feature) return geojson from mapbox import Uploader import json username = '******' token = 'sk.eyJ1IjoibWF0dXRlaWdsZXNpYXMiLCJhIjoiY2puODA4bW8xMGV1dzNrcGtiOGp6NXQ5aCJ9.DohKmjn_o6MK1Y4Q5FG8ew' service = Uploader(access_token=token) def upload_file( data, name, username='******', token='sk.eyJ1IjoibWF0dXRlaWdsZXNpYXMiLCJhIjoiY2puODA4bW8xMGV1dzNrcGtiOGp6NXQ5aCJ9.DohKmjn_o6MK1Y4Q5FG8ew' ): # Dump into file for upload with open('./upload_data.geojson', 'w') as outfile: json.dump(data, outfile) service = Uploader(access_token=token) with open('./upload_data.geojson', 'rb') as src: # Acquisition of credentials, staging of data, and upload