def mapboxUpload(filename):
  """
  Upload *filename* as the Mapbox tileset 'uploads-test' and poll until
  processing finishes (or ~25s elapse).

  See: https://mapbox-mapbox.readthedocs-hosted.com/en/latest/uploads.html#uploads
  """
  service = Uploader()
  service.session.params['access_token'] = mapbox_access_token
  mapid = 'uploads-test'

  # HTTP 422 means staging credentials were not ready yet; retry up to
  # five times, pausing between attempts. Each attempt reopens the file so
  # a fresh handle is streamed (the original kept the first handle open
  # while re-opening inside it).
  with open(filename, 'rb') as src:
    upload_resp = service.upload(src, mapid)
  for _ in range(5):
    if upload_resp.status_code != 422:
      break
    sleep(5)
    with open(filename, 'rb') as src:
      upload_resp = service.upload(src, mapid)

  # Poll the upload status until Mapbox reports the tileset complete.
  upload_id = upload_resp.json()['id']
  for _ in range(5):
    status_resp = service.status(upload_id).json()
    if status_resp['complete']:
      print(status_resp)
      print("Finished uploading tileset " + mapid)
      break
    sleep(5)
Esempio n. 2
0
def mapbox_js():
    """Fetch tweets for the requested keyword, score their sentiment,
    upload the resulting GeoJSON to Mapbox, and render the map page."""

    # 0. Parse url get data
    keywordInput = request.args.get('keyword')

    # 1. Fetch tweets from twitter api
    # Authenticate using config.py and connect to Twitter Streaming API.
    hash_tag_list = [keywordInput]
    fetched_tweets_filename = "tweets.json"
    twitter_streamer = TwitterStreamer()
    twitter_streamer.stream_tweets(fetched_tweets_filename, hash_tag_list)

    # Reshape the raw tweets into the format the ML endpoint expects.
    formatForML('tweets.json', 'formattedData.json')

    # 2. Request to azure - positive or negative analysis
    calculate_sentiment('formattedData.json', 'predicted.json')

    # 3. Convert predictions to GeoJSON for Mapbox.
    geoConverter('predicted.json', 'myoutput.geojson')

    # 4. Send data file to mapbox api
    service = Uploader()
    # NOTE(review): getfixture() is a pytest helper; this looks copied from
    # the SDK's doctest example — confirm the intended tileset id.
    mapid = getfixture('myoutput.geojson')  # 'uploads-test'
    with open('myoutput.geojson', 'rb') as src:
        upload_resp = service.upload(src, mapid)
        # status_code is a non-zero int, so the original truthiness guard
        # was always taken; print it unconditionally.
        print(upload_resp.status_code)

    return render_template('map.html', ACCESS_KEY=MAPBOX_ACCESS_TOKEN)
Esempio n. 3
0
def upload_to_mapbox(username, datasetname, geojson):
    """Upload GeoJSON (dict, str, or binary file-like) as '<username>.<datasetname>'.

    Returns the raw response from the Mapbox Uploads API.
    """
    # Normalize the payload to a binary file-like object.
    # isinstance() instead of type() == ...: also accepts subclasses and
    # is the idiomatic type check.
    if isinstance(geojson, dict):
        geojson = json.dumps(geojson)
    if isinstance(geojson, str):
        geojson = BytesIO(geojson.encode())
    # see: https://www.mapbox.com/api-documentation/#uploads

    # Acquisition of credentials, staging of data, and upload
    # finalization is done by a single method in the Python SDK.
    service = Uploader()
    dataset_name = '{}.{}'.format(username, datasetname)
    resp = service.upload(geojson, dataset_name)

    return resp
Esempio n. 4
0
def upload_file(
    data,
    name,
    username='******',
    token='sk.eyJ1IjoibWF0dXRlaWdsZXNpYXMiLCJhIjoiY2puODA4bW8xMGV1dzNrcGtiOGp6NXQ5aCJ9.DohKmjn_o6MK1Y4Q5FG8ew'
):
    """Serialize *data* to ./upload_data.geojson and upload it as '<username>.<name>'.

    Returns the Mapbox Uploads API response so callers can inspect the
    status (previously the response was silently discarded).

    SECURITY NOTE(review): a secret access token is hard-coded as the
    default value — it should be rotated and sourced from configuration,
    not committed to source.
    """
    # Dump into file for upload
    with open('./upload_data.geojson', 'w') as outfile:
        json.dump(data, outfile)

    service = Uploader(access_token=token)
    with open('./upload_data.geojson', 'rb') as src:
        # Acquisition of credentials, staging of data, and upload
        # finalization is done by a single method in the Python SDK.
        upload_resp = service.upload(src, username + '.' + name)

    # Backward compatible: callers that ignored the old None return are
    # unaffected; new callers can check the response.
    return upload_resp
Esempio n. 5
0
    def upload_as_tileset(self, dataset_filename, map_id=None):
        """Upload a dataset file as a Mapbox tileset and wait for completion.

        Generates a random hex map id when none is given. Polls the upload
        status every 5 seconds, up to 15 times (~75 seconds).
        """
        map_id = uuid.uuid1().hex if not map_id else map_id
        service = Uploader()
        # Binary mode: the SDK streams raw bytes to the staging bucket
        # (text mode would corrupt non-text datasets).
        with open(os.path.join(DATASETS_PATH, dataset_filename), 'rb') as src:
            upload_resp = service.upload(src, map_id)

        if upload_resp.status_code == 201:
            upload_id = upload_resp.json()['id']

            for i in xrange(0, 15):
                status_resp = service.status(upload_id).json()
                # BUG FIX: 'complete' is a boolean in the status payload;
                # comparing it to the string 'complete' was always False,
                # so the loop always ran to exhaustion.
                if status_resp['complete']:
                    logger.info("Tileset completed for dataset %s",
                                dataset_filename)
                    break
                else:
                    logger.info("Waiting for upload to complete")
                    sleep(5)
            else:
                # for/else: only reached when the loop was NOT broken,
                # i.e. the upload never reported completion (the original
                # logged this message even on success).
                logger.info(
                    "Upload did not complete in the last 75 seconds. Check dashboard."
                )
Esempio n. 6
0
def upload_shapefile(data, dataset_name):
    """Upload *data* as a Mapbox tileset and block until processing ends.

    Raises MapboxException on a failed upload (non-201), a failed status
    check (non-200), or a processing error reported by Mapbox.
    """
    service = Uploader()
    upload_resp = service.upload(data, dataset_name)
    if upload_resp.status_code != 201:
        raise MapboxException(
            f"Upload failed with status {upload_resp.status_code}")

    upload_id = upload_resp.json()["id"]

    # wait on status to change
    while True:
        status = service.status(upload_id)
        if status.status_code != 200:
            raise MapboxException(
                f"Status check failed with status {status.status_code}")
        status = status.json()
        if status["complete"]:
            break
        # .get() guards against payloads that omit the 'error' key —
        # the original direct indexing could raise KeyError and mask
        # the real polling state.
        if status.get("error"):
            raise MapboxException("mapbox error: " + status["error"])
        time.sleep(10)
Esempio n. 7
0
def upload_shapefile(data, dataset_name):
    """Upload *data* as a Mapbox tileset, printing progress until the
    tileset completes, a status check fails, or Mapbox reports an error."""
    service = Uploader()
    upload_resp = service.upload(data, dataset_name)
    # Guard clause instead of nesting the whole poll loop in the if-branch.
    if upload_resp.status_code != 201:
        print("Upload failed with status", upload_resp.status_code)
        return

    upload_id = upload_resp.json()["id"]
    while True:
        status = service.status(upload_id)
        if status.status_code != 200:
            break
        print(status.status_code)
        status = status.json()
        if status["complete"]:
            break
        # .get() guards against payloads that omit the 'error' key
        # (direct indexing could raise KeyError mid-poll).
        if status.get("error"):
            print("mapbox error:", status["error"])
            break
        print(status)
        time.sleep(10)
Esempio n. 8
0
                'go': go,
            }
        })


# Stream every record of the 'Restaurants' table through add_feature,
# which (defined above this snippet) collects GeoJSON features into the
# module-level `features` list.
airtable.process_records(
    table='Restaurants',
    params={
        #	"fields": ["Restaurant", "Status", "Longitude", "Latitude" "[TA] ID", "[TA] Reviews", "[TA] Rating", ],
        #	"filterByFormula": "{Today's Trip}",
        #	"pageSize": 100,
    },
    operation=add_feature,
)

# Wrap the collected features in a GeoJSON FeatureCollection envelope.
FeatureCollection = {
    'type': 'FeatureCollection',
    'features': features,
}

# Persist the collection to disk, pretty-printed with non-ASCII preserved.
filename = 'features.json'
with open(filename, 'w', encoding='utf-8') as file:
    json.dump(FeatureCollection, file, ensure_ascii=False, indent=4)

from mapbox import Uploader
from settings.secret import MAPBOX

# Upload the GeoJSON file as the 'test-kadoresto' tileset and echo the
# Mapbox API response for inspection.
uploader = Uploader(access_token=MAPBOX['SECRET_KEY'])
with open(filename, 'rb') as file:
    upload_response = uploader.upload(file, 'test-kadoresto')
    print(upload_response.json())
        os.chdir(data_dir)
        k.set_contents_from_filename(act_out_file_name + '.zip')
        k.set_contents_from_filename(seg_out_file_name + '.zip')
        logging.info('successfully staged zip files on AWS S3 bucket')
    except Exception as e:
        logging.exception('error uploading files to S3 staging bucket')
        pass

    os.chdir(data_dir)

    from mapbox import Uploader
    service = Uploader(access_token=token)

    try:
        with open(seg_out_file_name + '.zip', 'r') as src:
            upload_resp_seg = service.upload(src, 'ADV_all_segments')
        with open(act_out_file_name + '.zip', 'r') as src:
            upload_resp_act = service.upload(src, 'ADV_all_activities')

        upload_id_act = upload_resp_act.json()['id']
        upload_id_seg = upload_resp_seg.json()['id']

        status_resp_act = service.status(upload_id_act).json()
        status_resp_seg = service.status(upload_id_seg).json()
        logging.info('successfully uploaded files to Mapbox account')

    except Exception as e:
        logging.exception('error uploading Mapbox files using Uploads API to acount')
        pass

    try:
Esempio n. 10
0
# from mapbox import Geocoder
# >>> geocoder = Geocoder(access_token="pk.YOUR_ACCESS_TOKEN")
from mapbox import Uploader
service = Uploader()
from time import sleep
from random import randint
# NOTE(review): getfixture() is a pytest doctest helper — this snippet is
# the SDK's doctest example, not a standalone script; confirm before reuse.
mapid = getfixture('uploads_dest_id')  # 'uploads-test'
with open('tests/twopoints.geojson', 'rb') as src:
    upload_resp = service.upload(src, mapid)

# HTTP 422 means staging credentials were not ready yet; retry the upload
# up to five times, pausing five seconds between attempts.
if upload_resp.status_code == 422:
    for i in range(5):
        sleep(5)
        with open('tests/twopoints.geojson', 'rb') as src:
            upload_resp = service.upload(src, mapid)
        if upload_resp.status_code != 422:
            break

# Bare expression (doctest remnant) — has no effect when run as a script.
upload_resp.status_code

# Poll the upload status until Mapbox reports the tileset is complete
# (at most ~25 seconds).
upload_id = upload_resp.json()['id']
for i in range(5):
    status_resp = service.status(upload_id).json()
    if status_resp['complete']:
        break
    sleep(5)

# Bare expression (doctest remnant): the reported tileset should contain mapid.
mapid in status_resp['tileset']
Esempio n. 11
0
    def GOES17_Tile_Creation(self):
        """Render GOES-17 R/G/B bands to a GeoTIFF, warp it out of the
        satellite's geostationary projection, convert it to .mbtiles
        (raster via rio-mbtiles, or vector via tippecanoe when
        self.mbtileFormat == 'polygon'), then delete old tilesets and
        upload the new one to Mapbox.

        Reads self.data (opened GOES-17 dataset), self.R_band/G_band/B_band
        (assumed 0..1 arrays — scaled to bytes below), self.mbtileFormat,
        self.tileMaxZoom and self.mb_access_token. Returns None.
        """
        # Open the file
        try:
            C = self.data
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        except Exception:
            print("Can't open file:", self.fileName)
            return None
        delete_old_tiles = True
        # Scan's start time, converted to datetime object
        scan_start = datetime.strptime(C.time_coverage_start, '%Y-%m-%dT%H:%M:%S.%fZ')
        scan_start = scan_start.strftime("%Y%m%d_%H%M")
        # scan_mid = int(np.ma.round(C.variables['t'][0], decimals=0))
        # DATE = datetime(2000, 1, 1, 12) + timedelta(seconds=scan_mid)
        # Satellite height
        sat_h = C['goes_imager_projection'].perspective_point_height

        # Satellite longitude
        sat_lon = C['goes_imager_projection'].longitude_of_projection_origin

        # Satellite sweep
        sat_sweep = C['goes_imager_projection'].sweep_angle_axis

        major_ax = C['goes_imager_projection'].semi_major_axis
        minor_ax = C['goes_imager_projection'].semi_minor_axis

        # The projection x and y coordinates equals the scanning angle (in radians) multiplied by
        # the satellite height (https://proj.org/operations/projections/geos.html)
        x = C['x'].values * sat_h
        y = C['y'].values * sat_h

        # map object with pyproj
        p = Proj(proj='geos', h=sat_h, lon_0=sat_lon, sweep=sat_sweep)

        # to latitude and longitude values.
        XX, YY = np.meshgrid(x, y)
        lons, lats = p(XX, YY, inverse=True)

        nx = C['x'].size
        ny = C['y'].size

        xmin, ymin, xmax, ymax = [x.min(), y.min(), x.max(), y.max()]
        xres = (xmax - xmin) / float(nx)
        yres = (ymax - ymin) / float(ny)

        # The satellite seems to be off in the x-dir by 4 pixels and 2 pixels in the y-dir
        #x_adjustment_factor = xres * 4
        #y_adjustment_factor = yres * 2
        x_adjustment_factor = xres * 0
        y_adjustment_factor = yres * 0
        geotransform = ((xmin + x_adjustment_factor), xres, 0, (ymax - y_adjustment_factor), 0, -yres)

        # Scale bands to 0..255 for an 8-bit RGB GeoTIFF.
        R_band = self.R_band * 255
        G_band = self.G_band * 255
        B_band = self.B_band * 255

        # Get the path name of the directory for our GEOTIF and MBTILES
        dir_path = os.path.dirname(os.path.realpath(__file__))
        print("Your Files Are Saving Here: " + dir_path)

        # The file created here will be overwritten every time. The geotiff is fed into rio mbtiles.
        dst_ds = gdal.GetDriverByName('GTiff').Create(os.path.join(dir_path,'GOES17.tif'), nx, ny, 3, gdal.GDT_Byte)
        #dst_ds = gdal.Open('GOES17.tif', nx, ny, 3, gdal.GDT_Byte)

        # FROM: https://github.com/lanceberc/GOES/blob/master/GOES_GDAL.ipynb
        # The important part of this is to use the +over in the projection. It accounts for the fact that
        # the image crosses -180 W
        goes17_proj = f"+proj=geos -ellps=GRS80 +f=.00335281068119356027 +sweep=x +no_defs +lon_0={sat_lon} " \
                      f"+h={sat_h} +x_0=0 +y_0=0 +a={major_ax} +b={minor_ax} +units=m +over +lon_wrap=-180"

        # HUGE HELP! FROM: https://github.com/lanceberc/GOES/blob/master/GOES_GDAL.ipynb
        # The EPSG definition of Mercator doesn't allow longitudes that extend past -180 or 180 which makes
        # working in the Pacific difficult. Define our own centralized on the anti-meridian to make working
        # with GOES-17 (which crosses the anti-meridian) continuous.
        #proj_anti_mercator = "+proj=merc +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs +over +lon_0=-180"
        proj_anti_mercator = "+proj=merc +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs +over +lon_0=0 +lon_wrap=-180"

        warpOptions = gdal.WarpOptions(
            format="GTiff",
            width=nx,
            height=ny,
            outputBoundsSRS="EPSG:4326",  # WGS84 - Allows use of lat/lon outputBounds
            outputBounds=[-161.0, 15.0, -103.0, 50.0], # lat/long of ll, ur corners
            dstSRS="EPSG:4326",  # GOES-17 full-disk crosses the anti-meridian
            warpOptions=["SOURCE_EXTRA=500"],  # Magic from The Internet for off-earth pixels
            multithread=True,  # Not sure if this buys anything on my setup
        )

        dst_ds.SetGeoTransform(geotransform)                  # specify coords
        # srs.ImportFromEPSG(3857)                            # WGS84 x/y
        # srs.ImportFromEPSG(4326)                            # WGS84 lat/long
        dst_ds.GetRasterBand(1).WriteArray(R_band)            # write r-band to the raster
        dst_ds.GetRasterBand(2).WriteArray(G_band)            # write g-band to the raster
        dst_ds.GetRasterBand(3).WriteArray(B_band)            # write b-band to the raster

        srs = osr.SpatialReference()                          # establish encoding
        srs.ImportFromProj4(goes17_proj)                      # Get the projection of the GOES-17.

        dst_ds.SetProjection(srs.ExportToWkt())               # set the projection of our geotiff to match the GOES-17
        gdal.Warp(os.path.join(dir_path,'GOES17_warped.tif'), dst_ds, options=warpOptions)
        dst_ds.FlushCache()                                   # write to disk
        dst_ds = None                                         # clear data

        # rio is installed by conda by installing "rasterio". You still need to >pip install rio-mbtiles and automatically installs the exe files for command lines. Need to tell python
        # where the path to rio exists. Using $> which rio incase the install directory changes.
        # rio_path = check_output('which rio')
        rio_path = "/opt/anaconda3/envs/django_python/bin/rio"
        if "Linux" in platform.platform(terse=True):
            rio_path = "/var/www/venv/django_python/bin/rio"

        # The path of our file created by the RGB bands above.
        input_path = os.path.join(dir_path, "GOES17_warped.tif")

        tile_source = f'GOES17_{scan_start}.mbtiles'
        tile_id = f'GOES17_{scan_start}'

        # If user wants raster tiles, then use rio to create the mbtiles.
        if self.mbtileFormat != 'polygon':
            call([rio_path, 'mbtiles', '--format', 'PNG', '--overwrite', '--zoom-levels', f'1..{self.tileMaxZoom}', input_path, tile_source])

        # If the user wants vector tiles, then use rasterio and fiona to create the geojson, then use
        # tippecanoe to create the vector tiles from the geojson
        else:
            polygonize(input_path)
            json_source = os.path.join(dir_path, "GOES17_Poly.json")

            call(['tippecanoe', '--force', '-l', 'goes17_fire', '--coalesce', '-P', '-D9', '-z6', '--grid-low-zooms', '-o', tile_source, json_source])
        uploader = Uploader(access_token=self.mb_access_token)
        tile_list = requests.get(f'https://api.mapbox.com/tilesets/v1/smotley?access_token={self.mb_access_token}')
        tile_list = json.loads(tile_list.text)

        # Remove previously published tilesets before uploading the new one.
        if delete_old_tiles:
            delete_tilesets(tile_list, self.mb_access_token)
        with open(tile_source, 'rb') as src:
            # test = requests.delete(f'https://api.mapbox.com/tilesets/v1/smotley.GOES17_TC_Fire?access_token={mb_access_token}')
            print("UPLOADING TILE " + tile_source)
            upload_resp = uploader.upload(src, tile_id)
            if upload_resp.status_code == 201:
                print("DONE UPLOADING TILE " +tile_id)  # Dict containing RGB colorTuple that combines the FireTemp and TrueColor
            else:
                print("UPLOAD FAILED")
            os.remove(tile_source)                     # Delete .mbtiles file
        return
Esempio n. 12
0
# Tilesets to (re)publish: Mapbox map id -> prebuilt .mbtiles archive path.
tilesets = [
    {
        'mapid': 'habitat-areas-2017',
        'path': '/app/tiles/habitat-areas-2017.mbtiles'
    },
    {
        'mapid': 'landscape_regions-2017',
        'path': '/app/tiles/landscape_regions-2017.mbtiles'
    },
]

# Upload each archive and record its upload id for later status polling.
# NOTE(review): relies on a module-level `service` (a mapbox Uploader)
# defined outside this snippet.
for tileset in tilesets:
    with open(tileset['path'], 'rb') as src:
        print('Uploading:')
        upload_resp = service.upload(src, tileset['mapid'])
        tileset['upload_id'] = upload_resp.json()['id']
        print(upload_resp.json())

def uploads_complete(tileset_list):
    """Return True once every tileset's Mapbox upload reports complete.

    Queries the status endpoint for each entry (no short-circuiting, so
    every upload id is checked on every call).
    """
    completion_flags = [
        service.status(item['upload_id']).json()['complete']
        for item in tileset_list
    ]
    return all(completion_flags)


# Uploads are processed asynchronously; point the operator at the dashboard.
print(
    'Processing uploads... Check status at https://www.mapbox.com/studio/tilesets/'
)
Esempio n. 13
0
import os
import sys
from mapbox import Uploader

# NOTE(review): `glob` is called below but never imported in this snippet —
# it presumably came from a `from glob import glob` trimmed by the excerpt;
# confirm before running standalone.

# Command-line arguments: Mapbox username, ESRI world file, input dir, output dir.
username = sys.argv[1]
world = sys.argv[2]
inDir = sys.argv[3]
outDir = sys.argv[4]

service = Uploader()

tifs = '%s/*.tif' % (inDir)
for absPath in glob(tifs):
    fullfile = os.path.basename(absPath)
    filename = os.path.splitext(fullfile)[0]
    # Copy the world file next to each TIFF so gdal_translate can georeference it.
    # NOTE(review): os.system() on interpolated paths is shell-injection prone.
    worldCom = 'cp %s %s/%s.tfw' % (world, inDir, filename)
    print worldCom
    os.system(worldCom)
    # Re-tile and LZW-compress the raster, stamping EPSG:3857 and nodata=0.
    outCom = 'gdal_translate -co tiled=yes -co compress=LZW -co BLOCKXSIZE=256 -co BLOCKYSIZE=256 -of Gtiff -a_srs EPSG:3857 -a_nodata 0 %s %s/%s' % (
        absPath, outDir, fullfile)
    print outCom
    os.system(outCom)

    # NOTE(review): a binary raster is opened in text mode 'r'; 'rb' is safer.
    with open('%s/%s' % (outDir, fullfile), 'r') as src:
        # Acquisition of credentials, staging of data, and upload
        # finalization is done by a single method in the Python SDK.
        tilesetName = '%s.%s' % (username, filename)
        print 'uploading %s, %s' % (tilesetName, fullfile)
        uploadResp = service.upload(src, tilesetName)
        print uploadResp
Esempio n. 14
0
    def handle(self, *args, **options):
        """Export all geolocated schools as GeoJSON and upload the file to
        the Mapbox tileset configured in the Django database. Aborts when
        no tileset record exists."""
        try:
            # Try to load the tileset with tileset information from Django database.
            print("Trying to load the tileset from Django database...")

            # Retrieving the first record.
            mapbox_tileset = MapBoxDataset.objects.first()
            tileset_id = mapbox_tileset.tileset_id
            tileset_name = mapbox_tileset.name

            print("Found tileset:", tileset_id)

        # BUG FIX: .first() returns None on an empty table, so the
        # attribute access above raises AttributeError — it never raises
        # IndexError, meaning the original handler could not fire. Also
        # bind tileset_name so it is never referenced unbound.
        except (IndexError, AttributeError):
            print(
                "Could not find a tileset. Please create a tileset in Django database before this process."
            )

            tileset_id = None
            tileset_name = None

        # Stopping if no tileset is found.
        # BUG FIX: the original used `pass`, which fell through and tried
        # to upload with tileset_id=None despite the stated intent.
        if not tileset_id:
            return

        # Initializing uploader service.
        service = Uploader(access_token=settings.MAPBOX_TOKEN)

        # Retrieving schools list.
        schools = School.objects.all()

        # Initializing json response.
        allfeatures = {"features": [], "type": "FeatureCollection"}

        # Emit one GeoJSON Point feature per school with a usable lat/long.
        for school in schools:
            if school.lat and school.long:
                if school.lat != 0 or school.long != 0:
                    feature = {
                        'type': 'Feature',
                        'geometry': {
                            'type': "Point",
                            'coordinates':
                            [float(school.long),
                             float(school.lat)]
                        },
                        'properties': {
                            'name': school.name,
                            'id': school.id
                        }
                    }
                    allfeatures["features"].append(feature)

        # NOTE(review): instantiated but unused below — kept in case the
        # storage class constructor has side effects; confirm and remove.
        file_storage = get_storage_class()()

        # Write the GeoJSON to a temp file, then stream it to Mapbox.
        with tempfile.TemporaryDirectory() as tempdir:
            fname = os.path.join(tempdir, 'schools.geojson')
            with open(fname, 'w') as f:
                f.write(json.dumps(allfeatures))

            with open(fname, 'rb') as f:
                upload_resp = service.upload(f, tileset_id, name=tileset_name)

                if upload_resp.status_code == 201:
                    print("Upload successful.")
                else:
                    print("Upload failed with code:", upload_resp.status_code)
Esempio n. 15
0
#!./venv/bin/python

from io import BytesIO
from configparser import ConfigParser

from mapbox import Uploader
from geojson import Point, Feature, FeatureCollection, dumps
import yaml

# Mapbox credentials live in an ini file: [mapbox] access-token = ...
config = ConfigParser()
config.read('mapbox.ini')

# NOTE(review): yaml.load() without an explicit Loader is deprecated and
# unsafe on untrusted input — prefer yaml.safe_load() for this data file.
sites = yaml.load(open('sites.yaml', encoding='utf-8').read())
# Flatten every campaign's sites into one GeoJSON FeatureCollection; each
# feature carries the campaign name, a 1-based display index, date and text.
points = FeatureCollection([
    Feature(geometry=Point((float(s["longitude"]), float(s["latitude"]))),
            properties=dict(campaign=c['campaign'],
                            location="%i) %s" % (i + 1, s['location']),
                            idx=str(i + 1),
                            date=s['date'],
                            text=s['text'])) for c in sites
    for i, s in enumerate(c['sites'])
])

# Upload the serialized collection straight from memory as a named tileset.
service = Uploader(access_token=config['mapbox']['access-token'])
upl = service.upload(BytesIO(dumps(points).encode('utf-8')),
                     'nzherald.new-zealand-wars-sites-v15',
                     name="New Zealand Wars Sites v15")

print(upl.json())