Example #1
import json

import WeoGeoAPI


def main():

    # instantiate the weosession and connect
    # market.weogeo.com gets us the whole public library
    weos = WeoGeoAPI.weoSession('http://market.weogeo.com', '*****@*****.**', 'avimordy')

    # Now we connect to the market
    weos.connectToMarket()

    # if you want to see your connection parameters use this line
    #print weos

    # Get all the datasets that intersect this bounding box in the public library
    dataSets = weos.getDatasets('JSON', '&north=37.1714&south=37.1012&west=-122.1728&east=-122.0614&per_page=25')

    # Here are a bunch of calls I used to dig into the structure of a returned dataset. Given its XML origins it
    # has a pretty deeply nested structure. Playing around like this got me down to the minimal information I
    # needed to decide which data I wanted.

    #print type(dataSets)

    # Get the actual data
    #print json.dumps(dataSets, sort_keys=True, indent=4)

    # dataSets is a (status, payload) tuple; index 1 is the actual dictionary of the data, not the metadata of the query
    dataDict = dataSets[1]

    # Now pull the actual array of datasets
    dataList = dataDict['items']
    #print json.dumps(dataList[0], sort_keys=True, indent=4)
    #for keys in dataList[0].keys():
    #    print keys
    #print type(dataDict['items'])

    # We want token, name, data_type, layers, and the data's coordinate type (we need the last one to create a
    # bounding box in the native coordinate system).
    
    for item in dataList:

        # The token is the unique identifier which we use to create a job
        print item['token']
        print item['name']
        
        print item['data_type']
        print json.dumps(item['layers'])

        # The tile boundaries describe the dataset's extent
        print json.dumps(item['boundaries']['tiles'])
        
        print ('\n----/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/----\n')
    

    print "----------\n"
    
    datasetOSM = weos.getDataset('e0dd1ef6-df94-4e7f-a95f-f48327ba3467','JSON')
    #datasetsOSM = weos.getDatasets('JSON', '&north=89&south=-89&west=-179&east=179')
    print json.dumps(datasetOSM, indent=4, sort_keys=True)
    
    print "Finished!"
Example #2
import json

import WeoGeoAPI


def main():

    weos = WeoGeoAPI.weoSession('http://market.weogeo.com', '*****@*****.**', 'avimordy')

    if weos.connectToMarket():

        print "\n Getting Download(s) \n"
        
        response = weos.getDownloadFile("921f2d6f-4f21-4238-aa0d-3cb6452a9883")
        print json.dumps(response, indent=4)
Example #3
def get_connex(options):
    print "Trying to connect to library %s using username %s and password %s" % (options.library,options.username,options.password)
    weos = WeoGeoAPI.weoSession(options.library,options.username,options.password)
        
    result = weos.connect()        
    if not result:
        print "\nCould not connect to your library with the credentils that you supplied.\nPlease check #your credentials and the CLI syntax.\n"
        parser.print_help()
        sys.exit()

    return weos
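
A sketch of how get_connex might be driven from the command line. The option names mirror the attributes the function reads (options.library and friends), and a module-level parser is assumed because the function calls parser.print_help():

import sys
import optparse

import WeoGeoAPI

parser = optparse.OptionParser()
parser.add_option("--library", help="library subdomain, e.g. market.weogeo.com")
parser.add_option("--username")
parser.add_option("--password")

if __name__ == '__main__':
    options, args = parser.parse_args()
    weos = get_connex(options)
    print weos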
Example #4
"""
In this example we will demonstrate how to obtain the download link for a job order using
information that was logged from the order_multiple.py example. Important note: download
links are only valid for 7 days after the job has been ordered.
"""

import WeoGeoAPI

# Establish connection to Trimble Data Marketplace
session = WeoGeoAPI.weoSession('market.trimbledata.com', 'username', 'password')
session.connect()
print session

# List of job tokens pulled from previous example
jobtokens = ['c6c89bce-c633-4fe3-814f-61265d3324a6',
             '3ea0b1c7-0c09-44b5-a7fe-b183b4e8ab76',
             '8d0a9080-3bcc-426a-b367-585ee8d01161']

# Get download link for each job
for token in jobtokens:
    response = session.getDownloadFile(token)
    for item in response.content:
        print item['url']

print "\nAll download links displayed."
Example #5
import json

import WeoGeoAPI


def main():

	print "making jobs"

	# Alright, we need to make an order for each dataset we want, and we need to add it to our cart.
	# For the meaning of the different pieces of the job, please refer to "create a job" on the following
	# page:  http://www.weogeo.com/developer_doc/Jobs_API.html

	# DRGs are raster images which are scans of the USGS Topo sheets - we will use this as a backdrop
	drgOrder = {

		# may be either true or false
		"cart" : "true",
		"job" :
			{
				# This corresponds to the token from the listing we did before
				"dataset_token" : "f1bca22c-013e-4e57-a78b-f9bbb3a918e8",

				# must be 1 or it will not be accepted
				"content_license_acceptance" : "1",

				"parameters":
					{
						"job_geocrop" : "Clip",
						# All of the coordinates must be in the native coords of the data layer,
						# NOT in our projected coord system
						"job_north": "4457478.778",
						"job_south": "4436707.6",
						"job_east" : "-13579988.662",
						"job_west" : "-13595996.424",
						"job_layers": "Layer_1",
						"job_datum_projection" : "EPSG:4326",
						#"job_datum_projection" : "Native",
						# FME name for file format
						"job_file_format": "JPEG",


						# raster only - this seems to correspond to the level of compression used on the image
						"job_spatial_resolution": "1"
					}
			}
	}

	# Add the job we just created to a list which will contain all orders
	jobOrders = [drgOrder]



	# Next job will be the vector data from the US Census - TIGER/Line - Santa Cruz County, California
	landmarkOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "63472b72-0f0d-2554-8120-67b445942ff7",
				"content_license_acceptance" : "1",
				"parameters":
					{
						# Spatial_Selection will grab everything that touches the bounding box but not clip it to the box
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714",
						"job_south": "37.1012",
						"job_east" : "-122.0614",
						"job_west" : "-122.1728",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "Point Landmark;Area Landmark",
						"job_datum_projection" : "EPSG:4326",
						# Most of this data is natively in Shapefile format, but we could set this to SHAPE instead
						"job_file_format": "Native",
					}

			}

	}

	censusOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "85da9c3d-a58b-9b5f-9c59-f324f4d4d186",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "Census Block Group;Census Block;Census Tract;Tribal Block Group;Tribal Census Tract",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "Native",
						}

			}

	}

	votingOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "1a9f8125-cd90-ef5a-9c16-c3329fb51497",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "State Legislative District--Lower Chamber;State Legislative District--Upper Chamber;Place",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "Native",
						}

			}

	}

	zipOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "5c910ad3-fc69-4023-b180-8c862639eadb",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "Layer_1",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "Native",
						}

			}

	}

	schoolOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "1a4d4a22-d84b-4cb3-b874-1a99bf6d42eb",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "School District--Elementary;School District--Secondary;School District--Unified",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "Native",
						}

			}

	}

	transOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "fd732c1b-880b-4ea2-ac4e-60e8f95d677f",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "Primary Roads;Primary and Secondary Roads;All Roads;Rails",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "Native",
						}

			}

	}

	waterOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "e1f0bcce-30cb-4890-81e2-e5e768c7ddfa",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "Area Hydrography;Linear Hydrography",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "Native",
						}

			}

	}


	OSMOrder = {
		"cart" : "true",
		"job" :
			{
				"dataset_token" : "e0dd1ef6-df94-4e7f-a95f-f48327ba3467",
				"content_license_acceptance" : "1",
				"parameters":
					{
						"job_geocrop" : "Spatial_Selection",
						"job_north": "37.1714 ",
						"job_south": "37.1012 ",
						"job_east" : "-122.1728 ",
						"job_west" : "-122.0614 ",
						# We are grabbing all the landmarks from this census dataset
						"job_layers": "Highway;Man Made;Place;Leisure;Shop;Tourism;Amenity",
						"job_datum_projection" : "EPSG:4326",
						"job_file_format": "SHAPE",
						}

			}

	}

	jobOrders.append(landmarkOrder)
	jobOrders.append(censusOrder)
	jobOrders.append(votingOrder)
	jobOrders.append(zipOrder)
	jobOrders.append(schoolOrder)
	jobOrders.append(transOrder)
	jobOrders.append(waterOrder)
	jobOrders.append(OSMOrder)


	weos = WeoGeoAPI.weoSession('http://market.weogeo.com', '*****@*****.**', 'avimordy')

	if weos.connectToMarket():

		print "\nSubmitting your jobs...\n"

		for job in jobOrders:
			response = weos.createJob(job, 'JSON')
			print "Here is your order info:\n"
			print job['job']['dataset_token'] + '\n'
			print json.dumps(response, indent=4)

		print "\nfinished creating jobs - now to order!\n"
		weoJobOrder = weos.orderJobsInCart('JSON')
		print json.dumps(weoJobOrder, indent=4)

		print "\n\n ---Finished---\n"
Example #6
"""
In this example we will demonstrate how to order a raster job with customization using
information we logged from the getdatasets_by_area.py example. The dataset used in this
example is "Natural Earth Shaded Relief", which can be found here:
http://market.weogeo.com/datasets/5dbdb7db-1acf-4f19-b629-04b54f907552
"""

# Third Party Modules
import WeoGeoAPI

# Establish connection to WeoGeo Market
weos = WeoGeoAPI.weoSession("market.weogeo.com", "username", "password")
weos.connect()
print weos

# Set initial job parameters. The 'note' variable is optional. Here we only want one layer, '10m High Res'.
# Spatial resolution is 1 (native). Use 2, 3 or 4 to deliver as 2x/3x/4x coarser.
# Reproject our order to NAD83-Geo(EPSG:4269).
newJob = WeoGeoAPI.weoJob(
    datasetToken="5dbdb7db-1acf-4f19-b629-04b54f907552",
    layers=["10m High Res"],
    outputFormat="GeoTIFF",
    coordinateSystem="EPSG:4269",
    spatialResolution="1",
    note="Extract of area around Oregon.",
    acceptLicense=True,
)

# Set crop box around Oregon. EPSG must be GEO(EPSG:4326) or Spherical Mercator(EPSG:3857).
newJob.setBoxCropArea("EPSG:4326", 46.17, 42.13, -116.28, -124.33)
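
From here the flow matches Example #13 below: create the job on the server, and price it before ordering if desired. The minimal next step, mirroring that example:

response = weos.createJob(newJob)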
Example #7
# This script is a library that I made to handle the various functions involved in making the KML
# and getting all of the links, as well as getting the user's input and processing it, to a degree.
import WeoGeoAPI
import BeautifulSoup
import random
import getpass  # trying this for password input

baseUrlString = "http://market.weogeo.com/"  # baseUrlString is a string we will need later on, for the api calls
username = ""
password = ""
http = WeoGeoAPI.httpController()  # an httpController object to handle the requests and such
# getParams asks the user for their input on a few relevant questions
def getParams():
    global baseUrlString, username, password  # assign to the module-level credentials above
    paramsEntered = False  # this flag will be turned True once the parameters entered are valid
    validSubDomain = False
    while not paramsEntered:  # this part will take some params
        try:
            print "enter your parameters:"
            while not validSubDomain:
                subdomain = raw_input("enter your desired subdomain ('http://market.weogeo.com' is default):\n")
                if subdomain == "":
                    subdomain = "market.weogeo.com"
                thisUsername = raw_input("enter your username:\n")
                thisPassword = getpass.getpass("enter your password:\n")  # haven't tested this yet
                urlExt = "datasets.json?page=1&east=0&north=0&south=0&west=0"
                resultCode, resultString = http.Get(subdomain, urlExt, thisUsername, thisPassword)
                if resultCode == 200:
                    baseUrlString = subdomain
                    username = thisUsername
                    password = thisPassword
                    validSubDomain = True
                else:
                    print "could not connect (HTTP %s), please try again" % resultCode
            paramsEntered = True
        except Exception as err:  # minimal handler; the original snippet is truncated here
            print "invalid parameters: %s" % err
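
A sketch of how this library might be driven: getParams() fills in the module-level credentials, after which the httpController can be reused for further calls:

if __name__ == '__main__':
    getParams()
    code, body = http.Get(baseUrlString, 'datasets.json?page=1', username, password)
    print "HTTP %s" % code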
Example #8
"""
In this example we will demonstrate how to order a standard job using information we
logged from the getdatasets_by_area.py example. A job on a Standard listing delivers all data
and therefore does not allow for customization. The dataset used in this example is "NASA: Earth at Night - 2000",
which can be found here: http://market.trimbledata.com/#/datasets/5416193f-ba3d-f45e-bd9c-6dbf8193bfad
"""

import WeoGeoAPI

# Establish connection to Trimble Data Marketplace
session = WeoGeoAPI.weoSession('market.trimbledata.com', 'username', 'password')
session.connect()
print session

# Create job object. Since there is no customization involved, we only need to supply the token and accept the license.
testJob = WeoGeoAPI.weoJob(dataset_token='5416193f-ba3d-f45e-bd9c-6dbf8193bfad',
                           content_license_acceptance=True)

# Create job object to be used for ordering.
response = session.createJob(testJob)

# Pricing and size information
price = session.getPrice(testJob)
print "\n-Order Summary-"
print "Price: " + price.content['job_price']['price']
print "Size:  " + price.content['job_price']['human_estimated_data_size']

# Bail out if the job could not be created
if response.status != 201:
    print response.content
    exit(1)
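
When creation does succeed (HTTP 201), the order itself is placed from the cart, as in Example #5. A minimal sketch, assuming orderJobsInCart behaves the same on this session object:

if response.status == 201:
    order = session.orderJobsInCart('JSON')
    print order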
Example #9
"""
In this example we will demonstrate how to use the cart to order multiple datasets using
information that was logged from the getdatasets_by_area.py example. We will use the vector,
raster and standard datasets found in previous examples. We will log the job tokens for use
in another example to download the jobs.
"""

# Third Party Modules
import WeoGeoAPI

# Establish connection to WeoGeo Market
weos = WeoGeoAPI.weoSession('market.weogeo.com', 'username', 'password')
weos.connect()
print weos

# Create text file to log tokens
outfile = open('job_tokens.txt', 'w')

# Create lists for job instances and another list for their tokens
jobs = []
jobtokens = []

# Create standard job object and append to jobs list
standardJob = WeoGeoAPI.weoJob( datasetToken = '9dc42e34-cbd0-6952-ad6c-fb39eb23fd0a',
                                acceptLicense = True)
jobs.append(standardJob)

# Create vector job object and append to jobs list
vectorJob = WeoGeoAPI.weoJob( datasetToken = 'bfc2b36e-3d0d-4a6d-935d-e9ab090aaa3c',
                              layers = ['Area Hydrography', 'Linear Hydrography'],
                              outputFormat = 'SHAPE',
                              acceptLicense = True)
jobs.append(vectorJob)
Example #10
"""
In this example we will demonstrate how to order a vector job with customization using
information we logged from the getdatasets_by_area.py example. The dataset used in this
example is "TIGER/Line 2014", which can be found here:
http://market.trimbledata.com/#/datasets/tiger-line-2014
"""

import WeoGeoAPI

# Establish connection to Trimble Data Marketplace
session = WeoGeoAPI.weoSession('market.trimbledata.com', 'username', 'password')
session.connect()
print session

# Set initial job parameters. The 'note' variable is optional. For datasets with no layers, the layers parameter can be omitted.
newJob = WeoGeoAPI.weoJob(dataset_token='3d52ffef-50cf-41e5-aadf-a5ec3dc5fc11',
                          layers=['All Roads', 'Census Tract'],
                          job_file_format='SHAPE',
                          note='Extract of area around Portland, OR.',
                          content_license_acceptance=True)

# Create a polygon selection using pairs of X,Y points that are in sequence.
newJob.setClipAreaCoordinateSystem('EPSG:4326')
newJob.addClipAreaPoints(((-122.55, 45.43), (-122.46, 45.43), (-122.14, 45.32), (-122.14, 45.27), (-122.55, 45.27), (-122.55, 45.43)))

# Create job object to be used for ordering.
response = session.createJob(newJob)

# Pricing and size information
price = session.getPrice(newJob)
print "\n-Order Summary-"
Example #11
"""
In this example we will demonstrate how to order a standard job using information we
logged from the getdatasets_by_area.py example. A job on a Standard listing delivers all data 
and therefore does not allow for customization. The dataset used in this example is "GNS World Waterbodies",
which can be found here: http://market.weogeo.com/datasets/9dc42e34-cbd0-6952-ad6c-fb39eb23fd0a
"""

# Third Party Modules
import WeoGeoAPI

# Establish connection to WeoGeo Market
weos = WeoGeoAPI.weoSession('market.weogeo.com', 'username', 'password')
weos.connect()
print weos

# Create job object. Since there is no customization involved, we only need to supply the token and accept the license.
testJob = WeoGeoAPI.weoJob(datasetToken = '9dc42e34-cbd0-6952-ad6c-fb39eb23fd0a',
                           acceptLicense = True)

# Create job object to be used for ordering.
job_response, job_output = weos.createJob(testJob)

# Pricing and size information
response, price = weos.getPrice(testJob)
print "\n-Order Summary-"
print "Price: " + price['price']
print "Size:  " + price['human_estimated_data_size']

# Bail out if the job could not be created
if job_response != 201:
    print job_output
Example #12
import sys
import WeoGeoAPI

# define globals for authentication
if len(sys.argv) > 3:
    LIBRARY, USERNAME, PASSWORD = sys.argv[1],sys.argv[2],sys.argv[3]
else:
    LIBRARY, USERNAME, PASSWORD = ("YOUR-LIBRARY-SUBDOMAIN.weogeo.com",
                                   "YOUR-LIBRARY-USERNAME",
                                   "YOUR-LIBRARY-PASSWORD")

# instantiate the weosession and connect
weos = WeoGeoAPI.weoSession(LIBRARY, USERNAME, PASSWORD)
if not weos.connect():
    print "Couldn't connect with the credentials provided."
    sys.exit()

http_response, groups_dict = weos.getGroups("JSON")
print "="*20,"\n","Group Name,ID","\n","="*20
for item in groups_dict["items"]:
    print "%s,%s" % (item["name"], item["id"])
print "="*20,"\n"
    
http_response, roles_dict = weos.getRoles("JSON")
print "="*20,"\n","Role Name,ID","\n","="*20
for item in roles_dict["items"]:
    print "%s,%s" % (item["name"], item["id"])
print "="*20,"\n"
    
http_response, datasets_dict = weos.getDatasets("JSON")
print "="*20,"\n","Dataset Name,token","\n","="*20
for item in datasets_dict["items"]:
    print "%s,%s" % (item["name"], item["token"])
print "="*20,"\n"
Example #13
"""
In this example we will demonstrate how to order a raster job with customization using
information we logged from the getdatasets_by_area.py example. The dataset used in this
example is "Natural Earth Shaded Relief", which can be found here:
http://market.trimbledata.com/#/datasets/5dbdb7db-1acf-4f19-b629-04b54f907552
"""

import WeoGeoAPI

# Establish connection to Trimble Data Marketplace
session = WeoGeoAPI.weoSession('market.trimbledata.com', 'username', 'password')
session.connect()
print session

# Set initial job parameters. The 'note' variable is optional.
# Here we only want one layer, '10m High Res'.
# Spatial resolution is 1 (native). Use 2, 3 or 4 to deliver as 2x/3x/4x coarser.
# Reproject our order to NAD83-Geo(EPSG:4269).
newJob = WeoGeoAPI.weoJob(dataset_token='5dbdb7db-1acf-4f19-b629-04b54f907552',
                          layers=['10m High Res'],
                          job_file_format='GeoTIFF',
                          job_datum_projection='EPSG:4269',
                          job_spatial_resolution='1',
                          note='Extract of area around Oregon.',
                          content_license_acceptance=True)

# Set crop box around Oregon. EPSG must be GEO(EPSG:4326) or Spherical Mercator(EPSG:3857).
newJob.setBoxCropArea('EPSG:4326', 46.17, 42.13, -116.28, -124.33)

# Create job object to be used for ordering.
response = session.createJob(newJob)
"""
For this example we will show you how to get a list of free datasets available inside a bounding box
with supplied north, south, east and west coordinates on the WeoGeo Market, and log the results to
a CSV file.
"""

# First Party Modules
import csv
import json
# Third Party Modules
import WeoGeoAPI

# Establish connection to WeoGeo Market
weos = WeoGeoAPI.weoSession('market.weogeo.com', 'username', 'password')
weos.connect()
print weos

# Create CSV file to log our results
outfile = open('tokenlist.csv', 'wb')
writer = csv.writer(outfile)
headers = ['token', 'name', 'max_price', 'data_type', 'data_files_size',
           'file_format', 'north', 'south', 'east', 'west', 'layers']
writer.writerow(headers)

# List for datasets returned from parameters supplied
datasets = []
page = 1

# Parse through pages of results (15 per page) and log any free datasets.
while True:
    # NOTE: the original query string is truncated at this point in the source;
    # the bounding box below is an assumed stand-in (the Oregon box from Example #13).
    response, results = weos.getDatasets('JSON', '&north=46.17&south=42.13&west=-124.33&east=-116.28&page=%s' % page)
    if not results['items']:
        break
    for item in results['items']:
        datasets.append(item)
        # A max_price of 0 is assumed to mark a free dataset; fields missing
        # at the top level of an item are written as blanks.
        if item.get('max_price', 0) == 0:
            writer.writerow([item.get(h, '') for h in headers])
    page += 1

outfile.close()
print "Finished writing tokenlist.csv"
Example #15
"""
In this example we will demonstrate how to use the cart to order multiple datasets using
information that was logged from the getdatasets_by_area.py example. We will use the vector,
raster and standard datasets found in previous examples. We will log the job tokens for use
in another example to download the jobs.
"""

import WeoGeoAPI

# Establish connection to Trimble Data Marketplace
session = WeoGeoAPI.weoSession('market.trimbledata.com', 'username', 'password')
session.connect()
print session

# Create text file to log tokens
outfile = open('job_tokens.txt', 'w')

# Create lists for job instances and another list for their tokens
jobs = []
jobtokens = []

# Create standard job object and append to jobs list
standardJob = WeoGeoAPI.weoJob(dataset_token='5416193f-ba3d-f45e-bd9c-6dbf8193bfad',
                               content_license_acceptance=True)
jobs.append(standardJob)

# Create vector job object and append to jobs list
vectorJob = WeoGeoAPI.weoJob(dataset_token='3d52ffef-50cf-41e5-aadf-a5ec3dc5fc11',
                             layers=['All Roads', 'Census Tract'],
                             job_file_format='SHAPE',
                             note='Extract of area around Portland, OR.',
                             content_license_acceptance=True)
jobs.append(vectorJob)
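
The docstring above promises logged job tokens; here is a sketch of how the snippet plausibly continues. The shape of response.content, and the 'token' field name, are assumptions:

for job in jobs:
    response = session.createJob(job)
    token = response.content.get('token')  # 'token' key is an assumption
    if token:
        jobtokens.append(token)
        outfile.write(token + '\n')

outfile.close()
print "Logged %d job tokens." % len(jobtokens)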