Example #1
0
def main(jsonfile, startdate, enddate, outputdir):
    """Search Planet for PSScene imagery over an AOI and download 4-band assets.

    Searches both PSScene3Band and PSScene4Band catalogs for the date range,
    reports 3-band scenes that have no 4-band counterpart, then downloads the
    udm2, analytic_sr, and analytic_xml assets of the 4-band results.

    Parameters
    ----------
    jsonfile : str
        Path to a GeoJSON file; the first feature's geometry is used as the AOI.
    startdate, enddate : str
        ISO date strings bounding the 'acquired' search window.
    outputdir : str
        Directory where downloaded assets are written.

    Returns
    -------
    int
        0 on completion.
    """
    # ClientV1 reads PL_API_KEY from the environment itself; no need to
    # fetch it here.
    client = api.ClientV1()

    with open(jsonfile, 'r') as f:
        data = json.load(f)

    # AOI is the geometry of the first feature in the GeoJSON file.
    aoi = data['features'][0]['geometry']

    query = api.filters.and_filter(
        api.filters.geom_filter(aoi),
        api.filters.date_range('acquired', gt=startdate, lt=enddate))

    request4 = api.filters.build_search_request(query, ['PSScene4Band'])
    request3 = api.filters.build_search_request(query, ['PSScene3Band'])

    results3 = client.quick_search(request3)
    results4 = client.quick_search(request4)

    myreps3 = []
    myreps4 = []

    for item in results4.items_iter(limit=100):
        myreps4.append(item['id'])
        # PS2.SD is the Dove-R instrument (PSB.SD is Super Dove); label
        # consistently with the logging variant of this script.
        if item['properties']['instrument'] == 'PS2.SD':
            print('%s : %s' % (item['id'], 'Dove-R'))
        else:
            print('%s : %s' % (item['id'], 'Dove'))

    for item in results3.items_iter(limit=100):
        myreps3.append(item['id'])

    # Report 3-band scene IDs that have no 4-band counterpart.
    if len(myreps3) > len(myreps4):
        diff34 = np.setdiff1d(myreps3, myreps4).tolist()
        print("\nPossible 3Band data that could be made to 4Band:")
        for thisid in diff34:
            print("%s" % thisid)

    print("\n")

    mydownloader = downloader.create(client)

    print('Starting Download of %d images.' % len(myreps4))
    mydownloader.download(results4.items_iter(limit=100), ['udm2'], outputdir)
    print('Finished with Download of udm2.')
    mydownloader.download(results4.items_iter(limit=100), ['analytic_sr'],
                          outputdir)
    print('Finished with Download of analytic_sr.')
    mydownloader.download(results4.items_iter(limit=100), ['analytic_xml'],
                          outputdir)
    # Was 'metadata_xml' — the asset actually downloaded is analytic_xml.
    print('Finished with Download of analytic_xml.')
    mydownloader.shutdown()
    print('Downloader has been shut down.')
    return 0
Example #2
0
def download_order(order_id, dest, quiet, pretty):
    '''Download an order by given order ID'''
    client = clientv1()
    order_dl = downloader.create(client, order=True)

    # Start the progress/ANSI output before any items are fetched.
    out = downloader_output(order_dl, disable_ansi=quiet)
    out.start()

    order_items = client.get_individual_order(order_id).items_iter(limit=None)
    # Run under an interrupt handler so Ctrl-C shuts the downloader down.
    handle_interrupt(order_dl.shutdown, order_dl.download,
                     order_items, [], dest)
Example #3
0
def download(asset_type, dest, limit, sort, search_id, dry_run, activate_only,
             quiet, **kw):
    '''Activate and download'''
    cl = clientv1()
    # Page size is capped at 250; a falsy limit falls back to the cap.
    page_size = min(limit or 250, 250)
    # asset_type arrives as nested iterables (multi-option); flatten it.
    asset_type = list(chain.from_iterable(asset_type))
    # even though we're using functionality from click.Path, this was needed
    # to detect inability to write on Windows in a read-only vagrant mount...
    # @todo check/report upstream
    if not activate_only and not check_writable(dest):
        raise click.ClickException(
            'download destination "%s" is not writable' % dest)
    if search_id:
        # A saved search fixes the query server-side: neither dry-run stats
        # nor additional search options can be combined with it.
        if dry_run:
            raise click.ClickException(
                'dry-run not supported with saved search')
        if any(kw[s] for s in kw):
            raise click.ClickException(
                'search options not supported with saved search')
        search, search_arg = cl.saved_search, search_id
    else:
        # any requested asset-types should be used as permission filters
        kw['asset_type'] = [AssetTypePerm.to_permissions(asset_type)]
        req = search_req_from_opts(**kw)
        if dry_run:
            # Estimate the workload instead of downloading: sum yearly stats
            # buckets to an item count, times the number of asset types.
            req['interval'] = 'year'
            stats = cl.stats(req).get()
            item_cnt = sum([b['count'] for b in stats['buckets']])
            asset_cnt = item_cnt * len(asset_type)
            click.echo(
                'would download approximately %d assets from %s items' %
                (asset_cnt, item_cnt)
            )
            return
        else:
            search, search_arg = cl.quick_search, req

    dl = downloader.create(cl)
    output = downloader_output(dl, disable_ansi=quiet)
    # delay initial item search until downloader output initialized
    output.start()
    try:
        items = search(search_arg, page_size=page_size, sort=sort)
    except Exception as ex:
        output.cancel()
        # NOTE(review): click_exception presumably raises/exits; otherwise
        # `items` below would be unbound — confirm.
        click_exception(ex)
    func = dl.activate if activate_only else dl.download
    args = [items.items_iter(limit), asset_type]
    if not activate_only:
        args.append(dest)
    # invoke the function within an interrupt handler that will shut everything
    # down properly
    handle_interrupt(dl.shutdown, func, *args)
Example #4
0
def download_quads(name, rbox, quiet, dest, limit):
    '''Download quads from a mosaic'''
    client = clientv1()

    quad_dl = downloader.create(client, mosaic=True)
    out = downloader_output(quad_dl, disable_ansi=quiet)
    out.start()
    try:
        # Exactly one mosaic is expected to match the given name.
        mosaic, = client.get_mosaic_by_name(name).items_iter(1)
        quads = client.get_quads(mosaic, rbox).items_iter(limit)
    except Exception as ex:
        out.cancel()
        click_exception(ex)
    # invoke the function within an interrupt handler that will shut everything
    # down properly
    handle_interrupt(quad_dl.shutdown, quad_dl.download, quads, [], dest)
def main(jsonfile, startdate, enddate, outputdir):
    """Search Planet for low-cloud PSScene4Band scenes and download them.

    Finds scenes over the AOI (first feature of the GeoJSON file) with
    cloud cover < 0.1 acquired between startdate and enddate, then downloads
    their analytic_sr assets to outputdir.

    Parameters
    ----------
    jsonfile : str
        Path to a GeoJSON file; the first feature's geometry is the AOI.
    startdate, enddate : str
        ISO date strings bounding the 'acquired' search window.
    outputdir : str
        Directory where downloaded assets are written.

    Returns
    -------
    int
        0 on completion.
    """
    client = api.ClientV1()

    with open(jsonfile, 'r') as f:
        data = json.load(f)

    aoi = data['features'][0]['geometry']

    query = api.filters.and_filter(
        api.filters.geom_filter(aoi),
        api.filters.range_filter('cloud_cover', lt=0.1),
        api.filters.date_range('acquired', gt=startdate, lt=enddate))

    request = api.filters.build_search_request(query, ['PSScene4Band'])
    results = client.quick_search(request)

    myreps = []
    for item in results.items_iter(limit=100):
        print('%s' % item['id'])
        myreps.append(item)

    # Guard the empty case: items_iter(0) is an ambiguous limit, and there
    # is no point creating a downloader with nothing to fetch.
    if not myreps:
        print('No matching scenes found; nothing to download.')
        return 0

    mydownloader = downloader.create(client)

    print('Starting Download of %d images.' % len(myreps))
    mydownloader.download(results.items_iter(limit=len(myreps)),
                          ['analytic_sr'], outputdir)
    mydownloader.shutdown()
    print('Finished with Download.')
    return 0
def download_planet(ullatlon, lrlatlon, outputdir,
                    startdate='2016-08-01', enddate='2018-04-30'):
    """Search for low-cloud SkySatScene imagery in a bounding box and download it.

    The AOI polygon is built from the upper-left and lower-right corners.

    Parameters
    ----------
    ullatlon : sequence of float
        Upper-left corner as (lon, lat) — index 0 is used as x, 1 as y.
    lrlatlon : sequence of float
        Lower-right corner as (lon, lat).
    outputdir : str
        Directory where downloaded assets are written.
    startdate, enddate : str, optional
        ISO date strings for the 'acquired' window; defaults preserve the
        original hard-coded range.

    Returns
    -------
    int
        0 on completion.
    """
    client = api.ClientV1()

    # Closed polygon (first vertex repeated) from the two corners.
    aoi = {
        "type":
        "Polygon",
        "coordinates": [[
            [ullatlon[0], ullatlon[1]],
            [lrlatlon[0], ullatlon[1]],
            [lrlatlon[0], lrlatlon[1]],
            [ullatlon[0], lrlatlon[1]],
            [ullatlon[0], ullatlon[1]],
        ]]
    }

    query = api.filters.and_filter(
        api.filters.geom_filter(aoi),
        api.filters.range_filter('cloud_cover', lt=0.1),
        api.filters.date_range('acquired', gt=startdate, lt=enddate))

    request = api.filters.build_search_request(query, ['SkySatScene'])
    results = client.quick_search(request)

    myreps = []
    for item in results.items_iter(limit=100):
        print('%s' % item['id'])
        myreps.append(item)

    mydownloader = downloader.create(client)

    print('Starting Download of %d images.' % len(myreps))
    mydownloader.download(results.items_iter(len(myreps)), ['analytic'],
                          outputdir)
    mydownloader.shutdown()
    print('Finished with Download.')
    return 0
def test_pipeline():
    """End-to-end downloader pipeline test against a stubbed client."""
    logging.basicConfig(stream=sys.stderr,
                        level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%M:%S')
    client = HelperClient()
    source_items = items_iter(100)
    types = ['a', 'b']
    dl = downloader.create(client,
                           no_sleep=True,
                           astage__size=10,
                           pstage__size=10,
                           pstage__min_poll_interval=0,
                           dstage__size=2)
    completed = []
    dl.on_complete = lambda *args: completed.append(args)
    stats = handle_interrupt(dl.shutdown, dl.download, source_items,
                             types, 'dest')
    # 100 items x 2 asset types = 200 completed downloads.
    expected = {
        'downloading': 0,
        'complete': 200,
        'paging': False,
        'downloaded': '0.20MB',
        'activating': 0,
        'pending': 0,
    }
    assert stats == expected
    assert len(completed) == 200
Example #8
0
def get_downloader(client):
    """Return a new Planet downloader bound to the given API client."""
    return downloader.create(client)
Example #9
0
			# NOTE(review): fragment of a larger loop/function not shown here;
			# polyJSON, client, completed, BANDS, CLOUDCOVERLT, ITEMLIMIT and
			# DEBUG come from the enclosing (unseen) scope.
			aoi = polyJSON
			#pprint(aoi)
			# build a filter for the AOI
			query = filters.and_filter(
				filters.geom_filter(aoi),
				filters.range_filter('cloud_cover', lt=CLOUDCOVERLT),
			)

			# we are requesting PlanetScope 4 Band imagery
			item_types = BANDS
			request = api.filters.build_search_request(query, item_types)
			# this will cause an exception if there are any API related errors
			results = client.quick_search(request)

			# items_iter returns an iterator over API response pages
			dl = downloader.create(client)
			dl.on_complete = lambda *a: completed.append(a)

			for item in results.items_iter(limit=ITEMLIMIT):
				perms = item['_permissions']
				if DEBUG:
					pprint(perms)
				# collect the downloadable asset names out of permission
				# strings shaped like "assets.<name>:download"
				assetTypes = []
				for p in perms:
					res = re.search("assets.(.*):download", p)
					if res:
						assetTypes.append(res.group(1))
				# prefer visual assets; fall back to analytic when both the
				# image and its XML metadata are downloadable
				if ("assets.visual:download" in perms and "assets.visual_xml:download" in perms):
					sceneType=["visual", "visual_xml"]
				elif ("assets.analytic:download" in perms and "assets.analytic_xml:download" in perms):
					sceneType=["analytic", "analytic_xml"]
def main(jsonfile, outputdir):
    """Nightly search and download of recent PSScene4Band imagery over an AOI.

    Searches a three-day window ending at 23:00 UTC today for both 3-band and
    4-band PSScene items over the AOI (first feature of the GeoJSON file),
    logs the results to a dated log file, then downloads the udm2,
    analytic_sr, and analytic_xml assets of the 4-band results.

    Parameters
    ----------
    jsonfile : str
        Path to a GeoJSON file; the first feature's geometry is the AOI.
    outputdir : str
        Directory where downloaded assets are written; its basename also
        names the log file.

    Returns
    -------
    int
        0 on completion.
    """
    root = os.path.basename(outputdir)
    if root == '':
        # outputdir ended with a separator; take the last real component.
        root = os.path.split(os.path.split(outputdir)[0])[1]
    today = datetime.today()
    logfilename = "download_log_" + (
        "%s_%04d%02d%02d.txt" % (root, today.year, today.month, today.day))
    f = open(logfilename, 'w+')

    # Search window: from three days before 23:00 UTC today, up to that time.
    settime1 = datetime(today.year, today.month, today.day, 23, 0, 0, 0,
                        timezone.utc)
    yesterday = settime1 - timedelta(days=3)
    # Truncate to "YYYY-MM-DDTHH:MM" for the date_range filter.
    timetxt1 = yesterday.isoformat(timespec='minutes')[0:16]
    timetxt2 = settime1.isoformat(timespec='minutes')[0:16]

    f.write('Searching for files between %s and %s\n' % (timetxt1, timetxt2))

    # ClientV1 reads PL_API_KEY from the environment itself.
    client = api.ClientV1()

    with open(jsonfile, 'r') as f2:
        data = json.load(f2)

    aoi = data['features'][0]['geometry']

    query = api.filters.and_filter(
        api.filters.geom_filter(aoi),
        api.filters.date_range('acquired', gt=timetxt1, lt=timetxt2))

    request4 = api.filters.build_search_request(query, ['PSScene4Band'])
    request3 = api.filters.build_search_request(query, ['PSScene3Band'])

    results3 = client.quick_search(request3)
    results4 = client.quick_search(request4)
    # (removed leftover pdb.set_trace() debugger breakpoint)

    myreps3 = []
    myreps4 = []

    for item in results4.items_iter(limit=100):
        myreps4.append(item['id'])
        # PS2.SD is the Dove-R instrument; anything else is Dove-Classic.
        if item['properties']['instrument'] == 'PS2.SD':
            f.write(('%s : %s\n') % (item['id'], 'Dove-R'))
        else:
            f.write(('%s : %s\n') % (item['id'], 'Dove-Classic'))

    for item in results3.items_iter(limit=100):
        myreps3.append(item['id'])

    # Log 3-band scene IDs that have no 4-band counterpart.
    if len(myreps3) > len(myreps4):
        diff34 = np.setdiff1d(myreps3, myreps4).tolist()
        f.write("\nPossible 3Band data that could be made to 4Band:")
        for thisid in diff34:
            f.write("%s\n" % thisid)

    f.write("\n")

    mydownloader = downloader.create(client,
                                     no_sleep=True,
                                     astage__size=10,
                                     pstage__size=10,
                                     pstage__min_poll_interval=0,
                                     dstage__size=2)

    f.write(('Starting Download of %d scenes.\n') % len(myreps4))
    mydownloader.download(results4.items_iter(limit=100), ['udm2'], outputdir)
    f.write('Finished with Download of udm2.\n')
    mydownloader.download(results4.items_iter(limit=100), ['analytic_sr'],
                          outputdir)
    f.write('Finished with Download of analytic_sr.\n')
    mydownloader.download(results4.items_iter(limit=100), ['analytic_xml'],
                          outputdir)
    f.write('Finished with Download of analytic_xml.\n')
    mydownloader.shutdown()
    f.write('Downloader has been shut down.\n')
    f.close()
    return 0