import os
import json
import glob
from shapefile import shapefile

# Base URL for PlanetServer product pages; a CRISM product id is appended.
psbaseurl = "http://planetserver.jacobs-university.de/classic/?productid="
sfin_name = "footprints/mars_mro_crism_trdr_frthrlhrs07_c0a.shp"
insf = shapefile("read", sfin_name)

# Collect the unique product ids already ingested into rasdaman.
# Only FRT/HRL/HRS observation classes are considered; the last 5
# characters of each line (a channel/extension tag) are stripped.
ingested = []
seen = set()  # O(1) duplicate check; `ingested` preserves first-seen order
frthrlhrs = ["frt", "hrl", "hrs"]
with open("inrasdaman.txt", "r") as f:  # `with` guarantees the handle is closed
    for line in f:
        line = line.strip()
        if line[:3] in frthrlhrs:
            productid = line[:-5].upper()
            if productid not in seen:
                seen.add(productid)
                ingested.append(productid)

# Output schema: copy the input fields and append PlanetServer metadata
# columns (field spec format: [name, type, width, precision]).
outfieldslist = list(insf.fieldslist)
outfieldslist.append(['PSURL', 4, 254, 0])
outfieldslist.append(['XMIN', 4, 20, 0])
outfieldslist.append(['XMAX', 4, 20, 0])
outfieldslist.append(['YMIN', 4, 20, 0])
outfieldslist.append(['YMAX', 4, 20, 0])
outfieldslist.append(['WIDTH', 4, 20, 0])
outfieldslist.append(['HEIGHT', 4, 20, 0])

outsf = shapefile("write", sfin_name[:-4] + "_planetserver.shp", insf.type, outfieldslist, insf.projection)
# --- Ejemplo n.º 2 (example separator; stray paste text turned into a comment) ---
    except StandardError:
        return None


def check_url(url):
    """
    Check whether a URL exists without downloading the whole file.

    Only the header is requested via get_server_status_code().
    """
    # see also http://stackoverflow.com/questions/2924422
    status = get_server_status_code(url)
    # 200 OK, 302 Found and 301 Moved Permanently all mean the resource exists.
    return status in (httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY)


# Read the AHN map-sheet index and prepare an output shapefile with an
# extra DOWNLOAD column (field spec format: [name, type, width]).
shpfile = "ahn_units.shp"
inshp = shapefile("read", shpfile)
fieldslist = []
for line in inshp.fieldslist:
    fieldslist.append(line)
fieldslist.append(['DOWNLOAD', 4, 100])

outshp = shapefile("write", shpfile[:-4] + "_new.shp", inshp.type, fieldslist,
                   inshp.projection)
# For each map sheet, build the Nationaal Georegister AHN3 LAZ download
# URL from the (upper-cased) UNIT attribute and print the ones whose
# header check succeeds.  NOTE(review): this fragment appears truncated —
# the output shapefile is prepared but no feature write is visible here.
for feat in inshp.features:
    attr_dict = inshp.attr_dict(feat)
    name = attr_dict["UNIT"].upper()
    url = "https://geodata.nationaalgeoregister.nl/ahn3/extract/ahn3_laz/C_%s.LAZ" % (
        name)
    attr_dict["DOWNLOAD"] = url
    if check_url(url):
        print url
import sys

from shapefile import shapefile

# Add a BROWSEURL attribute to every SHARAD footprint; the browse-image
# URL is derived from the product's LabelURL.
sfname = "mars_mro_sharad_rdr_c0l.shp"

insf = shapefile("read", sfname)
outfieldslist = [fld for fld in insf.fieldslist]
outfieldslist.append(['BROWSEURL',4,254,0])

outsf = shapefile("write", sfname[:-4] + "_browse.shp", insf.type, outfieldslist, insf.projection)

for item in insf.feats2list():
    geom = item[0]
    attrs = item[1]
    # ".lbl" -> "_b.jpg" and "/data/" -> "/browse/" turns a label URL
    # into its browse-image counterpart on the archive server.
    browse = attrs['LabelURL'].replace(".lbl","_b.jpg").replace("/data/","/browse/")
    attrs['BROWSEURL'] = browse
    outsf.createfeatfromlist(geom, attrs)
outsf.finish()
insf.finish()
import os, sys, glob
from shapefile import shapefile

# Collect the .img data URLs for every CRISM footprint, then, for each
# region list in regions/*.txt, write a wget-ready URL list to download/.

if not os.path.exists("download"):
    os.makedirs("download")

footprintfile = 'footprints/mars_mro_crism_trdrddrfrt07_c0a.shp'
shp = shapefile("read", footprintfile)
urls = []
for feat in shp.features:
    # LabelURL points at the detached .lbl label; the data file sits
    # next to it with an .img extension.
    LabelURL = shp.attr_dict(feat)["LabelURL"].strip()
    urls.append(LabelURL.replace(".lbl", ".img"))
shp.finish()

for productidfile in glob.glob('regions/*.txt'):
    wgetdownloadlist = 'download/' + os.path.basename(productidfile)[:-4] + '_urllist.txt'
    # `with` closes both files even if an error occurs mid-region
    # (the originals were closed manually and leaked on exceptions).
    with open(productidfile, "r") as f, open(wgetdownloadlist, 'w') as o:
        for productid in f:
            productid = productid.strip().lower()[:17]
            if "_if" in productid:
                # The companion DDR product shares the id stem but ends in
                # "de" instead of "if"; accept either in the URL list.
                # Hoisted out of the inner loop: it only depends on productid.
                ddrid = productid[:-2] + "de"
                for url in urls:
                    if productid in url or ddrid in url:
                        o.write("%s\n" % (url))
                        o.write("%s\n" % (url[:-4] + ".lbl"))
        conn.request('HEAD', path)
        return conn.getresponse().status
    except StandardError:
        return None
 
def check_url(url):
    """
    Check if a URL exists without downloading the whole file.
    We only look at the response status of a header request.
    """
    # see also http://stackoverflow.com/questions/2924422
    ok_statuses = (httplib.OK, httplib.MOVED_PERMANENTLY, httplib.FOUND)
    return get_server_status_code(url) in ok_statuses

shpfile = "ahn_units.shp"
inshp = shapefile("read", shpfile)
fieldslist = []
for line in inshp.fieldslist:
    fieldslist.append(line)
fieldslist.append(['DOWNLOAD', 4, 100])
    
outshp = shapefile("write", shpfile[:-4] + "_new.shp", inshp.type, fieldslist, inshp.projection)
for feat in inshp.features:
    attr_dict = inshp.attr_dict(feat)
    name = attr_dict["UNIT"].upper()
    url = "https://geodata.nationaalgeoregister.nl/ahn3/extract/ahn3_laz/C_%s.LAZ" % (name)
    attr_dict["DOWNLOAD"] = url
    if check_url(url):
        print url
        outshp.createfeat(feat, attr_dict)
inshp.finish()