Esempio n. 1
0
def publish_data_GriddedLivestock(input_folder):
    """Build and print layer metadata for every Gridded Livestock GeoTIFF in *input_folder*.

    Filenames are expected to be underscore-separated, e.g. <name>_<part>_<yyyymm>.
    """
    input_files = glob.glob(input_folder + "/*.tif")
    product = "Gridded Livestock of the World v. 2.01"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = info[0].capitalize() + " " + info[1] + " - " + info[2]
        # BUG FIX: sldname used to be accumulated with += across iterations
        # (initialised once outside the loop), so every file after the first
        # got a concatenation of all previous names. Rebuild it per file.
        sldname = "ghg_" + get_filename(input_file).lower()
        date = info[2] + "01"
        metadata_def = create_metadata(title, product, sldname, date, None)
        print(metadata_def)
Esempio n. 2
0
def publish_data_GriddedLivestock(input_folder):
    """Build and print layer metadata for every Gridded Livestock GeoTIFF in *input_folder*.

    Filenames are expected to be underscore-separated, e.g. <name>_<part>_<yyyymm>.
    """
    input_files = glob.glob(input_folder + "/*.tif")
    product = "Gridded Livestock of the World v. 2.01"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = info[0].capitalize() + " " + info[1] + " - " + info[2]
        # BUG FIX: sldname used to be accumulated with += across iterations
        # (initialised once outside the loop), so every file after the first
        # got a concatenation of all previous names. Rebuild it per file.
        sldname = "ghg_" + get_filename(input_file).lower()
        date = info[2] + "01"
        metadata_def = create_metadata(title, product, sldname, date, None)
        print(metadata_def)
Esempio n. 3
0
def publish_data_GriddedLivestock(input_folder):
    """Publish every Gridded Livestock GeoTIFF in *input_folder* as a coverage store."""
    input_files = glob.glob(input_folder + "/*.tif")
    product = "Gridded Livestock of the World v. 2.01"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = info[0].capitalize() + " " + info[1] + " - " + info[2]
        sldname = "ghg_" + get_filename(input_file).lower() + "_EN"
        date = info[2]
        metadata_def = create_metadata(title, product, date, sldname)
        try:
            print(json.dumps(metadata_def))
            data_manager.publish_coveragestore(input_file, metadata_def, False, False, True)
        # py3-compatible syntax (was `except Exception, e`); best-effort:
        # log the failure and continue with the next file
        except Exception as e:
            print(e)
Esempio n. 4
0
def process(input_folder, output_folder, mask_file):
    """Mask every GeoTIFF in *input_folder* with *mask_file* via gdal_calc.py.

    Results are written to *output_folder*, which is recreated from scratch.
    """
    # recreate the output folder from scratch
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.mkdir(output_folder)

    try:
        input_files = glob.glob(input_folder + "/*.tif")
        for input_file in input_files:
            output_filename = output_folder + "/" + get_filename(input_file) + ".tif"
            # Argument list + shell=False: the old shell-string form broke on
            # (and was injectable through) paths with spaces/metacharacters.
            cmd = ["gdal_calc.py",
                   "-A", input_file,
                   "-B", mask_file,
                   "--outfile=" + output_filename,
                   "--calc=B*A",
                   "--NoDataValue=-3000"]
            # renamed local (was `process`, shadowing this function)
            proc = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            output, error = proc.communicate()
            print(output)
            print(error)

    except Exception as e:
        # best-effort batch: report and give up on the remaining files
        print(e)
Esempio n. 5
0
def process(input_folder, output_folder):
    """Clip every GeoTIFF in *input_folder* to a fixed extent via gdalwarp.

    Results are written to *output_folder*, which is recreated from scratch.
    """
    # recreate the output folder from scratch
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.mkdir(output_folder)

    try:
        input_files = glob.glob(input_folder + "/*.tif")
        for input_file in input_files:
            output_filename = output_folder + "/" + get_filename(input_file) + ".tif"
            # Argument list + shell=False: the old shell-string form broke on
            # paths with spaces/metacharacters. Extent is the hard-coded
            # Doukkala bounding box used elsewhere in this module.
            cmd = ["gdalwarp",
                   "-srcnodata", "-3000",
                   "-dstnodata", "-3000",
                   "-te",
                   "-1010580.0921138877747580", "3810893.3240307914093137",
                   "-894408.7707638647407293", "3894810.6912959185428917",
                   input_file, output_filename]
            print(cmd)
            # renamed local (was `process`, shadowing this function)
            proc = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            output, error = proc.communicate()
            print(output)
            print(error)

    except Exception as e:
        # best-effort batch: report and give up on the remaining files
        print(e)
Esempio n. 6
0
def process(input_folder, output_folder, srcproj, dstproj):
    """Reproject every shapefile in *input_folder* from *srcproj* to *dstproj*.

    Results are written to *output_folder*, which is recreated from scratch.
    """
    # BUG FIX: the original print had three %s placeholders, passed only two
    # values via print-commas, and never applied the format at all.
    print("Processing data %s %s" % (input_folder, output_folder))

    ext = "shp"

    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.makedirs(output_folder)

    try:
        input_files = glob.glob(input_folder + "/*." + ext)
        for input_file in input_files:
            output_filename = output_folder + "/" + get_filename(input_file) + "." + ext
            # Argument list + shell=False: robust against paths/projections
            # containing spaces or shell metacharacters.
            cmd = ["ogr2ogr", "-f", "ESRI Shapefile",
                   "-s_srs", srcproj, "-t_srs", dstproj,
                   output_filename, input_file]
            print(cmd)
            # renamed local (was `process`, shadowing this function)
            proc = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            output, error = proc.communicate()
            print(output)
            print(error)

    except Exception as e:
        # best-effort batch: report and give up on the remaining files
        print(e)
Esempio n. 7
0
def publish_data_wheat(input_folder):
    """Publish wheat rasters in *input_folder*, choosing metadata by filename kind."""
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        # BUG FIX: the original used four independent `if`s, so a "yieldgap"
        # file also matched the "yield" test (substring) and a seasonal yield
        # file matched twice — each such raster was published more than once.
        # Use an exclusive chain, testing the more specific names first.
        if "water_productivity" in input_file:
            name = info[0] + " " + info[1] + " " + info[2]
            sldname = info[1] + "_" + info[2]
            area = info[3]
        elif "yieldgap" in input_file:
            name = info[0] + " " + info[1]
            sldname = info[1]
            area = info[3]
        elif "seasonal" in input_file:
            name = info[0] + " " + info[1] + " " + info[2] + " " + info[3]
            sldname = info[3]
            area = info[4]
        elif "yield" in input_file:
            name = info[0] + " " + info[1] + " " + info[2]
            sldname = info[2]
            area = info[3]
        else:
            continue  # unknown file kind: skip
        metadata_def = create_metadata(name, sldname, "201410", area)
        manager.publish_coverage(input_file, metadata_def)
Esempio n. 8
0
def import_data_earthstat(input_folder):
    """Import every GeoTIFF found one directory level below *input_folder*."""
    # `dirs` rather than `dir`: do not shadow the builtin
    dirs = glob.glob(input_folder + "*")
    for d in dirs:
        if os.path.isdir(d):
            for input_file in glob.glob(d + "/*.tif"):
                import_data(input_file, "es10_10", get_filename(input_file))
Esempio n. 9
0
def import_data_earthstat(input_folder):
    """Import every GeoTIFF found one directory level below *input_folder*."""
    # `dirs` rather than `dir`: do not shadow the builtin
    dirs = glob.glob(input_folder + "*")
    for d in dirs:
        if os.path.isdir(d):
            for input_file in glob.glob(d + "/*.tif"):
                import_data(input_file, "es10_10", get_filename(input_file))
Esempio n. 10
0
def publish_burnerdareas():
    """Publish GFED4 burned-areas rasters (by land cover) as coverage stores.

    3857 rasters get titles via burned_areas_switch(); everything else is
    treated as a 4326 raster and titled from its filename parts.
    """
    path = "/home/vortex/Desktop/LAYERS/GHG_13_NOVEMEBRE/GFED4_BURNEDAREAS_BY_LANDCOVER/"
    input_dir = glob.glob(path + "*")

    def _publish(input_file, metadata_def):
        # Best-effort publish shared by all branches (the original repeated
        # this try/except three times); py3-compatible except syntax.
        try:
            print(json.dumps(metadata_def))
            data_manager.publish_coveragestore(input_file, metadata_def, False, False, True)
        except Exception as e:
            print(e)

    for d in input_dir:
        input_files = glob.glob(d + "/*.tiff")

        # sld and workspace
        sldname = "ghg_burnedareas" + "_EN"

        for input_file in input_files:
            info = get_filename(input_file).split("_")
            if "3857" in input_file:
                lowered = input_file.lower()
                if "humid" in lowered or "allforests" in lowered:
                    print(input_file)
                    # humid/allforests filenames have the date at index 3
                    date = info[3]
                    filename = get_filename(input_file).rsplit('_', 1)[0]
                    product = burned_areas_switch(filename)
                    title = product + " " + info[3]
                else:
                    # other 3857 filenames carry the date at index 4
                    date = info[4]
                    product = burned_areas_switch(input_file)
                    title = product + " " + info[4]
                metadata_def = create_metadata(title, product, date, sldname)
                _publish(input_file, metadata_def)
            else:  # 4326
                if len(info) >= 5:
                    title = " ".join(info[:5]) + " - 4326"
                    date = info[4]
                else:
                    title = " ".join(info[:4])
                    date = info[3]
                product = get_filename(input_file).replace('_', ' ') + " (4326)"
                metadata_def = create_metadata(title, product, date, sldname)
                _publish(input_file, metadata_def)
Esempio n. 11
0
def process():
    """Process the module-level *input_file* into *output_folder* as a GeoTIFF.

    Creates *output_folder* on demand; relies on module-level
    input_file/output_folder globals (NOTE(review): confirm they are set
    before this is called).
    """
    if os.path.isdir(output_folder):
        log.info("already exists")
    else:
        os.mkdir(output_folder)
    output_file = output_folder + get_filename(input_file) + ".tif"
    process_layers(input_file, output_file)
    print(output_file)
Esempio n. 12
0
def publish_data_winter_crop_classification(input_folder):
    """Print metadata for every winter-crop-classification GeoTIFF in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        # (removed unused `info = ...split("_")` local)
        product = "Doukkala - wheat seasonal"
        title = "Winter Crop Classification"
        sldname = "morocco_winter_crop_classification"
        date = "201401"  # FAKE DATE!!!
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 13
0
def publish_data_winter_crop_classification(input_folder):
    """Print metadata for every winter-crop-classification GeoTIFF in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        # (removed unused `info = ...split("_")` local)
        product = "Doukkala - wheat seasonal"
        title = "Winter Crop Classification"
        sldname = "morocco_winter_crop_classification"
        date = "201401"  # FAKE DATE!!!
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 14
0
def publish_data_temperature(input_folder):
    """Print metadata for temperature GeoTIFFs in *input_folder*.

    Filenames are underscore-separated with the date at index 2 and the
    area name at index 3.
    """
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = "Temperature " + info[2] + " - " + info[3].capitalize()
        sldname = "morocco_temperature"
        date = info[2]
        product = info[3].capitalize() + " - Temperature"
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 15
0
def publish_data_temperature(input_folder):
    """Print metadata for temperature GeoTIFFs in *input_folder*.

    Filenames are underscore-separated with the date at index 2 and the
    area name at index 3.
    """
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = "Temperature " + info[2] + " - " + info[3].capitalize()
        sldname = "morocco_temperature"
        date = info[2]
        product = info[3].capitalize() + " - Temperature"
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 16
0
def process_file(input_file, output_folder, process_layer_parameters):
    """Process a single raster into *output_folder* as a GeoTIFF with overviews."""
    try:
        output_filename = output_folder + "/" + get_filename(input_file) + ".tif"
        print(output_filename)

        # create a geotiff + overviews
        process_layers(input_file, output_filename, process_layer_parameters)

    # py3-compatible syntax (was `except Exception, e`); best-effort:
    # report and swallow so batch callers can continue
    except Exception as e:
        print(e)
Esempio n. 17
0
def process_earthstat():
    """Process every EarthStat GeoTIFF one level below *input_folder* into *output_folder*.

    Relies on module-level input_folder/output_folder globals.
    """
    if os.path.isdir(output_folder):
        log.info("already exists")
    else:
        os.mkdir(output_folder)
    # `dirs` rather than `dir`: do not shadow the builtin
    dirs = glob.glob(input_folder + "*")
    for d in dirs:
        if os.path.isdir(d):
            for input_file in glob.glob(d + "/*.tif"):
                output_file = output_folder + get_filename(input_file) + ".tif"
                process_layers(input_file, output_file)
                print(output_file)
Esempio n. 18
0
 def process(self):
     """Unzip the parser's current file and process each extracted XLS.

     Returns a list of dicts with keys "name", "year" and "data".
     """
     path = unzip(self.xls_parser["file_path"])
     files_path = glob.glob(os.path.join(path, "*"))
     data = []
     for file_path in files_path:
         print(file_path)
         # point the parser at the extracted file before processing it
         self.xls_parser["file_path"] = file_path
         d = {}
         d["name"], d["year"] = self.parse_filename(get_filename(file_path))
         d["data"] = self.process_xls()
         data.append(d)
     return data
Esempio n. 19
0
def publish_data_modis_landcover(input_folder):
    """Print metadata for MODIS land-cover GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    base_product = "MODIS - Land Cover Type UMD"
    sldname = "modis_land_cover"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = " ".join(info[:6]) + " - " + info[6]
        # BUG FIX: `product` was mutated across iterations, so every 4326
        # file after the first got an extra " (4326)" suffix. Rebuild per file.
        product = base_product
        if "4326" in info[4]:
            title += " (4326)"
            product += " (4326)"
        date = info[6] + info[7]
        metadata_def = create_metadata(title, product, sldname, date, None)
        print(metadata_def)
Esempio n. 20
0
 def process(self):
     """Unzip the parser's current file and process each extracted XLS.

     Returns a list of dicts with keys "name", "year" and "data".
     """
     path = unzip(self.xls_parser["file_path"])
     files_path = glob.glob(os.path.join(path, "*"))
     data = []
     for file_path in files_path:
         print(file_path)
         # point the parser at the extracted file before processing it
         self.xls_parser["file_path"] = file_path
         d = {}
         d["name"], d["year"] = self.parse_filename(get_filename(file_path))
         d["data"] = self.process_xls()
         data.append(d)
     return data
Esempio n. 21
0
def process_earthstat():
    """Process every EarthStat GeoTIFF one level below *input_folder* into *output_folder*.

    Relies on module-level input_folder/output_folder globals.
    """
    if os.path.isdir(output_folder):
        log.info("already exists")
    else:
        os.mkdir(output_folder)
    # `dirs` rather than `dir`: do not shadow the builtin
    dirs = glob.glob(input_folder + "*")
    for d in dirs:
        if os.path.isdir(d):
            for input_file in glob.glob(d + "/*.tif"):
                output_file = output_folder + get_filename(input_file) + ".tif"
                process_layers(input_file, output_file)
                print(output_file)
Esempio n. 22
0
def publish_data_Climate_Zones_processed(input_folder):
    """Print metadata for JRC climate-zone GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    base_product = "JRC climate zone"
    sldname = "ghg_jrc_climate_zone_0.25deg"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = info[0] + " " + info[1].lower() + " - " + info[2].lower()
        # BUG FIX: `product` was mutated across iterations, so every 4326
        # file after the first got an extra " (4326)" suffix. Rebuild per file.
        product = base_product
        if "4326" in info[4]:
            title += " (4326)"
            product += " (4326)"
        date = None
        metadata_def = create_metadata(title, product, sldname, date, None)
        print(metadata_def)
Esempio n. 23
0
def publish_data_Climate_Zones_processed(input_folder):
    """Print metadata for JRC climate-zone GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    base_product = "JRC climate zone"
    sldname = "ghg_jrc_climate_zone_0.25deg"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = info[0] + " " + info[1].lower() + " - " + info[2].lower()
        # BUG FIX: `product` was mutated across iterations, so every 4326
        # file after the first got an extra " (4326)" suffix. Rebuild per file.
        product = base_product
        if "4326" in info[4]:
            title += " (4326)"
            product += " (4326)"
        date = None
        metadata_def = create_metadata(title, product, sldname, date, None)
        print(metadata_def)
Esempio n. 24
0
def publish_layers():
    """Publish every EarthStat "area" GeoTIFF in *output_folder* with generated metadata.

    Relies on the module-level output_folder global.
    """
    print("Publish Layers")
    files = glob.glob(output_folder + "/*.tif")
    print(files)
    for f in files:
        print(f)
        # read filename
        name = get_filename(f)

        # commodity title, e.g. "wheat_area1" -> "Wheat area"
        title = name.replace("_", " ").replace("1", "").capitalize()
        print(title)

        # create metadata
        creationDate = calendar.timegm(datetime.datetime.now().timetuple())
        metadata_def = {}
        metadata_def["title"] = {"EN": title}
        metadata_def["creationDate"] = creationDate
        metadata_def["meContent"] = {}
        metadata_def["meContent"]["seCoverage"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
        # TODO: in theory 1970-2010?
        # metadata_def["meContent"]["seCoverage"]["coverageTime"]["from"] = "1970"
        # metadata_def["meContent"]["seCoverage"]["coverageTime"]["to"] = "2010"
        # BUG FIX: the "codes" assignment was duplicated verbatim; keep one.
        metadata_def["meContent"]["seCoverage"]["coverageSector"] = {
            "codeList": "Products",
            "codes": [{"code": "EARTHSTAT"}],
        }
        metadata_def["meSpatialRepresentation"] = {
            "seDefaultStyle": {"name": "earthstat_area"},
        }

        # merging metata to raster metadata
        metadata_def = merge_layer_metadata("raster", metadata_def)
        print(metadata_def)

        # publish only the "area" layers
        if "area" in title:
            print(title)
            manager.publish_coverage(f, metadata_def)
Esempio n. 25
0
def publish_data_modis_landcover(input_folder):
    """Print metadata for MODIS land-cover GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    base_product = "MODIS - Land Cover Type UMD"
    sldname = "modis_land_cover"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = " ".join(info[:6]) + " - " + info[6]
        # BUG FIX: `product` was mutated across iterations, so every 4326
        # file after the first got an extra " (4326)" suffix. Rebuild per file.
        product = base_product
        if "4326" in info[4]:
            title += " (4326)"
            product += " (4326)"
        date = info[6] + info[7]
        metadata_def = create_metadata(title, product, sldname, date, None)
        print(metadata_def)
Esempio n. 26
0
def publish_data(input_folder):
    """Publish the irrigated GeoTIFFs found in *input_folder*; other files are skipped."""
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        # Original used `if "irrigated" not in info[0]: pass / else:`;
        # inverted to a direct positive test. Non-irrigated layers are
        # deliberately not published (the old branch held commented-out code).
        if "irrigated" in info[0]:
            name = info[0] + " " + info[1] + " " + info[2]
            sldname = info[0] + "_" + info[1] + "_" + info[2]
            metadata_def = create_metadata(name, sldname, "201410", info[3])
            manager.publish_coverage(input_file, metadata_def)
Esempio n. 27
0
def publish_data_timeseries(input_folder):
    """Print metadata for ndvi/precipitation time-series GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")

    for input_file in input_files:
        info = get_filename(input_file).split("_")
        if "ndvi" in input_file:
            title = info[0].upper() + " " + info[1] + " - " + info[2].capitalize()
            product = info[2].capitalize() + " - " + info[0].upper()
        elif "precipitation" in input_file:
            title = info[0].capitalize() + " " + info[1] + " - " + info[2].capitalize()
            product = info[2].capitalize() + " - " + info[0].capitalize()
        else:
            # BUG FIX: previously title/product leaked from the prior iteration
            # (or raised NameError on the first file); skip unknown files.
            continue

        sldname = "morocco_" + info[0]
        date = info[1]
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 28
0
def publish_data_wheat_seasonal_irrigated(input_folder):
    """Print metadata for seasonal irrigated wheat GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        product = "Doukkala - wheat seasonal"
        sldname = "morocco_"
        date = "201401"  # FAKE DATE!!!
        # BUG FIX: independent `if`s meant a "yieldgap" file also matched the
        # "yield" substring test, producing sldname "morocco_yieldgapyield",
        # and an unmatched file reused a stale `title`. Exclusive chain + skip.
        if "water" in input_file:
            title = info[0] + " " + info[2] + " " + info[3]
            sldname += "water_productivity"
        elif "yieldgap" in input_file:
            title = info[0] + " " + info[2]
            sldname += "yieldgap"
        elif "yield" in input_file:
            title = info[0] + " " + info[2] + " " + info[3]
            sldname += "yield"
        else:
            continue  # unknown file kind: skip
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 29
0
def publish_data_wheat_seasonal_irrigated(input_folder):
    """Print metadata for seasonal irrigated wheat GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        product = "Doukkala - wheat seasonal"
        sldname = "morocco_"
        date = "201401"  # FAKE DATE!!!
        # BUG FIX: independent `if`s meant a "yieldgap" file also matched the
        # "yield" substring test, producing sldname "morocco_yieldgapyield",
        # and an unmatched file reused a stale `title`. Exclusive chain + skip.
        if "water" in input_file:
            title = info[0] + " " + info[2] + " " + info[3]
            sldname += "water_productivity"
        elif "yieldgap" in input_file:
            title = info[0] + " " + info[2]
            sldname += "yieldgap"
        elif "yield" in input_file:
            title = info[0] + " " + info[2] + " " + info[3]
            sldname += "yield"
        else:
            continue  # unknown file kind: skip
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 30
0
def publish_layers():
    """Publish every EarthStat "area" GeoTIFF in *output_folder* with generated metadata.

    Relies on the module-level output_folder global.
    """
    print("Publish Layers")
    files = glob.glob(output_folder + "/*.tif")
    print(files)
    for f in files:
        print(f)
        # read filename
        name = get_filename(f)

        # commodity title, e.g. "wheat_area1" -> "Wheat area"
        title = name.replace("_", " ").replace("1", "").capitalize()
        print(title)

        # create metadata
        creationDate = calendar.timegm(datetime.datetime.now().timetuple())
        metadata_def = {}
        metadata_def["title"] = {"EN": title}
        metadata_def["creationDate"] = creationDate
        metadata_def["meContent"] = {}
        metadata_def["meContent"]["seCoverage"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
        # TODO: in theory 1970-2010?
        # metadata_def["meContent"]["seCoverage"]["coverageTime"]["from"] = "1970"
        # metadata_def["meContent"]["seCoverage"]["coverageTime"]["to"] = "2010"
        # BUG FIX: the "codes" assignment was duplicated verbatim; keep one.
        metadata_def["meContent"]["seCoverage"]["coverageSector"] = {
            "codeList": "Products",
            "codes": [{"code": "EARTHSTAT"}],
        }
        metadata_def["meSpatialRepresentation"] = {
            "seDefaultStyle": {"name": "earthstat_area"},
        }

        # merging metata to raster metadata
        metadata_def = merge_layer_metadata("raster", metadata_def)
        print(metadata_def)

        # publish only the "area" layers
        if "area" in title:
            print(title)
            manager.publish_coverage(f, metadata_def)
Esempio n. 31
0
def publish_data_Climate_Zones_processed(input_folder):
    """Publish JRC climate-zone GeoTIFFs in *input_folder* as coverage stores."""
    input_files = glob.glob(input_folder + "/*.tif")
    base_product = "JRC climate zone"
    sldname = "ghg_jrc_climate_zone_0.25deg" + "_EN"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = info[0] + " " + info[1].lower() + " - " + info[2].lower()
        # BUG FIX: `product` was mutated across iterations, so every 4326
        # file after the first got an extra " (4326)" suffix. Rebuild per file.
        product = base_product
        map_projection_code = "EPSG:3857"
        if "4326" in info[4]:
            title += " (4326)"
            product += " (4326)"
            map_projection_code = "EPSG:4326"
        date = "2010"
        metadata_def = create_metadata(title, product, date, sldname, map_projection_code)
        try:
            print(json.dumps(metadata_def))
            data_manager.publish_coveragestore(input_file, metadata_def, False, False, True)
        # py3-compatible syntax (was `except Exception, e`)
        except Exception as e:
            print(e)
Esempio n. 32
0
def publish_data_modis_landcover(input_folder):
    """Publish MODIS land-cover GeoTIFFs in *input_folder* as coverage stores."""
    input_files = glob.glob(input_folder + "/*.tif")
    base_product = "MODIS - Land Cover Type UMD"
    sldname = "modis_land_cover" + "_EN"
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        title = " ".join(info[:6]) + " - " + info[6]
        # BUG FIX: `product` was mutated across iterations, so every 4326
        # file after the first got an extra " (4326)" suffix. Rebuild per file.
        product = base_product
        map_projection_code = "EPSG:3857"
        if "4326" in info[4]:
            title += " (4326)"
            product += " (4326)"
            map_projection_code = "EPSG:4326"
        date = info[6]
        metadata_def = create_metadata(title, product, date, sldname, map_projection_code)
        try:
            print(json.dumps(metadata_def))
            data_manager.publish_coveragestore(input_file, metadata_def, False, False, True)
        # py3-compatible syntax (was `except Exception, e`)
        except Exception as e:
            print(e)
Esempio n. 33
0
def publish_data_meteo_evapotranspiration(input_folder):
    """Build metadata for evapotranspiration GeoTIFFs in *input_folder*.

    NOTE(review): metadata_def is built but never printed or published here —
    looks unfinished; confirm against the duplicate of this function.
    """
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        # `et_type` rather than `type`: do not shadow the builtin.
        if "actual" in input_file:
            et_type = "actual"
        elif "potential" in input_file:
            et_type = "potential"
        elif "ETref" in input_file:
            et_type = "reference"
        else:
            # BUG FIX: an unmatched file previously reused the previous
            # iteration's type (or raised NameError); skip it instead.
            continue

        title = et_type.capitalize() + " evapotransipiration " + " " + info[1] + " - " + info[2].capitalize()
        sldname = "morocco_evapotransipiration"
        date = info[1]
        product = info[2].capitalize() + " - " + et_type + " evapotransipiration"
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
Esempio n. 34
0
def process(input_folder, output_folder, mask_file):
    """Mask every GeoTIFF in *input_folder* with *mask_file* via gdal_calc.py.

    Results are written to *output_folder* as wheat_<name>.tif; the folder is
    recreated from scratch.
    """
    # recreate the output folder from scratch
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.mkdir(output_folder)

    try:
        input_files = glob.glob(input_folder + "/*.tif")
        for input_file in input_files:
            output_filename = output_folder + "/wheat_" + get_filename(input_file) + ".tif"
            # Argument list + shell=False: the old shell-string form broke on
            # (and was injectable through) paths with spaces/metacharacters.
            cmd = ["gdal_calc.py",
                   "-A", input_file,
                   "-B", mask_file,
                   "--outfile=" + output_filename,
                   "--calc=B*A",
                   "--NoDataValue=0"]
            # renamed local (was `process`, shadowing this function)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            output, error = proc.communicate()

    except Exception as e:
        # best-effort batch: report and give up on the remaining files
        print(e)
Esempio n. 35
0
def harvest_folder(path):
    """
    Harvest raster files in a path and publish any that are not yet layers.

    :param path: directory to scan for tiff/geotiff/gtiff/tif files
    :return: None
    """
    manager = Manager(settings)
    types = ('*.tiff', '*.geotiff', "*.gtiff", "*.tif")
    # gather every raster regardless of extension spelling
    files_grabbed = []
    for file_type in types:
        files_grabbed.extend(glob.glob(os.path.join(path, file_type)))

    for file_path in files_grabbed:

        path, filename, name = filesystem.get_filename(file_path, True)
        default_workspace = manager.geoserver.get_default_workspace()
        name = sanitize_name(name)
        if manager.geoserver.check_if_layer_exist(name, default_workspace) is False:
            metadata_file = os.path.join(path, name + ".json")
            log.info("Check Metadata File: %s" % metadata_file)
            if os.path.isfile(metadata_file):
                metadata_json = json.loads(open(metadata_file).read())
                log.info(metadata_json)
            else:
                log.warn("Metadata '%s' doesn't exists " % metadata_file)
                log.warn("Setting the default metadata file for '%s' " % name)
                # BUG FIX: the fallback assigned `metadata` while the merge
                # below read `metadata_json`, so a missing json file raised
                # NameError (or silently reused the previous file's metadata).
                metadata_json = deepcopy(default_metadata_json)

            log.info("Prossessing the raster '%s' " % (name))
            metadata = merge_layer_metadata(raster_template, metadata_json)

            # process metadata with the default workspace if uid is not set in the metadata
            if "uid" not in metadata:
                metadata["uid"] = default_workspace + ":" + name
            manager.publish_coverage(file_path, metadata)

            log.info("Raster published '%s' " % (name))
        else:
            log.warn("Coverage '%s' already exists " % name)
Esempio n. 36
0
def publish_data_timeseries(input_folder):
    """Print metadata for ndvi/precipitation time-series GeoTIFFs in *input_folder*."""
    input_files = glob.glob(input_folder + "/*.tif")

    for input_file in input_files:
        info = get_filename(input_file).split("_")
        if "ndvi" in input_file:
            title = info[0].upper() + " " + info[1] + " - " + info[2].capitalize()
            product = info[2].capitalize() + " - " + info[0].upper()
        elif "precipitation" in input_file:
            title = info[0].capitalize() + " " + info[1] + " - " + info[2].capitalize()
            product = info[2].capitalize() + " - " + info[0].capitalize()
        else:
            # BUG FIX: previously title/product leaked from the prior iteration
            # (or raised NameError on the first file); skip unknown files.
            continue

        sldname = "morocco_" + info[0]
        date = info[1]
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
        print(metadata_def)
Esempio n. 37
0
def publish_data_meteo_evapotranspiration(input_folder):
    """Build metadata for evapotranspiration GeoTIFFs in *input_folder*.

    NOTE(review): metadata_def is built but never printed or published here —
    looks unfinished; confirm against the duplicate of this function.
    """
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = get_filename(input_file).split("_")
        # `et_type` rather than `type`: do not shadow the builtin.
        if "actual" in input_file:
            et_type = "actual"
        elif "potential" in input_file:
            et_type = "potential"
        elif "ETref" in input_file:
            et_type = "reference"
        else:
            # BUG FIX: an unmatched file previously reused the previous
            # iteration's type (or raised NameError); skip it instead.
            continue

        title = et_type.capitalize() + " evapotransipiration " + " " + info[1] + " - " + info[2].capitalize()
        sldname = "morocco_evapotransipiration"
        date = info[1]
        product = info[2].capitalize() + " - " + et_type + " evapotransipiration"
        metadata_def = create_metadata(title, product, sldname, date,
                                       get_measurement_unit(title))
def process_landcover():
    """Run the multi-step processing pipeline over every GFED4
    burned-areas-by-landcover GeoTIFF.

    For each .tif, the step objects in the module-level ``objs`` list are
    mutated in place with the current source/output paths and executed via
    ``process_step``; each step's return value becomes the next step's
    source path.
    """
    # hard-coded local dataset root (developer machine path)
    path = "/home/vortex/Desktop/LAYERS/GHG_13_NOVEMEBRE/GFED4_BURNEDAREAS_BY_LANDCOVER/"
    input_dir = glob.glob(path + "*")
    print input_dir
    for d in input_dir:
        input_files = glob.glob(d + "/*.tif")
        for input_file in input_files:
            print d
            print input_file
            filename = get_filename(input_file)
            # process_step takes a list of source paths
            source_path = [input_file]
            print filename
            output_path = d + "/"
            # output carries the target EPSG code as a suffix
            output_file_name = filename + "_3857"
            print output_path
            for obj in objs:
                # NOTE(review): `objs` is a module-level list mutated here
                obj["source_path"] = source_path
                obj["output_path"] = output_path
                obj["output_file_name"] = output_file_name
                print obj
                # chain: this step's output feeds the next step
                source_path = process_step(obj)
Esempio n. 39
0
def process(input_folder, output_folder, process_layer_parameters):
    print "Processing data %s %s %s ", input_folder, output_folder, process_layer_parameters

    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.makedirs(output_folder)

    try:
        input_files = glob.glob(input_folder +"/*.tif")
        for input_file in input_files:
            print input_file

            output_filename = output_folder + "/" + get_filename(input_file) + ".tif"
            print(output_filename)

            # create a geotiff + overviews
            process_layers(input_file, output_filename, process_layer_parameters)

    except Exception, e:
        print e
        pass
Esempio n. 40
0
def process_landcover():
    """Run the multi-step processing pipeline over every GFED4
    burned-areas-by-landcover GeoTIFF.

    NOTE(review): byte-for-byte duplicate of an earlier
    ``process_landcover`` definition in this file; at import time Python
    keeps whichever definition appears last.
    """
    # hard-coded local dataset root (developer machine path)
    path = "/home/vortex/Desktop/LAYERS/GHG_13_NOVEMEBRE/GFED4_BURNEDAREAS_BY_LANDCOVER/"
    input_dir = glob.glob(path + "*")
    print input_dir
    for d in input_dir:
        input_files = glob.glob(d + "/*.tif")
        for input_file in input_files:
            print d
            print input_file
            filename = get_filename(input_file)
            # process_step takes a list of source paths
            source_path = [input_file]
            print filename
            output_path = d + "/"
            # output carries the target EPSG code as a suffix
            output_file_name = filename + "_3857"
            print output_path
            for obj in objs:
                obj["source_path"] = source_path
                obj["output_path"] = output_path
                obj["output_file_name"] = output_file_name
                print obj
                # chain: this step's output feeds the next step
                source_path = process_step(obj)
Esempio n. 41
0
def publish_data_wheat_seasonal(input_folder):
    input_files = glob.glob(input_folder + "/*.tif")
    for input_file in input_files:
        info = str.split(get_filename(input_file), "_")
        if "wheat_productivity" not in input_file and "yieldgap" not in input_file:
            #print "name %s, sld %s, date %s, area %s" % (info[0], info[1], info[2], info[3])
            #title = info[4].capitalize() + " - " + info[0] + " " + info[1] + " " + info[2] + " - " + info[3]
            # title = info[4].capitalize() + " - " + info[0] + " " + info[1] + " " + info[2] + " - " + info[3]
            title = info[2] + " - " + info[3]
            sldname = "morocco_" + info[2]
            date = "201401"  # FAKE DATE!!!
            product = info[4].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date,
                                           get_measurement_unit(title))
            #print metadata_def
            #manager.publish_coverage(input_file, metadata_def)
        if "water" in input_file:
            title = info[2] + " " + info[3]
            sldname = "morocco_" + info[2] + "_" + info[3]
            date = "201401"  # FAKE DATE!!!
            product = info[4].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date,
                                           get_measurement_unit(title))
            #print metadata_def
        if "yieldgap" in input_file:
            title = info[2]
            sldname = "morocco_" + info[2]
            date = "201401"  # FAKE DATE!!!
            product = info[3].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date,
                                           get_measurement_unit(title))
        if "evapotransipiration" in input_file:
            title = info[2] + " - " + info[3]
            sldname = "morocco_evapotransipiration_wheat"
            date = "201401"  # FAKE DATE!!!
            product = info[4].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date,
                                           get_measurement_unit(title))
        # publishing
        print metadata_def
Esempio n. 42
0
def process(input_folder, output_folder):

    # create the folder
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.mkdir(output_folder)

    try:
        input_files = glob.glob(input_folder +"/*.tif")
        for input_file in input_files:
            output_filename = output_folder + "/" + get_filename(input_file) + ".tif"
            #cmd = 'gdalwarp -srcnodata 0 -dstnodata -3000 -te -1010580.0921138877747580 3810893.3240307914093137 -894408.7707638647407293 3894810.6912959185428917 ' + input_file + ' ' + output_filename
            cmd = 'gdalwarp -srcnodata -3000 -dstnodata -3000 -te -1010580.0921138877747580 3810893.3240307914093137 -894408.7707638647407293 3894810.6912959185428917 ' + input_file + ' ' + output_filename
            print cmd
            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            output, error = process.communicate()
            print output
            print error

    except Exception, e:
        print e
        pass
Esempio n. 43
0
def calc():
    """Convert the rasters under every folder matching the module-level
    ``input_folder`` glob pattern into GeoTIFFs (with overviews) inside a
    per-folder ``/output`` subdirectory.

    Relies on module-level names: ``input_folder``, ``remove``,
    ``get_filename``, ``process_layers``.
    """

    # take folders
    folders = glob.glob(input_folder)
    for folder in folders:
        print input_folder
        # create output folder /output
        # covert to geotiff 3857 the file
        output_folder = folder + "/output"
        # clear previous .tif outputs rather than recreating the directory
        if os.path.isdir(output_folder):
            remove(output_folder + "/*.tif")
        else:
            os.mkdir(output_folder)


        input_files = glob.glob(folder +"/*.tif")
        for input_file in input_files:

            output_filename = output_folder + "/" + get_filename(input_file) + ".tif"
            print(output_filename)
            # create a geotiff + overviews
            process_layers(input_file, output_filename)
Esempio n. 44
0
def calc_trmm():
    """Convert TRMM rasters under every folder matching the module-level
    ``input_folder`` glob pattern into GeoTIFFs (with overviews) inside a
    per-folder ``/output`` subdirectory.

    NOTE(review): near-identical to ``calc`` above; relies on the same
    module-level names (``input_folder``, ``remove``, ``get_filename``,
    ``process_layers``).
    """

    # take folders
    folders = glob.glob(input_folder)
    for folder in folders:
        print input_folder
        # create output folder /output
        # covert to geotiff 3857 the file
        output_folder = folder + "/output"
        # clear previous .tif outputs rather than recreating the directory
        if os.path.isdir(output_folder):
            remove(output_folder + "/*.tif")
        else:
            os.mkdir(output_folder)

        input_files = glob.glob(folder + "/*.tif")
        for input_file in input_files:

            output_filename = output_folder + "/" + get_filename(
                input_file) + ".tif"
            print(output_filename)
            # create a geotiff + overviews
            process_layers(input_file, output_filename)
def process(input_folder, output_folder, srcproj, dstproj):
    print "Processing data %s %s %s ", input_folder, output_folder

    ext = "shp"

    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.makedirs(output_folder)

    try:
        input_files = glob.glob(input_folder + "/*." + ext)
        for input_file in input_files:
            output_filename = output_folder + "/" + get_filename(input_file) + "." + ext
            cmd = "ogr2ogr -f 'ESRI Shapefile' -s_srs " + srcproj + " -t_srs " + dstproj + " " + output_filename + " " + input_file
            print cmd
            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            output, error = process.communicate()
            print output
            print error

    except Exception, e:
        print e
        pass
Esempio n. 46
0
def publish_data_wheat_seasonal(input_folder):
    """Build and print metadata for Morocco seasonal wheat GeoTIFFs.

    NOTE(review): duplicate of an earlier ``publish_data_wheat_seasonal``
    in this file. If a file matches none of the branches below (e.g. a
    "wheat_productivity" file that is neither water/yieldgap/
    evapotransipiration), ``metadata_def`` is stale from the previous
    iteration -- or undefined on the first file -- when printed at the
    bottom; confirm against expected inputs.
    """
    input_files = glob.glob(input_folder +"/*.tif")
    for input_file in input_files:
        info = str.split(get_filename(input_file), "_")
        if "wheat_productivity" not in input_file and "yieldgap" not in input_file:
            #print "name %s, sld %s, date %s, area %s" % (info[0], info[1], info[2], info[3])
            #title = info[4].capitalize() + " - " + info[0] + " " + info[1] + " " + info[2] + " - " + info[3]
            # title = info[4].capitalize() + " - " + info[0] + " " + info[1] + " " + info[2] + " - " + info[3]
            title = info[2] + " - " + info[3]
            sldname = "morocco_" + info[2]
            date = "201401" # FAKE DATE!!!
            product = info[4].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date, get_measurement_unit(title))
            #print metadata_def
            #manager.publish_coverage(input_file, metadata_def)
        if "water" in input_file:
            title =  info[2] + " " + info[3]
            sldname = "morocco_" + info[2] + "_" + info[3]
            date = "201401" # FAKE DATE!!!
            product = info[4].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date, get_measurement_unit(title))
            #print metadata_def
        if "yieldgap" in input_file:
            title = info[2]
            sldname = "morocco_" + info[2]
            date = "201401" # FAKE DATE!!!
            product = info[3].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date, get_measurement_unit(title))
        if "evapotransipiration" in input_file:
            title = info[2] + " - " + info[3]
            sldname = "morocco_evapotransipiration_wheat"
            date = "201401" # FAKE DATE!!!
            product = info[4].capitalize() + " - " + info[0] + " " + info[1]
            metadata_def = create_metadata(title, product, sldname, date, get_measurement_unit(title))
        # publishing
        print metadata_def
Esempio n. 47
0
def publish_layers():
    """Publish every processed TRMM GeoTIFF found in <output_folder>output.

    For each raster: derive month/year from the filename, build the
    metadata JSON (coverage time range, product code, aggregation type,
    default style), merge it with the base "raster" metadata and publish
    the coverage through the module-level ``manager``.
    """
    files = glob.glob(output_folder + "output/*.tif")

    for f in files:
        log.info("--------------------------------")

        name = get_filename(f)

        log.info("name %s %s" %(name, f))

        # assumes filename layout puts the month at chars 5-7 and the
        # year at chars 8-12 -- TODO confirm against the producing step
        month = int(name[5:7])

        log.info("month %s" % month)
        year = None
        try:
            year = int(name[8:12])
        except Exception, e:
            log.error("year error")

        # TODO: what year for the DA?
        if year is None:
            year = "2014"

        # get title name
        title = name.replace("_", " ").capitalize()

        # get metadata
        from_date, to_date = get_range_dates_metadata(month, year)
        creationDate = calendar.timegm(datetime.datetime.now().timetuple())

        # Sample of Metadata json
        log.info("Creating metadata")
        metadata_def = {}
        metadata_def["title"] = {}
        metadata_def["title"]["EN"] = title
        metadata_def["creationDate"] = creationDate
        metadata_def["meContent"] = {}
        metadata_def["meContent"]["seCoverage"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"]["from"] = from_date
        metadata_def["meContent"]["seCoverage"]["coverageTime"]["to"] = to_date
        metadata_def["meContent"]["seCoverage"]["coverageSector"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codeList"] = "Products"
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{"code" : "TRMM"}]
        # NOTE(review): duplicate of the line above -- harmless but redundant
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{"code" : "TRMM"}]

        # TODO: in theory should be the original file the onlineResource
        metadata_def["meAccessibility"] = {}
        metadata_def["meAccessibility"]["seDistribution"] = {}
        metadata_def["meAccessibility"]["seDistribution"]["onlineResource"] = f

        # TODO: added new field for the original resource (should we have two different metadata?)
        #metadata_def["meAccessibility"]["seDistribution"]["originalResource"] = output_filename

        # adding type of layer
        aggregationProcessing = "none"
        if "_avg" in name:
            aggregationProcessing = "avg"
        elif "_da" in name:
            aggregationProcessing = "da"
        metadata_def["meStatisticalProcessing"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"]["seDataCompilation"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"]["seDataCompilation"]["aggregationProcessing"] = aggregationProcessing;

        # default style
        metadata_def["meSpatialRepresentation"] = {}
        metadata_def["meSpatialRepresentation"]["seDefaultStyle"] = {}
        if aggregationProcessing == "da":
            metadata_def["meSpatialRepresentation"]["seDefaultStyle"]["name"] = "rainfall_" + aggregationProcessing
        else:
            metadata_def["meSpatialRepresentation"]["seDefaultStyle"]["name"] = "rainfall"


        # merging metadata to the base raster one
        metadata_def = merge_layer_metadata("raster", metadata_def)

        # "seCoverage" : {
        #     "coverageTime" : {
        #         "from" : 1328405808080,
        #         "to": 1328405808080
        #     },
        #     "coverageGeographic" : {
        #         "codeList" : "...",
        #         "version" : "...",
        #         "codes" : [{"code" : "world"}]
        #     },
        #     "coverageSector" : {
        #         "codeList" : "...",
        #         "version" : "...",
        #         "codes" : [{"code" : "MODISQ1"}]
        #     }
        # }]

        log.info(metadata_def)

        # publish layer
        print manager.publish_coverage(f, metadata_def)
Esempio n. 48
0
    def publish_coveragestore(self, data, overwrite=False):
        """Publish a GeoTIFF to a GeoServer coverage store via REST.

        ``data`` is a dict with at least "name" and "path" (an optional
        "workspace" overrides the default one, and the remaining keys are
        sent as the coverage's metadata). When a sibling .tfw world file
        exists next to the GeoTIFF, both are zipped and uploaded as a
        "worldimage"; otherwise the GeoTIFF is streamed directly. After
        the upload the coverage metadata is updated and, when present,
        the default style is applied.

        Raises PGeoException when the workspace does not exist, when the
        layer already exists and ``overwrite`` is False, or when any REST
        call fails.
        """

        name = data["name"]
        workspace = self.get_default_workspace() if "workspace" not in data else data["workspace"]
        path = data["path"]

        # NOTE(review): identity comparison ('is not') on the workspace --
        # presumably equality was intended; verify get_default_workspace()
        if workspace is not self.get_default_workspace():
            if self.check_if_workspace_exist(workspace) is False:
                raise PGeoException(errors[523]+": %s" % workspace)

        if not overwrite:
            if self.check_if_layer_exist(name, workspace):
                raise PGeoException(errors[520]+": %s" % name)

        # default geotiff headers and extension
        headers = get_headers("tiff")
        ext = "geotiff"

        # check if the layer is a tfw (a zipfile)
        #TODO: make it work with tif+tfw
        path_folder_tif, filename_tif, name_tif  = get_filename(path, True)
        file_tfw = os.path.join(path_folder_tif, name_tif + ".tfw")
        if os.path.isfile(file_tfw):
            # handle 'tfw' (worldimage)
            bundle = [path, file_tfw]
            log.info(bundle)
            archive = zip_files(name, bundle, path_folder_tif)
            log.info(archive)
            #message = open(archive, 'rb')
            message = archive
            headers = get_headers("zip")
            ext = "worldimage"
        elif isinstance(path, basestring):
            # plain GeoTIFF: stream the file itself
            message = open(path, 'rb')
        else:
            # already a file-like object / payload
            message = path

        log.info(message)

        try:
            # Add layer to coveragestore
            cs_url = url(self.service_url, ["workspaces", workspace, "coveragestores", name, "file." + ext])
            self._publish_layer(cs_url, "PUT", message, headers, 201)

            #  Update metadata of the layer
            headers = get_headers("json")
            json_data = deepcopy(data)
            del json_data['name']

            # # TODO: REMOVE IT!!!!!!
            # json_data['nativeCRS'] = 'PROJCS["World_Mollweide",GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Mollweide"],PARAMETER["False_Easting",0],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",0],UNIT["Meter",1],AUTHORITY["EPSG","54009"]]'
            # json_data['srs'] = "EPSG:54009"

            # set has default enabled the layer
            if "enabled" not in json_data:
                json_data["enabled"] = True
            # json to send to geoserver
            update_layer = {
                "coverage" : json_data
            }
            cs_url = url(self.service_url, ["workspaces", workspace, "coveragestores", name, "coverages", name  + ".json"])
            self._publish_layer(cs_url, "PUT", json.dumps(update_layer), headers, 200)

            # TODO: check why doesn't update the default style
            if 'defaultStyle' in json_data:
                if 'name' in json_data['defaultStyle']:
                    self.set_default_style(name, json_data['defaultStyle']['name'])
        except PGeoException, e:
            log.error(e.get_message())
            raise PGeoException(e.get_message(), e.get_status_code())
Esempio n. 49
0
def publish():
    """Publish the processed TRMM SADC rasters (averages and dekads)
    found under each folder matching the module-level ``input_folder``
    glob pattern.

    For every GeoTIFF in <folder>/output, a title and a ~15-day coverage
    window are derived from the filename, the metadata JSON is built and
    the coverage is published through the module-level ``manager``.
    """
    folders = glob.glob(input_folder)

    for folder in folders:
        files = glob.glob(folder +"/output/*.tif")

        for f in files:
            log.info("--------------------------------")
            log.info(f)
            if "avg" in f:
                print "avg"

                name = get_filename(f)
                # day-of-year token between the first and last underscore
                to_day = name[name.index("_")+1:name.rindex("_")]
                # averages carry no year; 1990 is an arbitrary anchor
                year = 1990
                print year
                from_day = int(to_day) - 15
                print from_day

                # NOTE(review): from_date is derived from to_day and
                # to_date from from_day -- the pair looks swapped; confirm
                # whether this inversion is intentional.
                from_date = calendar.timegm(day_of_the_year_to_date(to_day, year).timetuple())
                to_date = calendar.timegm(day_of_the_year_to_date(from_day, year).timetuple())

                #title = name.replace("", " ").upper()

                title = "TRMM SADC Avg - " + to_day

            else:
                log.info("--------------------------------")
                log.info(f)

                name = get_filename(f)

                # date token: "<year><...>" between first/last underscore
                d = name[name.index("_")+1:name.rindex("_")]
                year = d[0:4]
                to_day = name[name.rindex("_")+1:]
                print d
                print year
                print to_day
                from_day = int(to_day) - 15
                print from_day

                # NOTE(review): same apparent from/to inversion as above
                fromdate = day_of_the_year_to_date(to_day, year)
                todate = day_of_the_year_to_date(from_day, year)

                from_date = calendar.timegm(fromdate.timetuple())
                to_date = calendar.timegm(todate.timetuple())

                print from_date
                print to_date

                # get title name
                #title = name.replace("_", " ").capitalize()
                if "Anomaly" in f:
                    title = "TRMM SADC Anomaly - " + str(fromdate)
                else:
                    title = "TRMM SADC - " + str(fromdate)



            # get metadata
            creationDate = calendar.timegm(datetime.datetime.now().timetuple())

            # Sample of Metadata json
            log.info("Creating metadata")
            metadata_def = {}
            metadata_def["title"] = {}
            metadata_def["title"]["EN"] = title
            metadata_def["creationDate"] = creationDate
            metadata_def["meContent"] = {}
            metadata_def["meContent"]["seCoverage"] = {}
            metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
            metadata_def["meContent"]["seCoverage"]["coverageTime"]["from"] = from_date
            metadata_def["meContent"]["seCoverage"]["coverageTime"]["to"] = to_date
            metadata_def["meContent"]["seCoverage"]["coverageSector"] = {}
            metadata_def["meContent"]["seCoverage"]["coverageSector"]["codeList"] = "Products"
            metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{"code" : "TRMM-SADC"}]
            # NOTE(review): duplicate of the line above -- harmless but redundant
            metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{"code" : "TRMM-SADC"}]

            # TODO: in theory should be the original file the onlineResource
            metadata_def["meAccessibility"] = {}
            metadata_def["meAccessibility"]["seDistribution"] = {}
            metadata_def["meAccessibility"]["seDistribution"]["onlineResource"] = f

            # TODO: added new field for the original resource (should we have two different metadata?)
            #metadata_def["meAccessibility"]["seDistribution"]["originalResource"] = output_filename

            # adding type of layer
            aggregationProcessing = "none"
            if "3B42a_" in f:
                aggregationProcessing = "anomaly"
            elif "avg" in f:
                aggregationProcessing = "avg"
            metadata_def["meStatisticalProcessing"] = {}
            metadata_def["meStatisticalProcessing"]["seDatasource"] = {}
            metadata_def["meStatisticalProcessing"]["seDatasource"]["seDataCompilation"] = {}
            metadata_def["meStatisticalProcessing"]["seDatasource"]["seDataCompilation"]["aggregationProcessing"] = aggregationProcessing;

            # default style
            metadata_def["meSpatialRepresentation"] = {}
            metadata_def["meSpatialRepresentation"]["seDefaultStyle"] = {}
            if aggregationProcessing == "anomaly":
                metadata_def["meSpatialRepresentation"]["seDefaultStyle"]["name"] = "rainfall_sadc_" + aggregationProcessing
            else:
                metadata_def["meSpatialRepresentation"]["seDefaultStyle"]["name"] = "rainfall_sadc"


            # merging metadata to the base raster one
            metadata_def = merge_layer_metadata("raster", metadata_def)

            # "seCoverage" : {
            #     "coverageTime" : {
            #         "from" : 1328405808080,
            #         "to": 1328405808080
            #     },
            #     "coverageGeographic" : {
            #         "codeList" : "...",
            #         "version" : "...",
            #         "codes" : [{"code" : "world"}]
            #     },
            #     "coverageSector" : {
            #         "codeList" : "...",
            #         "version" : "...",
            #         "codes" : [{"code" : "MODISQ1"}]
            #     }
            # }]

            log.info(metadata_def)

            # publish layer
            print manager.publish_coverage(f, metadata_def)
Esempio n. 50
0
def publish():
    folders = glob.glob(input_folder)

    for folder in folders:
        files = glob.glob(folder + "/output/*.tif")

        for f in files:
            if "Average" in f:
                print "Average"
                log.info("--------------------------------")
                log.info(f)

                name = get_filename(f)
                to_day = name[name.index("_") + 1:name.rindex("_")]
                year = 1990
                print d
                print year
                from_day = int(to_day) - 15
                print from_day

                from_date = calendar.timegm(
                    day_of_the_year_to_date(to_day, year).timetuple())
                to_date = calendar.timegm(
                    day_of_the_year_to_date(from_day, year).timetuple())

                #title = name.replace("", " ").upper()

                title = "NDVI SADC Avg - " + to_day

            else:
                log.info("--------------------------------")
                log.info(f)

                name = get_filename(f)

                d = name[name.index("_") + 1:name.rindex("_")]
                year = d[0:4]
                to_day = d[4:]
                print d
                print year
                print to_day
                from_day = int(to_day) - 15
                print from_day

                fromdate = day_of_the_year_to_date(to_day, year)
                todate = day_of_the_year_to_date(from_day, year)

                from_date = calendar.timegm(fromdate.timetuple())
                to_date = calendar.timegm(todate.timetuple())

                print from_date
                print to_date

                # get title name
                #title = name.replace("_", " ").capitalize()
                if "Anomaly" in f:
                    title = "NDVI SADC Anomaly - " + str(fromdate)
                else:
                    title = "NDVI SADC - " + str(fromdate)

            # get metadata
            creationDate = calendar.timegm(datetime.datetime.now().timetuple())

            # Sample of Metadata json
            log.info("Creating metadata")
            metadata_def = {}
            metadata_def["title"] = {}
            metadata_def["title"]["EN"] = title
            metadata_def["creationDate"] = creationDate
            metadata_def["meContent"] = {}
            metadata_def["meContent"]["seCoverage"] = {}
            metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
            metadata_def["meContent"]["seCoverage"]["coverageTime"][
                "from"] = from_date
            metadata_def["meContent"]["seCoverage"]["coverageTime"][
                "to"] = to_date
            metadata_def["meContent"]["seCoverage"]["coverageSector"] = {}
            metadata_def["meContent"]["seCoverage"]["coverageSector"][
                "codeList"] = "Products"
            metadata_def["meContent"]["seCoverage"]["coverageSector"][
                "codes"] = [{
                    "code": "MODIS-NDVI-SADC"
                }]
            metadata_def["meContent"]["seCoverage"]["coverageSector"][
                "codes"] = [{
                    "code": "MODIS-NDVI-SADC"
                }]

            # TODO: in theory should be the original file the onlineResource
            metadata_def["meAccessibility"] = {}
            metadata_def["meAccessibility"]["seDistribution"] = {}
            metadata_def["meAccessibility"]["seDistribution"][
                "onlineResource"] = f

            # TODO: added new field for the original resource (should we have two different metadata?)
            #metadata_def["meAccessibility"]["seDistribution"]["originalResource"] = output_filename

            # adding type of layer
            aggregationProcessing = "none"
            if "Anomaly" in f:
                aggregationProcessing = "anomaly"
            elif "Average" in f:
                aggregationProcessing = "avg"
            metadata_def["meStatisticalProcessing"] = {}
            metadata_def["meStatisticalProcessing"]["seDatasource"] = {}
            metadata_def["meStatisticalProcessing"]["seDatasource"][
                "seDataCompilation"] = {}
            metadata_def["meStatisticalProcessing"]["seDatasource"][
                "seDataCompilation"][
                    "aggregationProcessing"] = aggregationProcessing

            # default style
            metadata_def["meSpatialRepresentation"] = {}
            metadata_def["meSpatialRepresentation"]["seDefaultStyle"] = {}
            if aggregationProcessing == "anomaly":
                metadata_def["meSpatialRepresentation"]["seDefaultStyle"][
                    "name"] = "ndvi_" + aggregationProcessing
            else:
                metadata_def["meSpatialRepresentation"]["seDefaultStyle"][
                    "name"] = "ndvi"

            # merging metadata to the base raster one
            metadata_def = merge_layer_metadata("raster", metadata_def)

            # "seCoverage" : {
            #     "coverageTime" : {
            #         "from" : 1328405808080,
            #         "to": 1328405808080
            #     },
            #     "coverageGeographic" : {
            #         "codeList" : "...",
            #         "version" : "...",
            #         "codes" : [{"code" : "world"}]
            #     },
            #     "coverageSector" : {
            #         "codeList" : "...",
            #         "version" : "...",
            #         "codes" : [{"code" : "MODISQ1"}]
            #     }
            # }]

            log.info(metadata_def)

            # publish layer
            print manager.publish_coverage(f, metadata_def)
Esempio n. 51
0
def publish_layers():
    """Publish every processed TRMM GeoTIFF found in <output_folder>output.

    NOTE(review): near-duplicate of the earlier ``publish_layers`` in this
    file (same logic, different wrapping); at import time Python keeps
    whichever definition appears last.
    """
    files = glob.glob(output_folder + "output/*.tif")

    for f in files:
        log.info("--------------------------------")

        name = get_filename(f)

        log.info("name %s %s" % (name, f))

        # assumes filename layout puts the month at chars 5-7 and the
        # year at chars 8-12 -- TODO confirm against the producing step
        month = int(name[5:7])

        log.info("month %s" % month)
        year = None
        try:
            year = int(name[8:12])
        except Exception, e:
            log.error("year error")

        # TODO: what year for the DA?
        if year is None:
            year = "2014"

        # get title name
        title = name.replace("_", " ").capitalize()

        # get metadata
        from_date, to_date = get_range_dates_metadata(month, year)
        creationDate = calendar.timegm(datetime.datetime.now().timetuple())

        # Sample of Metadata json
        log.info("Creating metadata")
        metadata_def = {}
        metadata_def["title"] = {}
        metadata_def["title"]["EN"] = title
        metadata_def["creationDate"] = creationDate
        metadata_def["meContent"] = {}
        metadata_def["meContent"]["seCoverage"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"][
            "from"] = from_date
        metadata_def["meContent"]["seCoverage"]["coverageTime"]["to"] = to_date
        metadata_def["meContent"]["seCoverage"]["coverageSector"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageSector"][
            "codeList"] = "Products"
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{
            "code":
            "TRMM"
        }]
        # NOTE(review): duplicate of the assignment above -- redundant
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{
            "code":
            "TRMM"
        }]

        # TODO: in theory should be the original file the onlineResource
        metadata_def["meAccessibility"] = {}
        metadata_def["meAccessibility"]["seDistribution"] = {}
        metadata_def["meAccessibility"]["seDistribution"]["onlineResource"] = f

        # TODO: added new field for the original resource (should we have two different metadata?)
        #metadata_def["meAccessibility"]["seDistribution"]["originalResource"] = output_filename

        # adding type of layer
        aggregationProcessing = "none"
        if "_avg" in name:
            aggregationProcessing = "avg"
        elif "_da" in name:
            aggregationProcessing = "da"
        metadata_def["meStatisticalProcessing"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"][
            "seDataCompilation"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"][
            "seDataCompilation"][
                "aggregationProcessing"] = aggregationProcessing

        # default style
        metadata_def["meSpatialRepresentation"] = {}
        metadata_def["meSpatialRepresentation"]["seDefaultStyle"] = {}
        if aggregationProcessing == "da":
            metadata_def["meSpatialRepresentation"]["seDefaultStyle"][
                "name"] = "rainfall_" + aggregationProcessing
        else:
            metadata_def["meSpatialRepresentation"]["seDefaultStyle"][
                "name"] = "rainfall"

        # merging metadata to the base raster one
        metadata_def = merge_layer_metadata("raster", metadata_def)

        # "seCoverage" : {
        #     "coverageTime" : {
        #         "from" : 1328405808080,
        #         "to": 1328405808080
        #     },
        #     "coverageGeographic" : {
        #         "codeList" : "...",
        #         "version" : "...",
        #         "codes" : [{"code" : "world"}]
        #     },
        #     "coverageSector" : {
        #         "codeList" : "...",
        #         "version" : "...",
        #         "codes" : [{"code" : "MODISQ1"}]
        #     }
        # }]

        log.info(metadata_def)

        # publish layer
        print manager.publish_coverage(f, metadata_def)
Esempio n. 52
0
def publish():
    path = "/home/vortex/Desktop/LAYERS/GHG/"
    for dir in os.listdir(path):
        filepath = os.path.join(path, dir, dir + ".geotiff")
        p, fp, name = get_filename(filepath, True)
        print p
        print fp
        print name
        date = name[len(name)-4:]
        product_code = name[:len(name)-5]
        print date
        print product_code

        creationDate = calendar.timegm(datetime.datetime.now().timetuple())


        from_date = datetime.datetime(int(date), int(1), 1)
        to_date = datetime.datetime(int(date), int(12), 31)


        # Sample of Metadata json
        metadata_def = {}
        metadata_def["title"] = {}
        metadata_def["title"]["EN"] = name
        metadata_def["creationDate"] = creationDate
        metadata_def["meContent"] = {}
        metadata_def["meContent"]["seCoverage"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageTime"]["from"] = from_date
        metadata_def["meContent"]["seCoverage"]["coverageTime"]["to"] = to_date

        metadata_def["meContent"]["seCoverage"]["coverageSector"] = {}
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codeList"] = "Products"
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{"code" : product_code}]
        metadata_def["meContent"]["seCoverage"]["coverageSector"]["codes"] = [{"code" : product_code}]


        # TODO: in theory should be the original file the onlineResource
        metadata_def["meAccessibility"] = {}
        metadata_def["meAccessibility"]["seDistribution"] = {}
        # metadata_def["meAccessibility"]["seDistribution"]["onlineResource"] = "/media/vortex/16DE-3364/MODIS_250m.tif"

        # TODO: added new field for the original resource (should we have two different metadata?)
        #metadata_def["meAccessibility"]["seDistribution"]["originalResource"] = output_filename

        # adding type of layer
        aggregationProcessing = "none"
        metadata_def["meStatisticalProcessing"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"]["seDataCompilation"] = {}
        metadata_def["meStatisticalProcessing"]["seDatasource"]["seDataCompilation"]["aggregationProcessing"] = aggregationProcessing;

        # default style
        metadata_def["meSpatialRepresentation"] = {}
        metadata_def["meSpatialRepresentation"]["seDefaultStyle"] = {}
        metadata_def["meSpatialRepresentation"]["seDefaultStyle"]["name"] = default_style


        # merging metadata to the base raster one
        metadata_def = merge_layer_metadata("raster", metadata_def)

        #print manager.publish_coverage(filepath, metadata_def)
        print manager.geoserver.set_default_style(name, default_style)
Esempio n. 53
0
    def _publish_shapefile(self, file_path, metadata_def=None, geoserver_def=None, overwrite=False, publish_on_geoserver=True, publish_metadata=True):
        """
        Publish a zipped shapefile: import it into the spatial database,
        insert its metadata record, and expose the resulting PostGIS table
        as a GeoServer layer. On PGeoException the operation is rolled back.

        @param file_path: path to a .zip archive containing the shapefile,
            or None to skip unzipping/importing/cleanup of the file
        @param metadata_def: metadata dictionary; must contain at least
            title["EN"] (used to derive the sanitized layer name) and
            meSpatialRepresentation
        @param geoserver_def: GeoServer layer definition; missing
            name/title/workspace/datastore entries are filled with defaults
        @param overwrite: not used in the visible body  # NOTE(review): confirm
        @param publish_on_geoserver: when True, import the shapefile into
            PostGIS and publish the table on the GeoServer cluster
        @param publish_metadata: when True, insert metadata_def into the
            metadata database
        @return:
        """
        try:
            # layer_def = layer_def["featureType"]

            # Unzip the shapefile
            if file_path is not None:
                shp_folder = filesystem.unzip(file_path)
                shp_name = filesystem.get_filename(file_path)

                # creating shp path
                shp_folder_and_name = os.path.join(shp_folder, shp_name) + ".shp"
                log.info(shp_folder_and_name)

            # sanitize the layer_name
            # name = sanitize_name(shp_name)
            # layer name is derived from the metadata title, not the file name
            name = sanitize_name(metadata_def["title"]["EN"])

            # getting the default workspace
            # (metadata may override it with an explicit workspace)
            workspace = self.geoserver.get_default_workspace()
            if "workspace" in metadata_def["meSpatialRepresentation"]:
                workspace = metadata_def["meSpatialRepresentation"]["workspace"]

            # setting up the uid
            # uid convention: "<workspace>:<layer name>"
            if "uid" not in metadata_def:
                metadata_def["uid"] = workspace + ":" + name

            # publish shapefile on geoserver
            # TODO: merge the metadata with the default vector metadata
            # fill the missing geoserver layer fields with defaults
            if "name" not in geoserver_def:
                geoserver_def["name"] = name
            if "title" not in geoserver_def:
                geoserver_def["title"] = name
            if "workspace" not in geoserver_def:
                geoserver_def["workspace"] = workspace
            if "datastore" not in geoserver_def:
                geoserver_def["datastore"] = self.geoserver.get_default_datastore()

            # clean layer name
            geoserver_def["name"] = sanitize_name(geoserver_def["name"])

            # import the shapefile on postgis
            # TODO: this should be another import check like: if publish_postgis_table is True
            if publish_on_geoserver is True:
                shapefile.import_shapefile(self.spatial_db, shp_folder_and_name, shp_name, True)

            # publish on metadata
            if publish_metadata is True:
                self.metadata.db_metadata.insert_metadata(metadata_def)

            # publish table on geoserver cluster
            if publish_on_geoserver is True:
                self.geoserver.publish_postgis_table(geoserver_def, True)

            # remove files in input_shapefile
            # (cleanup of the temporary unzip folder)
            if file_path is not None:
                filesystem.remove_folder(shp_folder)

        except PGeoException, e:
            log.error(e)
            self.rollback_shapefile()