Example No. 1
    def create_csv_merge(self, output_file, stats1, stats2):
        # output_file is expected to be an already open file-like object (e.g. StringIO)
        csv_file = csv.writer(output_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)

        # normalize both stats structures through a JSON round trip
        json_data1 = json.loads(json.dumps(stats1))
        json_data2 = json.loads(json.dumps(stats2))

        csv_file.writerow(["code", "label", "valueX", "valueY"])
        for data1 in json_data1:
            for data2 in json_data2:
                try:
                    if data1["code"] == data2["code"]:
                        if "stats" in data1["data"]:
                            log.info(data2)
                            if not math.isnan(data1["data"]["stats"][0]["mean"]) and not math.isnan(data2["data"]["stats"][0]["mean"]):
                                csv_file.writerow([data1["code"], data1["label"], data1["data"]["stats"][0]["mean"], data2["data"]["stats"][0]["mean"]])
                except Exception as e:
                    print e

        # return the file-like object so callers can read the CSV back (see the
        # scatter-analysis example below, which calls result.getvalue())
        return output_file
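
A hypothetical invocation, assuming two zonal-statistics responses shaped like the records iterated above (the same call appears in the scatter-analysis example further down); merger stands in for whatever Stats-like instance owns the method:

import StringIO

stats_x = [{"code": "IT", "label": "Italy", "data": {"stats": [{"mean": 1.2}]}}]
stats_y = [{"code": "IT", "label": "Italy", "data": {"stats": [{"mean": 3.4}]}}]

si = StringIO.StringIO()
merger.create_csv_merge(si, stats_x, stats_y)
print si.getvalue()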
Example No. 2
    def start_manager(self):

        exit_flags[self.tab_id] = 0

        log.info('START | Layers Download Manager')

        thread_list = ['Alpha', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot', 'Golf', 'Hotel', 'India', 'Juliet']
        queue_lock = Lock()
        work_queue = Queue.Queue(len(self.file_paths_and_sizes))
        threads = []

        # NOTE: threads_map_key is not defined in this excerpt; it is assumed
        # here to identify this manager instance (e.g. by tab id)
        threads_map_key = self.tab_id
        for thread_name in thread_list:
            key = str(uuid.uuid4())
            thread = LayerDownloadThread(self.source, thread_name, work_queue, queue_lock, key, self.target_dir, self.tab_id)
            thread.start()
            if threads_map_key not in thread_manager_processes:
                thread_manager_processes[threads_map_key] = {}
            thread_manager_processes[threads_map_key][key] = thread
            threads.append(thread)

        queue_lock.acquire()
        for word in self.file_paths_and_sizes:
            work_queue.put(word)
        queue_lock.release()

        # busy-wait until the worker threads drain the queue (see the join() sketch below)
        while not work_queue.empty():
            pass

        exit_flags[self.tab_id] = 1

        for t in threads:
            t.join()

        log.info('DONE | Layers Download Manager')
Example No. 3
    def start_manager(self):

        exit_flags[self.tab_id] = 0

        log.info('START | Bulk Download Manager')

        thread_list = ['Alpha']
        queue_lock = Lock()
        work_queue = Queue.Queue(len(self.bulk_download_objects))
        threads = []

        for thread_name in thread_list:
            thread = BulkDownloadThread(self.source, thread_name, work_queue, queue_lock, self.tab_id, self.aggregation)
            thread.start()
            threads.append(thread)

        queue_lock.acquire()
        for obj in self.bulk_download_objects:
            work_queue.put(obj)
        queue_lock.release()

        # busy-wait until the worker threads drain the queue (see the join() sketch below)
        while not work_queue.empty():
            pass

        exit_flags[self.tab_id] = 1

        for t in threads:
            t.join()

        log.info('END   | Bulk Download Manager')
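
The "while not work_queue.empty(): pass" loop above spins a CPU core while waiting. A minimal sketch of the idiomatic alternative, assuming each worker calls task_done() once per item it get()s:

import Queue
import threading

work_queue = Queue.Queue()

def worker():
    while True:
        item = work_queue.get()
        # ... download the item here ...
        work_queue.task_done()

t = threading.Thread(target=worker)
t.daemon = True
t.start()

for item in ['Alpha', 'Bravo', 'Charlie']:
    work_queue.put(item)

work_queue.join()  # blocks until task_done() has been called for every item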
Example No. 4
    def _publish_coverage(self, file_path, metadata_def=None, geoserver_def=None, overwrite=False, publish_on_geoserver=True, publish_metadata=True):
        """
        @param file_path:
        @param layer_def:
        @param overwrite:
        @return:
        """
        try:
            log.info(geoserver_def)
            # layer_def = layer_def["coverageStore"]

            # sanitize the layer_name
            #name = sanitize_name(filesystem.get_filename(file_path))
            name = sanitize_name(metadata_def["title"]["EN"])

            # getting the default workspace
            workspace = self.geoserver.get_default_workspace()
            if "workspace" in metadata_def["meSpatialRepresentation"]:
                workspace = metadata_def["meSpatialRepresentation"]["workspace"]

            # set up the uid. TODO: likely to change with Ivano's metadata structure
            if "uid" not in metadata_def:
                metadata_def["uid"] = workspace + ":" + name
            else:
                # the uid is expected to be "workspace:name", so recover the workspace from it
                workspace = metadata_def["uid"].split(":")[0]

            # publish coveragestore on geoserver
            # TODO: merge the metadata with the default vector metadata
            if "name" not in geoserver_def:
                geoserver_def["name"] = name
            if "title" not in geoserver_def:
                geoserver_def["title"] = name
            if "workspace" not in geoserver_def:
                geoserver_def["workspace"] = workspace

            # clean layer name
            #geoserver_def["name"] = sanitize_name(geoserver_def["name"])

            # publish on metadata
            if publish_metadata is True:
                self.metadata.db_metadata.insert_metadata(metadata_def)

            # publish table on geoserver cluster
            if publish_on_geoserver is True:
                self.geoserver.publish_coveragestore(geoserver_def, True)

            # remove the source files and folder
            if file_path is not None:
                filesystem.remove_folder(file_path)

        except PGeoException as e:
            log.error(e)
            self.rollback_raster()
Example No. 5
 def __init__(self, datasource):
     if DBConnection.con is None:
         try:
             log.info("---PostGIS connection initialization---")
             self.datasource = datasource
             if self.datasource["schema"]:
                 self.schema = self.datasource["schema"]
             db_connect_string = self.get_connection_string(False)
             self.con = psycopg2.connect(db_connect_string)
             log.info("Database '%s' connection opened. " % datasource['dbname'] )
         except psycopg2.DatabaseError as db_error:
             log.warn("Error :\n{0}".format(db_error))
             pass
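
The get_connection_string helper is not part of this excerpt; a sketch of what it presumably builds, assuming the usual psycopg2 keyword DSN and hypothetical datasource keys host, username and password alongside the dbname used above:

 def get_connection_string(self, with_password=True):
     ds = self.datasource
     dsn = "host=%s dbname=%s user=%s" % (ds["host"], ds["dbname"], ds["username"])
     if with_password:
         dsn += " password=%s" % ds["password"]
     return dsn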
Example No. 6
def process(obj):
    p = Process()

    output_path = obj["output_path"]
    output_file_name = obj["output_file_name"]
    source_path = obj["source_path"]
    band = obj["band"]

    process = obj["process"]

    # deal with pixel size
    pixel_size = None
    #pixel_size = "0.0020833325"

    # default output is initialized to the source_path
    output_processed_files = source_path

    # loop through the configured processes
    for process_values in process:
        for key in process_values:
            print output_processed_files
            # print key_function
            # for key, value in my_dict.iteritems():
            if key in key_function:
                # explicit functions
                if "extract_bands" in key:
                    output_processed_files = p.extract_bands(
                        output_processed_files, band, output_path)
                # get the pixel size
                elif "get_pixel_size" in key:
                    print "get_pixel_size"
                    pixel_size = p.get_pixel_size(output_processed_files[0],
                                                  process_values[key])
                    log.info(pixel_size)

            else:
                # STANDARD GDAL FUNCTIONS
                print "not function"
                print "parameters"
                print key
                print process_values[key]
                process_values[key] = change_values(process_values[key],
                                                    pixel_size)

                # reflection calls
                output_processed_files = getattr(p,
                                                 key)(process_values[key],
                                                      output_processed_files,
                                                      output_path)

    return output_processed_files
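
For illustration, a hypothetical request object this dispatcher would accept: source_path seeds the pipeline, and each entry of the process list is dispatched either to an explicit function (extract_bands, get_pixel_size) or by reflection to the matching Process method:

obj = {
    "output_path": "/tmp/out",
    "output_file_name": "merged.tif",
    "source_path": ["/tmp/src/*.hdf"],
    "band": 1,
    "process": [
        {"extract_bands": {}},
        {"get_pixel_size": "{{PIXEL_SIZE}} * 2"},
        {"gdal_merge": {"-of": "GTiff"}}
    ]
}
output_files = process(obj)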
Example No. 7
 def extract_band_files(self, input_files, output_path, ext=None):
     output_files = []
     i = 0
     for f in input_files:
         # guard against ext=None, which would break the string concatenation
         output_file_path = os.path.join(output_path, str(i) + (ext or ""))
         cmd = "gdal_translate '" + f + "' " + output_file_path
         log.info(cmd)
         process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
         output, error = process.communicate()
         #TODO catch the error
         log.info(output)
         log.warn(error)
         output_files.append(output_file_path)
         i += 1
     return output_files
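
A minimal sketch of the same call without shell=True, assuming gdal_translate is on the PATH: passing an argument list sidesteps the quoting problems the single-quote wrapping above tries to solve:

import subprocess

def translate(src, dst):
    # argument list instead of a shell string: no quoting or injection issues
    process = subprocess.Popen(["gdal_translate", src, dst],
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, error = process.communicate()
    return process.returncode, output, error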
Example No. 8
def unzip(filezip, path=None, folder_tmp=folder_tmp_default):
    """
    Unzip a file in the tmp folder

    @type filezip: string
    @param filezip: path to the zip file
    @type path: string
    @param path: path from the tmp folder (a fresh tmp folder is created if omitted)
    @return: path to the unzipped folder
    """
    # NOTE: the original default "path=create_tmp_folder()" is evaluated only
    # once, at definition time, so every call would share the same folder;
    # a None sentinel evaluated per call avoids that
    if path is None:
        path = create_tmp_folder()
    path = os.path.join(folder_tmp, path)
    log.info(path)
    with zipfile.ZipFile(filezip, "r") as z:
        z.extractall(path)
    return path
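
A hypothetical call: with the per-call default above, each invocation extracts into its own fresh tmp subfolder:

extracted = unzip("/tmp/modis_tiles.zip")
log.info(extracted)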
Example No. 9
def zip_files(name, files, path=folder_tmp_default):
    extension = "" if name.endswith(".zip") else ".zip"
    zip_path = os.path.join(path, name + extension)
    log.info("Zip: '%s' from zip_files %s - %s" % (zip_path, name, files))
    zf = zipfile.ZipFile(zip_path, "w")
    for fpath in files:
        fdir, fname = os.path.split(fpath)
        # add each file at the archive root, under its bare name
        log.info(fname)
        zf.write(fpath, fname)
    zf.close()
    return zip_path
Example No. 10
    def reload_configuration_geoserver_slaves(self, force_master_reload=False):
        geoserver_cluster = self.config["geoserver_slaves"]

        if force_master_reload is True:
            geoserver_cluster.append(self.config["geoserver_master"])

        for geoserver in geoserver_cluster:
            cs_url = url(geoserver, ["reload?recurse=true"])
            headers, response = self.http.request(cs_url, "POST")
            log.info(headers)
            if headers.status == 200:
                log.info("Geoserver updated %s" % cs_url)
            else:
                log.warn("Raise error?")
                return False
        return True
Example No. 11
def process(obj):
    p = Process()

    output_path = obj["output_path"]
    output_file_name = obj["output_file_name"]
    source_path = obj["source_path"]
    band = obj["band"]

    process = obj["process"]

    # deal with pixel size
    pixel_size = None
    #pixel_size = "0.0020833325"

    # default output is initialized to the source_path
    output_processed_files = source_path

    # loop through the configured processes
    for process_values in process:
        for key in process_values:
            print output_processed_files
            # print key_function
            # for key, value in my_dict.iteritems():
            if key in key_function:
                # explicit functions
                if "extract_bands" in key:
                    output_processed_files = p.extract_bands(output_processed_files, band, output_path)
                # get the pixel size
                elif "get_pixel_size" in key:
                    print "get_pixel_size"
                    pixel_size = p.get_pixel_size(output_processed_files[0], process_values[key])
                    log.info(pixel_size)

            else:
                # STANDARD GDAL FUNCTIONS
                print "not function"
                print "parameters"
                print key
                print process_values[key]
                process_values[key] = change_values(process_values[key], pixel_size)

                # reflection calls
                output_processed_files = getattr(p, key)(process_values[key], output_processed_files, output_path)

    return output_processed_files
Example No. 12
 def gdaladdo(self, parameters, input_files, output_path=None):
     output_files = []
     cmd = "gdaladdo "
     for key in parameters["parameters"].keys():
         cmd += " " + key + " " + str(parameters["parameters"][key])
     for input_file in input_files:
         cmd += " " + input_file
         output_files.append(input_file)
     cmd += " " + parameters["overviews_levels"]
     log.info(cmd)
     try:
         process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
         output, error = process.communicate()
         log.info(output)
         return output_files
     except Exception as e:
         log.error(e)
         raise PGeoException(e.message, 500)
Example No. 13
 def extract_band_files(self, input_files, output_path, ext=None):
     output_files = []
     i = 0
     for f in input_files:
         output_file_path = os.path.join(output_path, str(i) + (ext or ""))
         cmd = "gdal_translate '" + f + "' " + output_file_path
         log.info(cmd)
         process = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True)
         output, error = process.communicate()
         #TODO catch the error
         log.info(output)
         log.warn(error)
         output_files.append(output_file_path)
         i += 1
     return output_files
Example No. 14
 def extract_band_files(self, input_files, output_path, ext=None):
     output_files = []
     i = 0
     try:
         for f in input_files:
             print get_filename(f, True)
             output_file_path = os.path.join(output_path, str(i) + (ext or ""))
             cmd = "gdal_translate '" + f + "' " + output_file_path
             log.info(cmd)
             process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
             output, error = process.communicate()
             log.info(output)
             output_files.append(output_file_path)
             i += 1
         return output_files
     except Exception as e:
         log.error(e)
         raise PGeoException(e.message, 500)
Example No. 15
 def update_layer_metadata(self, name, data, c_type="json"):
     """
     Update the layer by json or xml
     @param name: name of the layer
     @type name: string
     @param data: the data to update, i.e. json '{"layer":{"title":"title of the layer"}}' or xml <layer><title>title of the layer</title></layer>
     @type data: string
     @param c_type: "json" or "xml"
     @type c_type: string
     @return: True if updated
     """
     try:
         headers = get_headers(c_type)
         cs_url = url(self.service_url, ["layers", name + "." + c_type])
         log.info(cs_url)
         self._publish_layer(cs_url, "PUT", data, headers, 200)
         return True
     except Exception as e:
         log.error(e)
         raise PGeoException(e)
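
A hypothetical call, following the JSON shape given in the docstring (geoserver being an instance of this class):

geoserver.update_layer_metadata("mylayer", '{"layer": {"title": "title of the layer"}}')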
Example No. 16
 def gdal_translate(self, parameters, input_files, output_path):
     output_files = []
     output_file = os.path.join(output_path, self.output_file_name)
     output_files.append(output_file)
     cmd = "gdal_translate "
     if "opt" in parameters:
         for key in parameters["opt"].keys():
             cmd += " " + key + " " + str(parameters["opt"][key])
     for input_file in input_files:
         cmd += " " + input_file
     cmd += " " + output_file
     log.info(cmd)
     try:
         process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
         output, error = process.communicate()
         log.info(output)
         return output_files
     except Exception as e:
         log.error(e)
         raise PGeoException(e.message, 500)
Example No. 17
 def set_default_style(self, name, stylename, enabled=True):
     """
     Method used to change the default style of a layer
     :param name: the name of the layer
     :param stylename: the name of the style to set as the default one
     :param enabled: enable/disable the style
     :return: True if the default style was changed
     """
     # curl -v -u $GEOSERVER_PASSWORD -XPUT -H "Content-type: text/xml" -d "<layer><defaultStyle><name>$DEFAULT_STYLE</name></defaultStyle><enabled>$ENABLED</enabled></layer>" $GEOSERVER_URL/rest/layers/$GEOSERVER_WORKSPACE:$NAME
     headers = get_headers("xml")
     xml = "<layer><defaultStyle><name>{0}</name></defaultStyle><enabled>{1}</enabled></layer>".format(unicode(stylename).lower(), unicode(str(enabled).upper()))
     cs_url = url(self.service_url, ["layers", name])
     log.info("Change Layer: %s  default style in %s" % (name, stylename))
     headers, response = self.http.request(cs_url, "PUT", xml, headers)
     log.info(cs_url)
     if headers.status == 200:
         # reload geoserver cluster
         self.reload_configuration_geoserver_slaves()
     else:
         raise PGeoException(response, headers.status)
     return True
Example No. 18
def change_values(obj, pixel_size):
    # serialize, substitute the {{PIXEL_SIZE}} placeholder, then parse back;
    # note that str(None) would inject the literal string "None" if the pixel
    # size has not been resolved yet
    s = json.dumps(obj)
    log.info(pixel_size)
    log.info(s)
    s = s.replace("{{PIXEL_SIZE}}", str(pixel_size))
    log.info(s)
    return json.loads(s)
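
For illustration, a hypothetical gdalwarp parameter block run through this substitution; -tr takes the target resolution in x and y:

opt = {"opt": {"-tr": "{{PIXEL_SIZE}} {{PIXEL_SIZE}}"}}
resolved = change_values(opt, 0.0020833325)
# resolved["opt"]["-tr"] == "0.0020833325 0.0020833325"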
Example No. 19
    def gdalwarp(self, parameters, input_files, output_path):
        print "gdalwarp input_files"
        print input_files
        output_files = []
        output_file = os.path.join(output_path, "warp")
        output_files.append(output_file)

        cmd = "gdalwarp "
        for key in parameters["opt"].keys():
            cmd += " " + key + " " + str(parameters["opt"][key])

        for input_file in input_files:
            cmd += " " + input_file

        cmd += " " + output_file

        log.info(cmd)
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        log.info(output_files)
        return output_files
Example No. 20
    def gdal_merge(self, parameters, input_files, output_path):
        print "gdal_merge"
        output_files = []

        output_file = os.path.join(output_path, "gdal_merge.hdf")
        output_files.append(output_file)

        # creating the cmd
        cmd = "gdal_merge.py "
        for key in parameters.keys():
            cmd += " " + key + " " + str(parameters[key])

        for input_file in input_files:
            cmd += " " + input_file

        cmd += " -o " + output_file

        log.info(cmd)
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        log.info(output_files)
        return output_files
Example No. 21
    def _publish_layer(self, cs_url, c_type, message, headers, expected_code=201):
        try:
            log.info("%s %s" % (cs_url, headers))
            headers, response = self.http.request(cs_url, c_type, message, headers)
            self._cache.clear()
            log.info("%s %s %s" % (message, headers, response))

            if headers.status != expected_code:
                # raise UploadError(response)
                log.info(headers)
                log.info(response)
                raise PGeoException(response, headers.status)
        except PGeoException as e:
            log.error(e)
            raise PGeoException(e.get_message(), e.get_status_code())
Example No. 22
 def __init__(self, settings):
     self.settings = settings
     print settings
     self.db_metadata = DBMetadata(settings["db"]["metadata"])
     self.search = MongoSearch(settings["db"]["metadata"]['connection'], settings["db"]["metadata"]["database"], settings["db"]["metadata"]['document']['layer'])
     log.info("---Metadata initialization---")
     log.info(self.db_metadata)
     log.info(self.search)
Example No. 23
def change_values(obj, pixel_size):
    s = json.dumps(obj)
    log.info(pixel_size)
    log.info(s)
    s = s.replace("{{PIXEL_SIZE}}", str(pixel_size))
    log.info(s)
    return json.loads(s)
Example No. 24
def get_scatter_analysis():
    try:
        # Module to process statistics
        stats = Stats(settings)

        user_json = request.get_json()
        log.info(user_json)
        response = []
        for uid in user_json["raster"]["uids"]:
            log.info(user_json)
            json_stat = copy.deepcopy(user_json)
            json_stat["raster"]["uid"] = uid
            response.append(stats.zonal_stats(json_stat))

        log.info(response[0])
        log.info(response[1])
        # io.BytesIO()
        si = StringIO.StringIO()
        result = stats.create_csv_merge(si, response[0], response[1])
        log.info(result.getvalue())

        return Response(result.getvalue())
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
Example No. 25
    def publish_postgis_table(self, data, overwrite=False):
        """
        :param datasource: datasource stored in geoserver
        :param name: name of the table in postgis
        :return:
        """
        #curl -v -u admin:geoserver -XPOST -H "Content-type: text/xml" -d "<featureType><name>buildings</name></featureType>"
        #http://localhost:8080/geoserver/rest/workspaces/acme/datasources/nyc/featuretypes

        #data = data["featureType"]

        name = data["name"]
        log.info(name)
        workspace = self.get_default_workspace() if "workspace" not in data else data["workspace"]
        datastore = self.get_default_datastore() if "datastore" not in data else data["datastore"]

        if not overwrite:
            log.warn("TODO: shapefile")
            #if self.check_if_layer_exist(name, workspace):
            #  raise PGeoException(errors[520]+": %s" % name)

        try:
            # TODO this can be done with just one request

            # Add layer to coveragestore
            headers = get_headers("xml")
            xml = "<featureType><name>{0}</name></featureType>".format(unicode(name).lower())
            cs_url = url(self.service_url, ["workspaces", workspace, "datastores", datastore, 'featuretypes'])
            self._publish_layer(cs_url, "POST", xml, headers)

            #  Update metadata of the layer
            headers = get_headers("json")
            json_data = deepcopy(data)
            del json_data['name']
            # enable the layer by default
            if "enabled" not in json_data:
                json_data["enabled"] = True
            # json to send to geoserver
            update_layer = {
                "featureType" : json_data
            }
            cs_url = url(self.service_url, ["workspaces", workspace, "datastores", datastore, "featuretypes", name + ".json"])
            log.info(cs_url)
            self._publish_layer(cs_url, "PUT", json.dumps(update_layer), headers, 200)

            # TODO: check why the default style is not updated
            if 'defaultStyle' in json_data:
                if 'name' in json_data['defaultStyle']:
                    log.info("change default style")
                    self.set_default_style(name, json_data['defaultStyle']['name'])
        except PGeoException as e:
            log.error(e.get_message())
            raise PGeoException(e.get_message(), e.get_status_code())
Example No. 26
 def get_pixel_size(self, input_file, formula=None):
     # TODO: get pixel value with rasterio library?
     cmd = "gdalinfo "
     cmd += input_file
     cmd += " | grep Pixel"
     log.info(cmd)
     try:
         process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
         output, error = process.communicate()
         log.info(output)
         if "Pixel Size" in output:
             pixel_size = output[output.find("(")+1:output.find(",")]
             log.info(pixel_size)
             formula = formula.replace("{{PIXEL_SIZE}}", str(pixel_size))
             log.info(formula)
             # NOTE: eval executes arbitrary code; the formula must be trusted
             return eval(formula)
         return None
     except Exception as e:
         log.error(e)
         raise PGeoException(e.message, 500)
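
A minimal hardening sketch, assuming the formulas are plain arithmetic over the pixel size: evaluating with empty builtins keeps eval from reaching anything beyond the expression itself (it does not make untrusted input safe, but it narrows the surface):

def eval_pixel_formula(formula, pixel_size):
    expression = formula.replace("{{PIXEL_SIZE}}", str(pixel_size))
    # no builtins and no names are available to the evaluated expression
    return eval(expression, {"__builtins__": {}}, {})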
Example No. 27
    def gdaladdo(self, parameters, input_files, output_path=None):
        log.info(parameters)
        log.info(input_files)
        output_files = []
        cmd = "gdaladdo "
        for key in parameters["parameters"].keys():
            cmd += " " + key + " " + str(parameters["parameters"][key])

        for input_file in input_files:
            cmd += " " + input_file
            output_files.append(input_file)

        cmd += " " + parameters["overviews_levels"]

        log.info(cmd)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        return output_files
Example No. 28
 def check_if_layer_exist(self, name, workspace=None):
     if workspace is None:
         workspace = self.get_default_workspace()
     layername = workspace + ":" + name
     cs_url = url(self.service_url, ["layers", layername + ".json"])
     log.info("checking coverage exists: %s (%s)" % (name, cs_url))
     response, content = self.http.request(cs_url, "GET")
     log.info(response)
     log.info(content)
     if response.status == 200:
         return True
     return False
Example No. 29
def overviews_tif_file(output_file, parameters=None, overviews_levels=None):
    log.info("Create overviews")

    cmd = "gdaladdo "
    for key in parameters.keys():
        cmd += " " + key + " " + str(parameters[key])
    cmd += " " + output_file
    cmd += " " + overviews_levels

    log.info(cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    return output_file
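
For illustration, a hypothetical call; it assembles and runs a command line like "gdaladdo -r average out.tif 2 4 8 16":

overviews_tif_file("out.tif", parameters={"-r": "average"}, overviews_levels="2 4 8 16")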
Example No. 30
    def gdaladdo(self, parameters, input_files, output_path=None):
        log.info(parameters)
        log.info(input_files)
        output_files = []
        cmd = "gdaladdo "
        for key in parameters["parameters"].keys():
            cmd += " " + key + " " + str(parameters["parameters"][key])

        for input_file in input_files:
            cmd += " " + input_file
            output_files.append(input_file)

        cmd += " " + parameters["overviews_levels"]

        log.info(cmd)
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        return output_files
Example No. 31
 def extract_bands(self, input_files, band, output_path):
     log.info("extract_files_and_band_names")
     log.info(input_files)
     log.info("band: " + str(band))
     bands = []
     ext = None
     try:
         files = glob.glob(input_files[0])
         for f in files:
             gtif = gdal.Open(f)
             sds = gtif.GetSubDatasets()
             bands.append(sds[int(band) - 1][0])
             if ext is None:
                 filename, ext = os.path.splitext(f)
         return self.extract_band_files(bands, output_path, ext)
     except Exception as e:
         log.error(e)
         raise PGeoException(e.message, 500)
Example No. 32
def overviews_tif_file(output_file, parameters=None, overviews_levels=None):
    log.info("Create overviews")

    cmd = "gdaladdo "
    for key in parameters.keys():
        cmd += " " + key + " " + str(parameters[key])
    cmd += " " + output_file
    cmd += " " + overviews_levels

    log.info(cmd)
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               shell=True)
    output, error = process.communicate()
    log.info(output)
    log.warn(error)
    return output_file
Example No. 33
    def gdalwarp(self, parameters, input_files, output_path):
        print "gdalwarp input_files"
        print input_files
        output_files = []
        output_file = os.path.join(output_path, "warp")
        output_files.append(output_file)

        cmd = "gdalwarp "
        for key in parameters["opt"].keys():
            cmd += " " + key + " " + str(parameters["opt"][key])

        for input_file in input_files:
            cmd += " " + input_file

        cmd += " " + output_file

        log.info(cmd)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        log.info(output_files)
        return output_files
Example No. 34
    def gdal_merge(self, parameters, input_files, output_path):
        print "gdal_merge"
        output_files = []

        output_file = os.path.join(output_path, "gdal_merge.hdf")
        output_files.append(output_file)

        # creating the cmd
        cmd = "gdal_merge.py "
        for key in parameters.keys():
            cmd += " " + key + " " + str(parameters[key])

        for input_file in input_files:
            cmd += " " + input_file

        cmd += " -o " + output_file

        log.info(cmd)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        log.info(output_files)
        return output_files
Example No. 35
    def run(self):

        while not exit_flags[self.tab_id]:

            self.queue_lock.acquire()

            if not self.queue.empty():

                self.bulk_download_object = self.queue.get()
                self.total_files = len(self.bulk_download_object['file_list'])
                progress_map[self.tab_id]['total_files'] = self.total_files
                progress_map[self.tab_id]['downloaded_files'] = 0
                progress_map[self.tab_id]['status'] = 'START'
                progress_map[self.tab_id]['progress'] = 0

                self.queue_lock.release()

                self.target_folder = create_filesystem(self.source, self.bulk_download_object['filesystem_structure'])

                ftp = FTP(self.bulk_download_object['ftp_base_url'])

                try:
                    ftp.login()
                except Exception as e:
                    progress_map[self.tab_id]['status'] = 'ERROR'
                    exit_flags[self.tab_id] = 1
                    log.error(e)
                    continue

                ftp.cwd(self.bulk_download_object['ftp_data_dir'])
                remote_files = ftp.nlst()

                for file_name in self.bulk_download_object['file_list']:

                    log.info('Downloading: ' + file_name)

                    if file_name in remote_files:

                        ftp.sendcmd('TYPE i')
                        file_obj = file_name
                        local_file = os.path.join(self.target_folder, file_obj)
                        progress_map[self.tab_id]['status'] = 'ONGOING'

                        if not os.path.isfile(local_file):

                            with open(local_file, 'wb') as f:

                                def callback(chunk):
                                    f.write(chunk)
                                ftp.retrbinary('RETR %s' % file_obj, callback)
                                self.downloaded_files += 1
                                progress_map[self.tab_id]['status'] = 'COMPLETE'
                                progress_map[self.tab_id]['progress'] = self.percent_done()

                        else:
                            self.downloaded_files += 1
                            progress_map[self.tab_id]['status'] = 'COMPLETE'
                            progress_map[self.tab_id]['progress'] = self.percent_done()

                ftp.quit()
                log.info('Download Complete. Start aggregation.')
                self.aggregate_layers()

            else:

                self.queue_lock.release()

            time.sleep(1)
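
The percent_done helper used above is not part of this excerpt; a sketch of what it presumably computes, assuming it reports whole percentage points from the two counters maintained in run():

    def percent_done(self):
        if self.total_files == 0:
            return 100
        return int(100 * self.downloaded_files / float(self.total_files))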
Example No. 36
def process_data(obj):

    output_path = obj["output_path"]
    output_file_extension = None
    # output_file_name is optional in the request object
    output_file_name = obj.get("output_file_name")

    source_path = obj["source_path"]
    band = obj["band"]

    p = Process(output_file_name)

    process = obj["process"]

    # deal with pixel size
    pixel_size = None
    #pixel_size = "0.0020833325"

    # default output is initialized to the source_path
    output_processed_files = source_path

    # loop through the configured processes
    for process_values in process:
        for key in process_values:
            log.info(output_processed_files)
            if key in key_function:

                # explicit functions
                if "extract_bands" in key:
                    output_processed_files = p.extract_bands(output_processed_files, band, output_path)
                # get the pixel size
                elif "get_pixel_size" in key:
                    log.info("get_pixel_size")
                    pixel_size = p.get_pixel_size(output_processed_files[0], process_values[key])
                    log.info(pixel_size)

            else:
                # STANDARD GDAL FUNCTIONS
                log.info("not function")
                log.info("parameters")
                log.info(key)
                log.info(process_values[key])
                process_values[key] = change_values(process_values[key], pixel_size)

                # reflection calls
                output_processed_files = getattr(p, key)(process_values[key], output_processed_files, output_path)
    return output_processed_files
Example No. 37
 def close_connection(self):
     if self.con is not None:
         self.con.close()
         log.info("Database '%s' connection closed. " % self.datasource['dbname'] )
Example No. 38
    def get_pixel_size(self, input_file, formula=None):
        log.info("get_pixel_size")
        log.info(input_file)
        log.info(formula)
        pixel_size = None

        # creating the cmd
        cmd = "gdalinfo "
        cmd += input_file

        # gdalinfo 'HDF4_EOS:EOS_GRID:"/home/vortex/Desktop/LAYERS/MODIS/033/MOD13Q1.A2014033.h23v09.005.2014050114129.hdf":MODIS_Grid_16DAY_250m_500m_VI:250m 16 days NDVI' | grep size

        cmd += " | grep Pixel"

        log.info(cmd)
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   shell=True)
        output, error = process.communicate()
        log.info(output)
        log.warn(error)
        if "Pixel Size" in output:
            pixel_size = output[output.find("(") + 1:output.find(",")]
            log.info(pixel_size)
            formula = formula.replace("{{PIXEL_SIZE}}", str(pixel_size))
            log.info(formula)
            # NOTE: eval executes arbitrary code; the formula must be trusted
            return eval(formula)
        return None
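
The string slicing between "(" and "," above grabs the x resolution from gdalinfo's "Pixel Size = (0.002083,-0.002083)" line; a slightly more robust sketch of the same extraction using a regular expression:

import re

def parse_pixel_size(gdalinfo_output):
    # matches e.g. 'Pixel Size = (0.002083,-0.002083)' and returns the x size
    match = re.search(r"Pixel Size = \(([-+0-9.eE]+),", gdalinfo_output)
    return float(match.group(1)) if match else None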