import Queue
import os
import uuid
import time
from threading import Thread, Timer
import urllib2
from pgeo.utils import log
from pgeo.utils.filesystem import create_filesystem
from pgeo.error.custom_exceptions import PGeoException
from importlib import import_module

thread_manager_processes = {}
multi_progress_map = {}
threads_map_key = 'FENIX'

log = log.logger('download_threads_manager.py')

# template for the per-layer progress report published by each download thread
out_template = {
    'download_size': 0,
    'layer_name': 'unknown',
    'progress': 0,
    'total_size': 'unknown',
    'status': 'unknown',
    'thread': 'unknown',
    'key': None
}

exit_flags = {}


class LayerDownloadThread(Thread):

    file_obj = None
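# A minimal usage sketch (an assumption — the thread body is truncated above):
# each download thread would start from a copy of out_template and publish its
# progress in multi_progress_map, keyed by layer name.
import copy

status = copy.deepcopy(out_template)
status['layer_name'] = '3B42.20010101'  # hypothetical layer name
status['status'] = 'start'
multi_progress_map[status['layer_name']] = status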
import os
import uuid
import zipfile
import tempfile
import shutil
# from pgeo.config.settings import settings
# from pgeo.config.settings import read_config_file_json
from pgeo.utils import log
from pgeo.error.custom_exceptions import PGeoException
from pgeo.error.custom_exceptions import errors

log = log.logger("pgeo.utils.filesystem")

# temporary folder
# folder_tmp = settings['folders']['tmp']
folder_tmp_default = tempfile.gettempdir()


def create_tmp_filename(path='', extension='', folder_tmp=folder_tmp_default, add_uuid=True):
    """
    Create the path and filename for a tmp file
    @type path: string
    @param path: path relative to the tmp folder
    @type extension: string
    @param extension: i.e. .geotiff
    """
    if extension != '' and "." not in extension:
        extension = "." + extension
    if add_uuid:
        return (os.path.join(folder_tmp, path) + str(uuid.uuid4()) + extension).encode('utf-8')
    else:
        return (os.path.join(folder_tmp, path) + extension).encode('utf-8')
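# Usage sketch for create_tmp_filename (values are illustrative): the leading
# dot is added when the extension lacks one, and a uuid4 is appended by default.
tmp = create_tmp_filename(extension='geotiff')
log.info(tmp)  # e.g. /tmp/<uuid4>.geotiff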
from pgeo.dataproviders import trmm2 as t
from pgeo.utils import log
from pgeo.thread.bulk_download_threads_manager import BulkDownloadManager

log = log.logger(__name__)

year = '2001'
month = '01'
from_day = 1
to_day = 1
# zero-padded day strings, e.g. '01', '02', ...
days = map(lambda x: str(x) if x > 9 else '0' + str(x), range(int(from_day), 1 + int(to_day)))
tab_id = 'tab_0'
file_list = t.list_layers_subset(year, month, from_day, to_day)

# one bulk-download object per day, pointing at the TRMM open FTP archive
bulk_download_objects = []
for day in days:
    bdo = {
        'ftp_base_url': 'trmmopen.gsfc.nasa.gov',
        'ftp_data_dir': '/trmmdata/GIS/' + year + '/' + month + '/' + day + '/',
        'file_list': file_list,
        'filesystem_structure': {
            'product': '3B42',
            'year': year,
            'month': month,
            'day': day
        }
    }
    bulk_download_objects.append(bdo)
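# For reference only: the map/lambda used above for zero-padding is equivalent
# to str.zfill, e.g. for days 1 to 3:
padded = [str(d).zfill(2) for d in range(1, 4)]
assert padded == ['01', '02', '03']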
# import rasterio
from osgeo import gdal
from pysal.esda import mapclassify
import brewer2mpl
from threading import Thread
# import Queue
from pgeo.utils.log import logger
from pgeo.error.custom_exceptions import PGeoException
from scipy.optimize import curve_fit
from itertools import izip
from multiprocessing import Process, Manager, Lock, Queue, Pool
import multiprocessing
import threading
from scipy.stats import linregress
from os import kill

log = logger("pgeo.gis.raster_scatter")

# cal = mapclassify.load_example()
# ei = mapclassify.Equal_Interval(cal, k=5)


def create_scatter(raster_path1, raster_path2, band1=1, band2=1, buckets=200, intervals=6, workers=3,
                   forced_min1=0, forced_min2=0, color='Reds', color_type='Sequential', reverse=False):
    log.info(workers)
    ds1 = gdal.Open(raster_path1)
    ds2 = gdal.Open(raster_path2)
    rows1 = ds1.RasterYSize
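# A minimal sketch of the classification hinted at by the commented-out lines
# above, using the old PySAL 1.x API that the imports assume: bucket sample
# values into k equal-width intervals.
cal = mapclassify.load_example()           # sample dataset shipped with PySAL
ei = mapclassify.Equal_Interval(cal, k=5)
log.info(ei.bins)                          # upper bound of each interval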
from osgeo import gdal, osr, ogr
import os
import subprocess
import glob
import math
import json
from pgeo.utils import log
from pgeo.utils import filesystem
from pgeo.error.custom_exceptions import PGeoException, errors

log = log.logger("processing")

key_function = ["extract_bands", "get_pixel_size"]


def process(obj):
    output_path = obj["output_path"]
    output_file_name = obj["output_file_name"]
    source_path = obj["source_path"]
    band = obj["band"]
    process = obj["process"]

    # deal with pixel size
    pixel_size = None
    # pixel_size = "0.0020833325"

    # default init is the source_path
    output_processed_files = source_path
from pgeo.utils.log import logger
from pgeo.gis.raster import get_authority
from pgeo.utils.filesystem import get_filename

log = logger("pgeo.metadata.metadata_bridge")


def translate_from_metadata_to_geoserver(metadata_json, file_path=None):
    geoserver_json = {
        # "name": "",
        "title": "",
        "abstract": "",
        "enabled": True,
        # "workspace": "fenix",
        # "datastore": "pgeo",
        "defaultStyle": {}
    }
    try:
        log.info(metadata_json)
        # a uid of the form "workspace:name" fills both fields
        l = metadata_json["uid"].split(":")
        if len(l) > 1:
            geoserver_json["name"] = l[1]
            geoserver_json["workspace"] = l[0]
        else:
            log.error("there is no workspace associated with the uid of the layer %s" % l[0])
            geoserver_json["name"] = l[0]
    except Exception:
        pass
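# Usage sketch (hypothetical uid, and assuming the truncated function goes on
# to return geoserver_json): a prefixed uid fills both fields, a bare uid only
# the name.
layer = translate_from_metadata_to_geoserver({"uid": "fenix:trmm_3B42_2001_01"})
# -> name "trmm_3B42_2001_01", workspace "fenix"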
from pgeo.config.settings import settings
from pgeo.manager.manager import Manager
from pgeo.utils import log
import json
import copy
import random

log = log.logger("pgeo.manager.manager_test")

manager = Manager(settings)

# Publish a Shapefile
layer_def = {
    "title": "ne_110m_dddgeography_regions3333222_poly",
    "abstract": "ne_110m_geography_reeeeegions_polys33332",
    # "workspace": "fenix",
    # "datastore": "pgeo",
    "enabled": True,
    "defaultStyle": {
        "name": "population"
    }
}
metadata_def = copy.deepcopy(layer_def)
manager.publish_shapefile("/home/vortex/Desktop/LAYERS/test_imports/shapefile/ne_110m_geography_regions_polys.zip",
                          metadata_def, layer_def)

# Publish a Coveragestore
# randomName = random.random()
# name = "test" + str(randomName).replace(".", "")
import time
import json
import os
import glob
from pgeo.config.settings import settings, read_template
from pgeo.metadata.metadata import Metadata
from pgeo.manager.manager import Manager
from pgeo.utils.log import logger
from pgeo.metadata.metadata import merge_layer_metadata
from data_processing.processing import process_layers
from pgeo.utils.filesystem import get_filename

# TODO: remove all trmm layer metadata from mongo
# db.layer.remove( { uid: { $regex: 'trmm_*', $options: 'i' } } );

log = logger("playground.data_processing.trmm")

input_folder = "/home/vortex/Desktop/LAYERS/TRMM/"
output_folder = "/home/vortex/Desktop/LAYERS/TRMM/monthly/"
manager = Manager(settings)


# def dt2unix(dt):
#     return int(time.mktime(dt.timetuple()) + (dt.microsecond / 10.0 ** 6))


def calc_trmm_monthly(year, month, file_prefix="trmm", calc=False):
    try:
        files_path = input_folder + year + "/" + month + "/*.tif"
        output_filename = file_prefix + "_" + month + "_" + year + ".tif"
        output_file = output_folder + output_filename
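# Illustrative call (an assumption — the truncated body above suggests the
# function aggregates the daily GeoTIFFs of a month into output_file):
calc_trmm_monthly('2001', '01', calc=True)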
import datetime
import time
import json
import os
import glob
import re
import sys
from pgeo.utils.filesystem import get_filename
from pgeo.config.settings import settings, read_template
from pgeo.metadata.metadata import Metadata
from pgeo.manager.manager import Manager
from pgeo.utils.log import logger
from pgeo.metadata.metadata import merge_layer_metadata
from data_processing.processing import process_layers

log = logger("playground.data_processing.earthstat")

input_file = "/home/vortex/Desktop/LAYERS/AFRIPOP/to_publish/ap10v4_TOTAL.tif"
output_folder = "/home/vortex/Desktop/LAYERS/AFRIPOP/to_publish/output/"
manager = Manager(settings)


def process():
    if os.path.isdir(output_folder):
        log.info("output folder already exists")
    else:
        os.mkdir(output_folder)
    output_file = output_folder + get_filename(input_file) + ".tif"
    process_layers(input_file, output_file)
    print output_file
from pgeo.geoserver.geoserver import Geoserver
from pgeo.config.settings import settings
from pgeo.utils import log
from pgeo.error.custom_exceptions import PGeoException
import sys
import random

log = log.logger("pgeo.geoserver.geoserver_test")

g = Geoserver(settings["geoserver"])

try:
    g.reload_configuration_geoserver_slaves()
except PGeoException as e:
    log.error(e)
import glob
import os
import json
from copy import deepcopy
from osgeo import gdal, osr
from pgeo.config.settings import settings
from pgeo.manager.manager import Manager, sanitize_name
from pgeo.utils.log import logger
from pgeo.utils import filesystem
from pgeo.metadata.metadata import merge_layer_metadata

log = logger("pgeo.manager.layer_utils")

# default options
raster_template = "raster"
default_metadata_json = {}


def harvest_folder(path):
    """
    Harvest every raster file (tiff, geotiff, gtiff, tif) found in a path
    :param path: folder to scan
    :return:
    """
    manager = Manager(settings)
    types = ('*.tiff', '*.geotiff', '*.gtiff', '*.tif')
    files_grabbed = []
    for file_type in types:
        files_grabbed.extend(glob.glob(os.path.join(path, file_type)))
    for file_path in files_grabbed:
import time
import json
import os
import glob
import re
import sys
from pgeo.utils.filesystem import get_filename
from pgeo.config.settings import settings, read_template
from pgeo.metadata.metadata import Metadata
from pgeo.manager.manager import Manager
from pgeo.utils.log import logger
from pgeo.metadata.metadata import merge_layer_metadata
from data_processing.processing import process_layers

log = logger("playground.data_processing.earthstat")

input_folder = "/home/vortex/Desktop/LAYERS/earthstat/175CropsYieldArea_geotiff/*"
output_folder = "/home/vortex/Desktop/LAYERS/earthstat/output/"
manager = Manager(settings)


def process_earthstat():
    if os.path.isdir(output_folder):
        log.info("output folder already exists")
    else:
        os.mkdir(output_folder)
    dirs = glob.glob(input_folder + "*")
    for d in dirs:
        if os.path.isdir(d):
from flask import Blueprint
from flask.ext.cors import cross_origin
from flask import request
from flask import Response
import calendar
import datetime
from pgeo.error.custom_exceptions import PGeoException
from pgeorest.config.settings import read_config_file_json
from pgeo.gis.processing import process_data
from pgeo.manager.manager import Manager
from pgeorest.config.settings import settings
from pgeo.utils import log

processing = Blueprint('processing', __name__)
log = log.logger('process.py')


@processing.route('/')
def index():
    return 'Welcome to the Process module!'


@processing.route('/list/<source_name>', methods=['GET'])
@processing.route('/list/<source_name>/', methods=['GET'])
@cross_origin(origins='*', headers=['Content-Type'])
def list_steps_service(source_name):
    try:
        conf = read_config_file_json(source_name, 'data_providers')
        obj = conf['processing']
        try:
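# Sketch of the wiring assumed but not shown in this module: the blueprint is
# registered on the Flask application elsewhere in pgeorest, along these lines:
from flask import Flask

app = Flask(__name__)
app.register_blueprint(processing, url_prefix='/processing')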
from osgeo import gdal
import os
import subprocess
import glob
import json
from pgeo.utils import log
from pgeo.error.custom_exceptions import PGeoException

log = log.logger("processing")

key_function = ["extract_bands", "get_pixel_size"]


def process_data(obj):
    output_path = obj["output_path"]
    output_file_name = None
    output_file_extension = None
    try:
        output_file_name = obj["output_file_name"]
    except KeyError:
        pass
    source_path = obj["source_path"]
    band = obj["band"]
    process = obj["process"]

    # deal with pixel size
    pixel_size = None
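# Hypothetical input for process_data, restricted to the keys the function
# reads above; "process" is presumably a list of step definitions whose names
# appear in key_function.
obj = {
    "output_path": "/tmp/out",
    "output_file_name": "processed",
    "source_path": "/tmp/in.tif",
    "band": 1,
    "process": []
}
process_data(obj)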
import glob
from pgeo.config.settings import settings, read_template
from pgeo.metadata.metadata import Metadata
from pgeo.manager.manager import Manager
from pgeo.utils.log import logger
from pgeo.metadata.metadata import merge_layer_metadata
from data_processing.processing import process_layers
from pgeo.utils.filesystem import get_filename, remove
from pgeo.utils.date import day_of_the_year_to_date

# TODO: remove all trmm layer metadata from mongo
# db.layer.remove( { uid: { $regex: 'trmm_*', $options: 'i' } } );

log = logger("playground.data_processing.trmm")

input_folder = "/home/vortex/Desktop/LAYERS/TRMM_alex/output/*"
output_folder = ""
manager = Manager(settings)


# def dt2unix(dt):
#     return int(time.mktime(dt.timetuple()) + (dt.microsecond / 10.0 ** 6))


def calc():
    # collect the folders to process
    folders = glob.glob(input_folder)
import pymongo
from pgeo.db.mongo import common
from pgeo.utils import log

log = log.logger(__name__)


class DBMetadata:

    # client = pymongo.MongoClient(settings['db']['metadata']['connection'])
    # database = settings['db']['metadata']['database']
    # document_layer = settings['db']['metadata']['document']['layer']

    def __init__(self, config):
        """
        @param config: config parameters used to configure the metadata db
        @return:
        """
        self.config = config
        self.client = pymongo.MongoClient(config['connection'])
        self.database = config['database']
        self.document_layer = config['document']['layer']

    def insert_metadata(self, json):
        """
        Insert layer metadata in mongodb
        @param json: json data
        @return: id
        """
        return common.insert(self.client, self.database, self.document_layer, json)
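# Usage sketch with an illustrative config (keys taken from __init__ above):
config = {
    'connection': 'mongodb://localhost:27017',
    'database': 'metadata',
    'document': {'layer': 'layer'}
}
db = DBMetadata(config)
uid = db.insert_metadata({'uid': 'workspace:layer_name'})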
import numpy as np
from osgeo import gdal
from osgeo.osr import SpatialReference
from pgeo.utils.log import logger
from pgeo.error.custom_exceptions import PGeoException
from pgeo.utils.filesystem import create_tmp_filename

log = logger("pgeo.gis.raster_mapalgebra")


def filter_layers(raster_path1, raster_path2, min1=0, max1=None, min2=0, max2=None, band1=1, band2=1):
    print raster_path1, raster_path2
    ds1 = gdal.Open(raster_path1)
    ds2 = gdal.Open(raster_path2)
    rows1 = ds1.RasterYSize
    cols1 = ds1.RasterXSize
    rows2 = ds2.RasterYSize
    cols2 = ds2.RasterXSize
    log.info("Map algebra processing")
    if cols1 != cols2 or rows1 != rows2:
        log.error("The rasters cannot be processed because they have different dimensions")
        log.error("%sx%s %sx%s" % (rows1, cols1, rows2, cols2))
        raise PGeoException("The rasters cannot be processed because they have different dimensions",
                            status_code=404)
    band1 = ds1.GetRasterBand(band1)
    array1 = np.array(band1.ReadAsArray()).flatten()
    # array1 = np.array(band1.ReadAsArray())
    nodata1 = band1.GetNoDataValue()
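# A minimal sketch (not the original continuation, which is truncated) of how
# the min/max/nodata filter could be applied to the flattened band:
def _apply_filter(array, nodata, vmin=0, vmax=None):
    mask = (array != nodata) & (array >= vmin)  # drop nodata and low values
    if vmax is not None:
        mask &= (array <= vmax)                 # honour the optional maximum
    return array[mask]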