def default_workspace(self):
    """
    Get a default workspace -- create if it does not exist.

    Name and namespace URI come from the CKAN config options
    ``geoserver.workspace_name`` and ``geoserver.workspace_uri``.

    @return: workspace instance
    """
    ws_name = config.get("geoserver.workspace_name", "ckan")
    ws_uri = config.get("geoserver.workspace_uri", "http://localhost/ckan")
    # Look the workspace up first; only create it when missing.
    workspace = self.get_workspace(ws_name)
    if workspace is None:
        workspace = self.create_workspace(ws_name, ws_uri)
    return workspace
def default_workspace(self, name=None, uri=None):
    """
    Get a default workspace -- create if it does not exist.

    @param name: workspace name; when None, falls back to the
        ``geoserver.workspace_name`` config option (default "ckan")
    @param uri: base namespace URI; when None, falls back to the
        ``geoserver.workspace_uri`` config option
        (default "http://localhost/ckan")
    @return: workspace instance
    """
    # Bug fix: the name/uri arguments were accepted but immediately
    # overwritten by the config values, so callers could never override
    # them. Honor explicit arguments and only fall back to config.
    if name is None:
        name = config.get("geoserver.workspace_name", "ckan")
    if uri is None:
        uri = config.get("geoserver.workspace_uri", "http://localhost/ckan")
    ngds_workspace = self.get_workspace(name)
    if ngds_workspace is None:
        # The workspace name is appended as a URI fragment so each
        # workspace gets a distinct namespace URI.
        ngds_workspace = self.create_workspace(name, uri + '#' + name)
    return ngds_workspace
def get_datastore(self, workspace=None, store_name=None, layer_name=None, layer_version=None):
    """
    Make a connection to the datastore, create the datastore if it does not exist.

    The database we point to will be CKAN's datastore database in most
    cases because that's where all of our uploaded files wind up.
    Otherwise, specify the name of the database you want to make a
    connection with through the 'store_name' argument.

    @param workspace: geoserver workspace; defaults to
        ``self.default_workspace(layer_name, layer_version)``
    @param store_name: datastore name; defaults to the database name
        parsed from ``ckan.datastore.write_url``
    @param layer_name: forwarded to ``default_workspace`` when no
        workspace is given
    @param layer_version: forwarded to ``default_workspace`` when no
        workspace is given
    @return: datastore object
    """
    # Extract values from ckan config file
    datastore_url = config.get('ckan.datastore.write_url', 'postgresql://*****:*****@localhost/datastore')

    # Extract connection details. Bug fix: capture an optional ":port"
    # after the host -- previously a URL such as "...@host:5433/db" put
    # ":5433" into the host group while the port stayed hard-coded.
    pattern = r"://(?P<user>.+?):(?P<pass>.+?)@(?P<host>.+?)(?::(?P<port>\d+))?/(?P<database>.+)$"
    details = re.search(pattern, datastore_url)

    # Give a name to the workspace and specify the datastore
    if workspace is None:
        workspace = self.default_workspace(layer_name, layer_version)
    if store_name is None:
        store_name = details.group("database")

    # Check if the datastore exists, create if it does not exist
    try:
        ds = self.get_store(store_name, workspace)
    except Exception:
        ds = self.create_datastore(store_name, workspace)
        ds.connection_parameters.update(
            host=details.group("host"),
            # Use the port from the URL when present, else postgres default
            port=details.group("port") or "5432",
            database=details.group("database"),
            user=details.group("user"),
            passwd=details.group("pass"),
            dbtype="postgis"
        )
        self.save(ds)

    # Return datastore object
    return ds
def from_ckan_config(cls):
    """
    Setup the Geoserver Catalog from CKAN configuration.

    Reads ``geoserver.rest_url``, which may embed credentials in the
    form ``scheme://user:pass@host/geoserver/rest``; a ``geoserver://``
    pseudo-scheme is accepted and normalized to ``http://``.

    @param cls: This class.
    @return: a Geoserver catalog
    """
    # NOTE(review): the original line was mangled (a credential scrub
    # merged the config default into the regex and dropped the
    # `userInfo` assignment); reconstructed as a config read plus a
    # separate credential search -- confirm the default URL against the
    # deployment documentation.
    url = config.get("geoserver.rest_url", "http://localhost:8080/geoserver/rest")

    # Pull optional "user:pass@" credentials out of the URL
    userInfo = re.search(r"://(?P<auth>(?P<user>.+?):(?P<pass>.+?)@)?.+", url)
    user = userInfo.group("user") or "admin"
    pwd = userInfo.group("pass") or "geoserver"

    # Remove it from the connection URL if it was there
    url = url.replace(userInfo.group("auth") or "", "")
    if url:
        url = url.replace('geoserver://', 'http://')

    # Make the connection
    return cls(url, username=user, password=pwd, disable_ssl_certificate_validation=True)
def setup_template_variables(self, context, data_dict):
    """Build the template context for this resource view.

    Exposes the resource id and the absolute URL of CKAN's
    ``datastore_search_sql`` API endpoint (derived from
    ``ckan.site_url``).
    """
    site_url = cf.get('ckan.site_url', 'http://127.0.0.1')
    query_api = site_url + '/api/action/datastore_search_sql'
    return {
        'dataset_resource_id': data_dict['resource']['id'],
        'QUERY_API': query_api,
    }
def getOGCServices(self):
    """
    Proxy OGC service requests to geoserver and rewrite the XML responses.

    Query-string parameters:
      - url: geoserver endpoint to proxy (URL-encoded)
      - workspace: geoserver workspace; its "#<workspace>" namespace
        suffix is stripped from the proxied response
      - request: OGC request type; only "GetCapabilities" (the default
        when absent) and "GetFeature" are handled
      - service / typename / version / maxfeatures: forwarded on a
        GetFeature request

    Aborts with 400 when url/workspace are missing and 500 on any
    other error.
    """
    data = clean_dict(unflatten(tuplize_dict(parse_params(request.params))))
    url = data.get('url', None)
    workspace = data.get('workspace', None)
    # The USGIN MODEL WFS validator may append a '?' to the workspace name
    if workspace:
        workspace = workspace.replace('?', '')
    request_ogc = data.get('request', None)
    obj = None
    try:
        if not request_ogc or (request_ogc and request_ogc == "GetCapabilities"):
            if url and workspace:
                oResponse = requests.get(urllib.unquote_plus(url))
                # Strip the "(#name_workspace)" suffix from the NamespaceURI
                obj = oResponse.text.replace('#'+workspace, '')
                # Rewrite every xlink:href so follow-up OGC requests go
                # through this API (which strips "#<workspace>") instead of
                # hitting geoserver directly
                siteUrl = config.get('ckan.site_url', None)
                if siteUrl:
                    newServiceUrl = siteUrl+"/geoserver/get-ogc-services?url="
                    match = re.compile('xlink:href=[\'|"](.*?)[\'"]')
                    matches = match.findall(obj)
                    # Loop through all occurrences and replace one by one to
                    # add the CKAN-geoserver API link
                    for item in matches:
                        obj = obj.replace(item, newServiceUrl+urllib.quote_plus(item)+"&workspace="+workspace, 1)
            else:
                msg = 'An error ocurred: [Bad Request - Missing parameters]'
                abort(400, msg)
        elif request_ogc and request_ogc == "GetFeature":
            service = data.get('service', None)
            typename = data.get('typename', None)
            version = data.get('version', None)
            maxfeatures = data.get('maxfeatures', None)
            getFeatureURL = urllib.unquote_plus(url)+"?service=%s&request=%s&typename=%s&version=%s" % (service, request_ogc, typename, version)
            if maxfeatures:
                getFeatureURL = getFeatureURL+"&maxfeatures=%s" % maxfeatures
            oResponse = requests.get(getFeatureURL)
            # Strip the "(#name_workspace)" suffix from the NamespaceURI
            obj = oResponse.text.replace('#'+workspace, '')
        response.content_type = 'application/xml; charset=utf-8'
        # NOTE(review): length is computed on the unicode text, while the
        # body below is UTF-8 encoded -- may understate Content-Length for
        # non-ASCII responses; confirm.
        response.headers['Content-Length'] = len(obj)
        return obj.encode('utf-8')
    except Exception, e:
        msg = 'An error ocurred: [%s]' % str(e)
        abort(500, msg)
def command(self):
    """
    Purge Datastore tables whose resources no longer exist in the node.

    Collects every resource identifier known to the node via
    ``resource_search``, then pages through the Datastore's
    ``_table_metadata`` listing (100 records per page) and deletes any
    Datastore table whose resource id is not among them.
    """
    self._load_config()
    LOGGER.info("Comenzando limpieza del Datastore")
    # Use a LocalCKAN instance to obtain the default site user's apikey
    lc = LocalCKAN()
    site_user = lc._get_action('get_site_user')({'ignore_auth': True}, ())
    apikey = site_user.get('apikey')
    # Accumulate the ids of every resource in the node
    datajson_resource_ids = []
    # NOTE(review): 'user' is set to the full site_user dict here; CKAN
    # contexts usually carry the user *name* -- confirm against callers.
    context = {'model': model, 'session': model.Session, 'user': site_user}
    data_dict = {'query': 'name:id', 'limit': None, 'offset': 0}
    result = logic.get_action('resource_search')(context, data_dict).get(
        'results', [])
    for resource in result:
        datajson_resource_ids.append(resource.get('identifier'))
    # Skip the cleanup entirely when the node has no datasets
    if not len(datajson_resource_ids):
        LOGGER.info(
            "No existen datasets en el nodo, por lo que no se realizará ninguna limpieza"
        )
    else:
        # The Datastore resource search fails unless the url starts with 'http'
        site_url = config.get('ckan.site_url')
        if not site_url.startswith('http'):
            site_url = 'http://' + site_url
        # Fetch the Datastore's table metadata listing
        rc = RemoteCKAN(site_url, apikey)
        datastore_resources = rc.action.datastore_search(
            resource_id='_table_metadata')
        # Delete every Datastore resource not present in
        # `datajson_resource_ids`. `datastore_search` returns 100 rows per
        # call, so page through with an offset.
        current_offset = 0
        while datastore_resources.get('total') > current_offset:
            for datastore_resource in datastore_resources.get('records'):
                # In the Datastore the resource id is stored under `name`
                # (skip the "_table_metadata" meta-table itself)
                datastore_resource_id = datastore_resource.get('name')
                if datastore_resource_id != "_table_metadata" and datastore_resource_id not in datajson_resource_ids:
                    try:
                        rc.action.datastore_delete(
                            resource_id=datastore_resource_id, force=True)
                    except Exception as e:
                        # Best-effort: log the failure and keep deleting
                        LOGGER.warn(
                            'Intentando eliminar del Datastore el recurso %s surgió un error: %s',
                            datastore_resource_id, e)
            current_offset += 100
            datastore_resources = rc.action.datastore_search(
                resource_id='_table_metadata', offset=current_offset)
        LOGGER.info("Limpieza del Datastore terminada")
def get_resource_icon(resource, config):
    """Return the icon URL for a resource, or None when it has none.

    The resource's own ``icon_url`` wins; otherwise the resource is
    looked up in the theme config under the key
    "<org_id>_<package_id>_<resource_id>".
    """
    direct_url = resource.get('icon_url', None)
    if direct_url:
        return direct_url
    package_id = resource['package_id']
    org_id = get_package_organization(package_id).get('id', '')
    lookup_key = '%s_%s_%s' % (org_id, package_id, resource['id'])
    theme_config = config if config else get_theme_config()
    entry = theme_config.get('resources', {}).get(lookup_key, None)
    if entry is None:
        return None
    return entry.get('icon_url', None)
def connect_datajson(self):
    """Wire up the /data.json and /catalog.xlsx routes.

    /datajson redirects to itself (kept for legacy links); catalog.xlsx
    is either served by the datajson controller or, when
    ``andino.disable_catalog_xlsx_url`` is set, redirected to the home
    page.
    """
    with SubMapper(self.route_map, controller=self.datajson_controller) as mapper:
        mapper.connect('datajson', '/data.json', action='datajson')
    self.redirect(
        ('/datajson', '/datajson'),
    )
    xlsx_disabled = config.get('andino.disable_catalog_xlsx_url')
    if xlsx_disabled in ('True', 'true', '1', 'Yes', 'yes', ):
        # Catalog download disabled: send users to the home page instead
        self.redirect(
            ('/catalog.xlsx', '/'),
        )
    else:
        with SubMapper(self.route_map, controller=self.datajson_controller) as mapper:
            mapper.connect('catalog_xlsx', '/catalog.xlsx', action='catalog_xlsx')
def store_object_data_excluded_from_datajson(object_dict_name, data_dict):
    """Persist per-object data that must stay out of data.json.

    :param object_dict_name: entity-type key inside the theme config
        (e.g. 'groups', 'resources')
    :param data_dict: dict holding the object's id plus the extra data
        to store; its 'id' entry is popped before saving (the caller's
        dict is mutated, as before)
    :return: the stored data for the object, or None when data_dict
        carried nothing besides its id
    """
    theme_config = get_theme_config()
    object_id = data_dict.get('id')
    # Nothing to store when only the id is present
    if len(data_dict) <= 1:
        return None
    data_dict.pop('id')
    section = theme_config.get(object_dict_name, {})
    section[object_id] = data_dict
    theme_config[object_dict_name] = section
    GobArConfigController.set_theme_config(theme_config)
    return theme_config[object_dict_name][object_id]
def get_datastore(self, workspace=None, store_name=None, layer_name=None, layer_version=None):
    """
    Make a connection to the datastore, create the datastore if it does not exist.

    The database we point to will be CKAN's datastore database in most
    cases because that's where all of our uploaded files wind up.
    Otherwise, specify the name of the database you want to make a
    connection with through the 'store_name' argument.

    @param workspace: geoserver workspace; defaults to
        ``self.default_workspace(layer_name, layer_version)``
    @param store_name: datastore name; defaults to the database name
        parsed from ``ckan.datastore.write_url``
    @param layer_name: forwarded to ``default_workspace`` when no
        workspace is given
    @param layer_version: forwarded to ``default_workspace`` when no
        workspace is given
    @return: datastore object
    """
    # Extract values from ckan config file
    datastore_url = config.get(
        'ckan.datastore.write_url',
        'postgresql://*****:*****@localhost/datastore')

    # Extract connection details. Bug fix: an optional ":port" after the
    # host is now captured -- the port used to be hard-coded to 5432 even
    # when the write URL specified a different one.
    pattern = r"://(?P<user>.+?):(?P<pass>.+?)@(?P<host>.+?)(?::(?P<port>\d+))?/(?P<database>.+)$"
    details = re.search(pattern, datastore_url)

    # Give a name to the workspace and specify the datastore
    if workspace is None:
        workspace = self.default_workspace(layer_name, layer_version)
    if store_name is None:
        store_name = details.group("database")

    # Check if the datastore exists, create if it does not exist
    try:
        ds = self.get_store(store_name, workspace)
    except Exception:
        ds = self.create_datastore(store_name, workspace)
        ds.connection_parameters.update(host=details.group("host"),
                                        port=details.group("port") or "5432",
                                        database=details.group("database"),
                                        user=details.group("user"),
                                        passwd=details.group("pass"),
                                        dbtype="postgis")
        self.save(ds)

    # Return datastore object
    return ds
def get_default_background_configuration():
    """Return the configured background opacity (``andino.background_opacity``)."""
    return config.get('andino.background_opacity')
def get_gtm_code():
    """Return the Google Tag Manager container id.

    The theme config value takes precedence; otherwise fall back to the
    ``ckan.google_tag_manager.gtm_container_id`` CKAN option ('' when
    neither is set).
    """
    theme_value = get_theme_config('google_tag_manager.container-id')
    if theme_value:
        return theme_value
    return config.get('ckan.google_tag_manager.gtm_container_id', '')
def get_default_series_api_url():
    """Return the default time-series API URI ('' when unset)."""
    api_uri = config.get('seriestiempoarexplorer.default_series_api_uri', '')
    return api_uri
def get_google_analytics_id():
    """Return the Google Analytics id.

    The theme config value takes precedence; otherwise fall back to the
    ``googleanalytics.id`` CKAN option ('' when neither is set).
    """
    theme_value = get_theme_config('google_analytics.id')
    if theme_value:
        return theme_value
    return config.get('googleanalytics.id', '')
def get_andino_base_page():
    """Return the base page template name (``andino.base_page``)."""
    base_page = config.get('andino.base_page', 'gobar_page.html')
    return base_page
def get_recommended_datasets(pkg_id):
    """
    Build the list of datasets recommended for a package.

    Sources:
      1. The package's explicit ``linked_datasets`` field (if present).
      2. When the RTPA API is configured
         (``ckan.extensions.rtpa_tet_dataset_automatic_recommendations.rtpa_api``),
         up to 10 related datasets from that API -- this overwrites 1.
      3. Otherwise, a ``package_search`` over matching ``category`` /
         ``target_audience`` values (up to 5 results) -- this also
         overwrites 1.

    The package itself is filtered out of the final list.

    :param pkg_id: id or name of the package to recommend for
    :return: dict with a "datasets" list of {name, title, notes} items
        (package_search result dicts in case 3)
    """
    package = toolkit.get_action('package_show')(None, {'id': pkg_id.strip()})
    response_data = {}
    RtpaApi = cf.get(
        'ckan.extensions.rtpa_tet_dataset_automatic_recommendations.rtpa_api',
        False)
    if "linked_datasets" in package and package["linked_datasets"] != "":
        linked = []
        for pkg in package["linked_datasets"].split(","):
            p = toolkit.get_action('package_show')(None, {'id': pkg})
            linked.append({
                "name": pkg,
                "title": p["title"],
                "notes": p["notes"],
            })
        response_data["datasets"] = linked
    if RtpaApi:
        relateddatasets = []
        url = RtpaApi + package["id"] + "/3"
        # Bug fix: `data` used to stay undefined when the request failed,
        # raising NameError right after the except block; default to an
        # empty result set instead.
        data = {'result': []}
        try:
            data = json.loads(urllib2.urlopen(url).read())
        except Exception as e:
            print(e)
        # Keep at most 10 recommendations
        for element in data['result'][:10]:
            relateddatasets.append({
                "name": element['id'],
                "title": element['title'],
                "notes": element['notes'],
            })
        response_data["datasets"] = relateddatasets
    else:
        category_string = ''
        taget_audience_string = ''
        if "category" in package and not package["category"] == "":
            category_string = "category:\"" + package["category"] + "\"~25"
        if "target_audience" in package and not package[
                "target_audience"] == "":
            taget_audience_string = "target_audience:\"" + package[
                "target_audience"] + "\"~25"
        if category_string and taget_audience_string:
            q = category_string + " OR " + taget_audience_string
        elif category_string:
            q = category_string
        elif taget_audience_string:
            q = taget_audience_string
        else:
            q = ''
        data_dict = {
            'qf': 'target_audience^4 category^4 name^4 title^4 tags^2 groups^2 text',
            'q': q,
            'rows': 5
        }
        response_data["datasets"] = toolkit.get_action('package_search')(
            None, data_dict)["results"]
    # Bug fix: the original removed items from the list while iterating
    # over it, which can skip entries; filter into a new list instead.
    response_data["datasets"] = [
        ds for ds in response_data["datasets"] if ds["name"] != pkg_id
    ]
    return response_data
def get_current_url_for_resource(package_id, resource_id):
    """Return the absolute CKAN URL of a resource page.

    Bug fix: URLs are now assembled explicitly instead of with
    ``os.path.join``, which is platform-dependent (backslashes on
    Windows) and silently discards earlier components when a later one
    starts with '/'.

    :param package_id: id or name of the dataset
    :param resource_id: id of the resource
    :return: "<site_url>/dataset/<package_id>/resource/<resource_id>"
    """
    site_url = config.get('ckan.site_url').rstrip('/')
    return '%s/dataset/%s/resource/%s' % (site_url, package_id, resource_id)
def get_gtm_code():
    """Return the Google Tag Manager container id from CKAN config (None when unset)."""
    container_id = config.get('ckan.google_tag_manager.gtm_container_id', None)
    return container_id
def is_distribution_local(distribution_metadata):
    """Tell whether a distribution's accessURL points at this CKAN site.

    Bug fix: guard against a missing ``ckan.site_url`` -- the original
    raised TypeError on ``startswith(None)``; now it reports not-local.

    :param distribution_metadata: distribution dict (data.json style)
    :return: True when accessURL starts with ``ckan.site_url``
    """
    ckan_site_url = config.get('ckan.site_url')
    if not ckan_site_url:
        return False
    accessURL = distribution_metadata.get('accessURL', '')
    return accessURL.startswith(ckan_site_url)