def publish(table, title, company, identifier, url=DEFAULT_AUTHORITY_URL):
    """ Create a layer & authority metadata for a postgis table
    @param table(string): PostGIS table name e.g. sid-*
    @param title(string): Human readable title for direct users of WFS
    @param company(string): the company name to add
    @param identifier(string): the identifier. NOTE: This is the same as the PostGIS table!
    @param url(string): the name of a URI for the company. Can be "http://SOME_URI"
    @returns: the XML string
    """
    log.debug("Publishing {0} as {1} and url {2}".format(company, identifier, url))
    if config.get("geoserver", "enable") == "true":
        endpoint = config.get("geoserver", "endpoint")
        username = config.get("geoserver", "username")
        password = config.get("geoserver", "password")
        data = message_add_layer(table, title)
        msg = rest_request(endpoint, username, password, "POST", data, ADD_LAYER_PATH)
        log.debug("Geoserver add layer response: %s" % msg)
        data = message_authority(company, identifier, url)
        # this call returns nothing
        rest_request(endpoint, username, password, "PUT", data,
                     PUBLISH_LAYER_PATH_TPL.format(table))
        return msg
    log.debug("Geoserver support is disabled!")
    return None  # no-op
def describefeaturetype(params):
    """WFS DescribeFeatureType interface
    @param params(dict): request headers
    @returns dictionary with { response, mimetype, error }
    """
    ## Mandatory parameters
    if "typename" not in params:
        return {"error": 1, "response": "Missing 'typeName'"}
    TYPENAME = params["typename"]
    FEATURES_FILE = os.path.join(config.get("path", "ows_template_dir"), "features.json")
    SID = None
    with open(FEATURES_FILE) as f:
        FEATURES = json.load(f)
        if TYPENAME in FEATURES:
            SID = FEATURES[TYPENAME]["sid"]
            TPL_FILE = FEATURES[TYPENAME]["template"]
    if not SID:
        return {"error": 1, "response": "TypeName %s not found in features.json" % TYPENAME}
    XSD_FILE = os.path.join(config.get("path", "ows_template_dir"), TPL_FILE + ".xsd")
    try:
        with open(XSD_FILE) as f:
            return {"error": 0, "response": f.read(), "mimetype": 'text/xml; charset=utf-8'}
    except IOError:
        return _error("Cannot open file %s.xsd" % SID)
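# Illustrative features.json layout inferred from the lookups in
# describefeaturetype(); the typename key and file names are hypothetical,
# only the "sid"/"template" fields come from the code above:
EXAMPLE_FEATURES = {
    "cobweb:sid-example": {
        "sid": "sid-example",       # survey id, doubles as the PostGIS table name
        "template": "sid-example",  # resolved to <template>.xsd under ows_template_dir
    },
}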
def getfeature(params):
    """WFS GetFeature interface
    @param params(dict): request headers
    @returns dictionary with { response, mimetype, error }
    """
    ## Mandatory parameters
    if "typename" not in params:
        return {"error": 1, "response": "Missing 'typeName'"}
    TYPENAME = params["typename"]
    ENDPOINT = config.get("ows", "endpoint")
    FEATURES_FILE = os.path.join(config.get("path", "ows_template_dir"), "features.json")
    ## Optional parameters
    OUTPUTFORMAT = params["outputformat"] if "outputformat" in params \
        else "text/xml; subtype=gml/3.1.1"
    SID = None
    TPL_FILE = None
    with open(FEATURES_FILE) as f:
        FEATURES = json.load(f)
        if TYPENAME in FEATURES:
            SID = FEATURES[TYPENAME]["sid"]
            TPL_FILE = FEATURES[TYPENAME]["template"]
    if not SID:
        return {"error": 1, "response": "featureType %s not found in features.json" % TYPENAME}
    if "test" in params and params["test"] == "1":
        uid = config.get("test", "test_uid")
    else:
        uid = config.get("path", "public_uid")
    fp = fs_provider.FsProvider(uid)
    # Join all records
    all_records = Records.create_records_cache(fp, "/records")
    # Filter by survey id
    filtered_records = Records.filter_data(all_records, "editor", uid, {"id": SID})
    # export to GeoJSON (as Python dictionary)
    res = Records.convertToGeoJSON(filtered_records)
    if res:
        if (OUTPUTFORMAT == "application/json") or (OUTPUTFORMAT == "json"):
            return {"error": 0, "response": res, "mimetype": "application/json"}
        # If not JSON assume XML and pipe through template
        APPSCHEMA_FILE = os.path.join(config.get("path", "ows_template_dir"), TPL_FILE)
        if os.path.isfile(APPSCHEMA_FILE):
            with open(APPSCHEMA_FILE) as f:
                res = template(f.read(), FC=res, OWS_ENDPOINT=ENDPOINT)
                return {"error": 0, "response": res, "mimetype": 'text/xml; charset=utf-8'}
        else:
            return {"error": 1, "response": "Template not found for %s" % SID}
    else:
        return {"error": 1, "response": "WFS GetFeature unsuccessful"}
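# Hedged call sketch for getfeature(); the typename is a hypothetical
# placeholder that would have to exist in features.json, and test=1 selects
# the configured test uid as in the tests later in this section:
res = getfeature({"typename": "cobweb:sid-example",
                  "outputformat": "json",
                  "test": "1"})
print res["error"]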
def getLogger(name, parent=None):
    """ Create a logger with some sane configuration
    Args:
        name (str): name of logger. Should be the name of the file.
        parent (str): name of parent logger to inherit its properties
    """
    if parent:
        # create child logger that inherits properties from its parent
        logger = logging.getLogger(parent + "." + name)
    else:
        # create parent logger with new properties
        logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    # create file handler which logs even debug messages
    #fh = logging.FileHandler(config.get("path","log_file"))
    fh = handlers.TimedRotatingFileHandler(config.get("path", "log_file"),
                                           when='midnight')
    fh.setLevel(logging.DEBUG)
    # create formatter and add it to the handler
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    # add the handler to the logger
    logger.addHandler(fh)
    return logger
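# Hedged usage sketch; the names mirror a real call made elsewhere in this
# section (logtool.getLogger("postgis", "pcapi.publish")). The returned logger
# writes to the midnight-rotating file handler attached above:
log = getLogger("postgis", "pcapi.publish")
log.debug("wired to the TimedRotatingFileHandler from getLogger")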
def test_get_feature(self):
    def post_rec(url, name):
        with open(os.path.join(config.get("test", "test_resources"), name), "r") as f:
            resp = self.app.post(url, params=f.read()).json
            self.assertEquals(resp['error'], 0)

    gf_url = '{0}?SERVICE=WFS&VERSION=1.1.0&REQUEST=GETFEATURE'.format(self.base_url)
    resp = self.app.get(gf_url)
    self.assertEquals(self._error(resp), "Missing 'typeName'")
    type_name = 'XXX'
    url = '{0}&typename={1}'.format(gf_url, type_name)
    resp = self.app.get(url)
    self.assertEquals(
        self._error(resp),
        'featureType {0} not found in features.json'.format(type_name))
    uid = config.get("test", "test_uid")

    # post initial record to ensure test directory is set up
    rname = 'record1'
    r_url = '/records/local/{0}/{1}'.format(uid, rname)
    post_rec(r_url, 'testfile-sd1.rec')
    # delete all records
    self.app.delete('/records/local/{0}//'.format(uid)).json

    # test no records
    ft = self._get_feature_types(1)[0].text
    url = '{0}&typename={1}&OUTPUTFORMAT=json&test=1'.format(gf_url, ft)
    resp = self.app.get(url).json
    self.assertEquals(len(resp['features']), 0)

    # post a record for sd1
    post_rec(r_url, 'testfile-sd1.rec')
    url = '{0}&typename={1}&OUTPUTFORMAT=json&test=1'.format(gf_url, ft)
    resp = self.app.get(url).json
    self.assertEquals(len(resp['features']), 1)
    self.assertEquals(resp['features'][0]['properties']['fields'][0]['val'], 'val1')
    # gml
    url = '{0}&typename={1}&OUTPUTFORMAT&test=1'.format(gf_url, ft)
    resp = self.app.get(url)
    # TODO: missing template
    #print resp

    # post a record for sd2
    post_rec(r_url, 'testfile-sd2.rec')
    ft = self._get_feature_types(2)[0].text
    url = '{0}&typename={1}&OUTPUTFORMAT=json&test=1'.format(gf_url, ft)
    resp = self.app.get(url).json
    self.assertEquals(len(resp['features']), 1)
    self.assertEquals(resp['features'][0]['properties']['fields'][0]['val'], 'val2')
    # gml
    url = '{0}&typename={1}&OUTPUTFORMAT&test=1'.format(gf_url, ft)
    resp = self.app.get(url)
def _create_rec(self, name):
    with open(
            os.path.join(config.get('test', 'test_resources'),
                         'record_filter', '{0}.json'.format(name)), 'r') as f:
        url = '{0}/{1}'.format(self.REC_PREFIX, name)
        resp = self.app.post(url, params=f.read()).json
        self.assertEquals(resp["error"], 0)
def create_rec(name):
    # helper meant to run inside a TestCase method, where `self`, `app`,
    # `provider` and `userid` are in scope
    with open(
            os.path.join(config.get('test', 'test_resources'),
                         'record_filter', '{0}.json'.format(name)), 'r') as f:
        url = '/records/{0}/{1}/{2}'.format(provider, userid, name)
        resp = app.post(url, params=f.read()).json
        self.assertEquals(resp["error"], 0)
def get_surveys(uid):
    """ Fetch all surveys a user is registered for.
    @param uid(string): the SAML UUID of the user
    @returns: A class that represents all the surveys found or None
    """
    log.debug("Querying surveys for {0}".format(uid))
    endpoint = config.get("geonetwork", "endpoint")
    username = config.get("geonetwork", "username")
    password = config.get("geonetwork", "password")
    # just in case, since urllib2 is not threadsafe according to docs
    with lock:
        msg = msg_get_surveys(uid)
        resj = get_request(endpoint, username, password, msg)
        res = json.loads(resj)
    return Surveys(res)
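# Hedged usage sketch; the UUID is a made-up placeholder and only the
# Surveys constructor is visible in this excerpt:
surveys = get_surveys("00000000-0000-0000-0000-000000000000")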
def copy_to_public_folder(self, path):
    """ Copies file to public folder as configured in pcapi.ini
    returns destination path (should be the same as path)
    """
    public_uid = config.get("path", "public_uid")
    pubfs = FsProvider(public_uid)
    with open(self.realpath(path)) as fp:
        return pubfs.put_file(path, fp, True)
def __init__(self, userid):
    """ Args:
            userid (string): Userid (aka request key) of user.
            Only valid emails or hexnums are allowed
    """
    if not (FsProvider.EMAIL_RE.match(userid) or FsProvider.HEX_RE.match(userid)):
        raise FsException("Illegal userid: %s -- should be either HEX or EMAIL" % userid)
    self.userid = userid
    # Full path pointing to the user's sandbox *directory*
    self.basedir = config.get("path", "data_dir") + "/" + userid
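# Hedged sketch of the validation above; "testuser" is the userid accepted by
# the __main__ block later in this section, the failing value is hypothetical:
fp = FsProvider("testuser")
try:
    FsProvider("not a valid userid!")   # neither HEX nor EMAIL: raises
except FsException as e:
    print e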
def upgrade_all_data():
    # normally ~/.pcapi/data
    data_dir = config.get("path", "data_dir")
    for f in find_json(data_dir):
        j = json.load(open(f))
        gj = rec2geojson(j)
        if not gj:
            print "Ignoring %s which is already converted." % f
        else:
            print "Overwriting new version of %s" % f
            with open(f, 'w') as fp:
                json.dump(gj, fp)
def getcapabilities(params):
    """WFS GetCapabilities interface
    @param params(dict): request headers
    @returns dictionary with { response, mimetype, error }
    """
    FEATURES = None
    # Check mandatory arguments
    ENDPOINT = config.get("ows", "endpoint")
    FEATURES_FILE = os.path.join(config.get("path", "ows_template_dir"), "features.json")
    GETCAPABILITIES_FILE = os.path.join(
        config.get("path", "ows_template_dir"),
        "wfs_getcapabilities_response-%s.tpl" % params["version"])
    with open(FEATURES_FILE) as f:
        FEATURES = json.load(f)
    with open(GETCAPABILITIES_FILE) as f:
        res = template(f.read(), OWS_ENDPOINT=ENDPOINT, WFS_FEATURES=FEATURES)
    if res:
        return {"error": 0, "response": res, "mimetype": 'text/xml; charset=utf-8'}
    else:
        return {"error": 1, "response": "WFS GetCapabilities unsuccessful"}
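# Hedged call sketch; VERSION=1.1.0 matches the WFS version used by the tests
# in this section, and selects wfs_getcapabilities_response-1.1.0.tpl:
caps = getcapabilities({"version": "1.1.0"})
print caps["mimetype"]   # 'text/xml; charset=utf-8' on success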
def test_post_editor(self):
    """ post an editor """
    url = '/fs/{0}/{1}/editors/test.json'.format(provider, userid)
    editor = editorfile.read()
    with open(os.path.join(config.get("test", "test_resources"), 'form.json'), "r") as f:
        resp = app.post(url, params=f.read()).json
        self.assertEquals(resp["error"], 0)
    # Contents of /editors/ should be "/editors/test.json" (always receives absolute paths)
    resp = app.get('/fs/{0}/{1}/editors'.format(provider, userid)).json
    #print `resp`
    self.assertTrue("/editors/test.json" in resp["metadata"])
def toPostGIS(data, userid):
    """ Export "/data.json" to configured PostGIS database.
    Assumes an up-to-date data.json.
    Returns: JSON object with status, new tablename, message
    """
    # If an email is used for userid we need to change `@' and `.' to something
    # valid for Postgres tables
    tablename = userid.replace('@', '_at_').replace('.', '_dot_')
    host = config.get("pg", "database_host")
    database = config.get("pg", "database_database")
    user = config.get("pg", "database_user")
    password = config.get("pg", "database_password")
    target = TARGET_POSTGIS.format(USER=user, DATABASE=database, HOST=host,
                                   PASSWORD=password)
    source = data
    call_array = [OGR2OGR, "-overwrite", "-update", "-f", "PostgreSQL", target,
                  source, "OGRGeoJSON", "-nln", tablename]
    LOG.debug("OGR export: " + repr(call_array))
    status = subprocess.call(call_array)
    if status:
        return {"error": status, "msg": "OGR Export failed"}
    return {"error": 0, "table": tablename,
            "msg": "Successfully exported to {0}".format(tablename)}
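# Hedged usage sketch; the GeoJSON path is a hypothetical placeholder. The
# sanitised table name follows from the replace() calls above:
res = toPostGIS("/tmp/data.json", "someone@example.com")
print res["msg"]   # table would be "someone_at_example_dot_com" on success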
def test_get_invalid_records(self):
    url = '/records/{0}/{1}//'.format(provider, userid)
    app.delete(url).json
    # post invalid record
    url = '/records/{0}/{1}/myrecord'.format(provider, userid)
    with open(os.path.join(config.get("test", "test_resources"), 'invalid.rec'), "r") as f:
        resp = app.post(url, params=f.read()).json
        self.assertEquals(resp["error"], 0)
    url = '/records/{0}/{1}/'.format(provider, userid)
    resp = app.get(url).json
    # get all returns 0 records but no error
    self.assertEquals(len(resp["records"]), 0)
    self.assertEquals(resp["error"], 0)
def _verify_token():
    """ Get user id from token file and validate it. If it is not there or
    invalid generate a new one.
    """
    global userid
    tokenfile = os.sep.join((config.get('test', 'test_resources'), 'token.txt'))
    # read any previously saved token (opening with 'w+' would truncate it)
    token = ''
    if os.path.exists(tokenfile):
        with open(tokenfile) as f:
            token = f.read()
    if len(token) > 0:
        resp = app.get('/auth/dropbox/{0}'.format(token)).json
        if resp["state"] == 1:
            userid = token
    if userid is None:
        def get_json():
            obj = None
            try:
                f = urllib2.urlopen('http://127.0.0.1:8080/auth/dropbox?async=true')
                obj = json.loads(f.read())
            except URLError:
                print 'Run python pcapi_devel.py, press Ret to continue'
                raw_input()
                obj = get_json()
            return obj
        obj = get_json()
        userid = obj['userid']
        print 'Goto {0} press Ret to continue'.format(obj['url'])
        raw_input()
        print 'Using {0}'.format(userid)
        with open(tokenfile, 'w') as f:
            f.write(userid)
    return userid
def test_image_upload(self):
    url = '/records/{0}/{1}//'.format(provider, userid)
    app.delete(url).json
    # create new record
    rname = 'myrecord'
    url = '/records/{0}/{1}/{2}'.format(provider, userid, rname)
    resp = app.post(url, params=localfile.read()).json
    self.assertEquals(resp['error'], 0)
    # post binary image
    bfname = 'image.jpg'
    resp = app.post('{0}/{1}'.format(url, bfname),
                    upload_files=[('file', imagefilepath)]).json
    self.assertEquals(resp['error'], 0)
    self.assertEquals(resp['msg'], 'File uploaded')
    self.assertEquals(resp['path'], '/records/{0}/{1}'.format(rname, bfname))
    # post base64 string (based on encoding of test image)
    sfname = 'imageb64.jpg'
    with open(imagefilepath, 'r') as f:
        out = base64.b64encode(f.read())
    resp = app.post('{0}/{1}?base64=true'.format(url, sfname), params=out).json
    self.assertEquals(resp['error'], 0)
    self.assertEquals(resp['msg'], 'File uploaded')
    self.assertEquals(resp['path'], '/records/{0}/{1}'.format(rname, sfname))
    # verify both files have the same size
    d = os.path.join(config.get("path", 'data_dir'), userid, 'records', rname)
    self.assertEquals(os.stat(os.path.join(d, bfname)).st_size,
                      os.stat(os.path.join(d, sfname)).st_size)
import psycopg2
import psycopg2.extensions

from pcapi.fs_provider import FsProvider
from pcapi.publish import mapping, geoserver

# Needed for transparent unicode support
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

from pcapi import config, logtool

log = logtool.getLogger("postgis", "pcapi.publish")

# full path of PostGIS database
host = config.get("pg", "database_host")
database = config.get("pg", "database_database")
user = config.get("pg", "database_user")
password = config.get("pg", "database_password")

# log user@host rather than the password
log.debug("Starting connection with PostGIS database: {0}@{1}".format(user, host))

# When host is not supplied then default to peer (UNIX sockets) authentication
conn_string = "dbname={database} user={user}".format(database=database, user=user)
if host:
    conn_string += " host={host} password={password}".format(host=host, password=password)
# NOTE: mod_wsgi could initialize these global variables in *different*
# processes for each request.
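# Hedged continuation sketch: psycopg2 accepts the keyword DSN built above,
# so the connection itself would look like this (not shown in the excerpt):
conn = psycopg2.connect(conn_string)
cursor = conn.cursor()
cursor.execute("SELECT 1")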
import os
import sys
import unittest

from webtest import TestApp

## Also add libraries to the python path
pwd = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(pwd, '../'))  # to find the classes to test

from pcapi.server import application
from pcapi import config

userid = "*****@*****.**"
# where to get records ala "recordXXX.json" from
envsys_records_dir = config.get("test", "records_dir")
# How many records
records_num = 15

# Application
app = TestApp(application)
provider = 'local'

class TestOGRExport(unittest.TestCase):
    """
    1) POST all test records
    2) EXPORT to postgis /records/local/USER/?filter=database&ftl
    """
# Database connection wrapper
from pcapi import config, logtool

log = logtool.getLogger("connection", "pcapi")

use_sqlite = config.has_option("path", "sessionsdb")
if use_sqlite:
    # using sqlite
    #from pysqlite2.dbapi2 import OperationalError
    import pysqlite2.dbapi2 as db
    # full path of sqlite3 database
    db_path = config.get("path", "sessionsdb")
    log.info('Connect to sqlite using {0}'.format(db_path))
    # creating/connecting the test_db.
    # "check_same_thread" turns off some false alarms from sqlite3.
    # NOTE: mod_wsgi runs these global variables in *different* processes for each request.
    conn = db.connect(db_path, check_same_thread=False)
else:
    # try postgres
    import psycopg2
    db = 'dbname={0} user={1} password={2} host={3} port={4}'.format(
        config.get("pg", "database_database"),
        config.get("pg", "database_user"),
        config.get("pg", "database_password"),
        config.get("pg", "database_host"),
        config.get("pg", "database_port"))
    log.info('Connect to postgres using {0}'.format(db))
    conn = psycopg2.connect(db)
import re
import time

from pcapi import config, logtool
from pcapi.db import tokens
from dropbox import client, session, rest
from urlparse import urlsplit, urlunsplit

### Static Variables ###
APP_KEY = config.get("dropbox", "app_key")
APP_SECRET = config.get("dropbox", "app_secret")
ACCESS_TYPE = 'app_folder'  # should be 'dropbox' or 'app_folder' as configured for your app
STATE_CODES = {"verify_token": 0, "connected": 1, "non_authorized": 2}
# CAPABILITIES that this provider supports
CAPABILITIES = ["oauth", "search", "synchronize", "delete"]

log = logtool.getLogger("DropboxProvider", "pcapi")
#########################

class Metadata(object):
    """ metadata of files/dirs as returned from dropbox. This is plain filesystem
    metadata and NOT high-level pcapi metadata for records or editors """
    def __init__(self, md):
        self.md = md

    def __str__(self):
        return repr(self.md)
def file_delete(self, path):
    """ Delete file and return parsed metadata """
    m = self.metadata(path)
    f = self.realpath(path)
    if os.path.isdir(f):
        shutil.rmtree(f)
    else:
        os.remove(f)
    return Metadata(m)

if __name__ == "__main__":
    userid = "testuser"
    fp = FsProvider(userid)
    # list metadata
    fp.metadata("/")
    # upload
    uploadData = open(config.get("test", "textfile"))
    print "put_file -> " + fp.put_file("/Myfile.test", uploadData)
    #### now list directory ##
    print "================================"
    print "ls -> " + repr(fp.ls("/"))
    #### now delete the file ##
    print "================================"
log.debug("Publishing {0} as {1} and url {2}".format(COMPANY,identifier,url)) if ( config.get("geoserver","enable") == "true"): endpoint = config.get("geoserver","endpoint") username = config.get("geoserver","username") password = config.get("geoserver","password") data = message_add_layer(table, title) msg = rest_request(endpoint, username, password, "POST", data, ADD_LAYER_PATH) log.debug("Geoserver add layer reponse: %s" % msg) data = message_authority(company, identifier, url) # this call returns nothing rest_request(endpoint, username, password, "PUT", data, PUBLISH_LAYER_PATH_TPL.format(table)) return msg log.debug("Geoserver support is disabled!") return None #no-op if __name__ == "__main__": table = "eo" title = "Test Title" create_msg = message_add_layer(table, title) endpoint = config.get("geoserver","endpoint") username = config.get("geoserver","username") password = config.get("geoserver","password") pub_msg = message_authority("cobweb", "test-id2", DEFAULT_AUTHORITY_URL) print "Create: %s" % create_msg print "Publish: %s" % pub_msg print "test add layer : %s " % \ rest_request(endpoint, username, password, "POST", create_msg, ADD_LAYER_PATH) print "test publish : %s " % \ rest_request(endpoint, username, password, "PUT", pub_msg, PUBLISH_LAYER_PATH_TPL.format(table))
""" Export filter for several formats supported by OGR. Implemented as a singleton. It is currently implemented as wrapper around ogr2ogr to facilitate prototype and easy of use. Prerequisites: ogr2ogr installed and specified under resources/config.ini file """ import subprocess from pcapi import logtool, config LOG = logtool.getLogger("ogr", "filters") OGR2OGR = config.get("ogr", "ogr2ogr") TARGET_POSTGIS = "PG:user={USER} dbname={DATABASE} host={HOST} password={PASSWORD}" def toPostGIS(data, userid): """ Export "/data.json" to configured PostGIS database. Assumes an up-to-date data.json. Returns: JSON object with status, new tablename, message """ # If an email is used for userid we need to change `@' and `.' to something valid # for Postgres tables tablename = userid.replace('@','_at_').replace('.','_dot_') host = config.get("pg","database_host") database = config.get("pg","database_database") user = config.get("pg","database_user") password = config.get("pg","database_password") target = TARGET_POSTGIS.format( USER=user, DATABASE=database, HOST=host, PASSWORD=password ) source = data call_array = [ OGR2OGR, "-overwrite", "-update", "-f", "PostgreSQL", target, \
""" import os import sys import unittest from webtest import TestApp ## Also libraries to the python path pwd = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(pwd, '../')) # to find the classes to test from pcapi.server import application from pcapi import config userid = "*****@*****.**" textfilepath = config.get("test", "testfile") imagefilepath = config.get("test", "imagefile") editorfilepath = config.get("test", "editorfile") # a record file (json) localfile = open ( textfilepath , "r") # am editor file (html5) editorfile = open ( editorfilepath , "r") # Application app = TestApp(application) provider = 'local' class TestAuthoringTool(unittest.TestCase): """ Test initial creation with authoring tool:
import os

import pysqlite2.dbapi2 as db
import psycopg2

from pcapi import config
from pcapi.db import tokens

# sqlite db
pcapi_dir = os.path.join(os.environ['HOME'], '.pcapi',)
path = os.path.join(pcapi_dir, 'data', 'sessions.db')
sconn = db.connect(path)

# postgres
print config.get("pg", "database_database")
pg = 'dbname={0} user={1} password={2} host={3} port={4}'.format(
    config.get("pg", "database_database"),
    config.get("pg", "database_user"),
    config.get("pg", "database_password"),
    config.get("pg", "database_host"),
    config.get("pg", "database_port"))
pconn = psycopg2.connect(pg)

c = sconn.cursor()
c.execute('SELECT userid, reqsec, acckey, accsec, dt FROM tokens ORDER BY id')
INSERT = ("INSERT INTO tokens(userid,reqsec,acckey,accsec,dt) "
          "SELECT %s,%s,%s,%s,%s "
          "WHERE NOT EXISTS (SELECT 1 FROM tokens WHERE userid = %s)")
for t in c.fetchall():
    pc = pconn.cursor()
    userid = t[0]
    print 'Migrating ', userid
# Database connection wrapper
try:
    import pysqlite2.dbapi2 as db
    from pysqlite2.dbapi2 import OperationalError
except ImportError:
    import sqlite3.dbapi2 as db
    from sqlite3.dbapi2 import OperationalError

from pcapi import config, logtool

log = logtool.getLogger("connection", "pcapi")

# full path of sqlite3 database
DB = config.get("path", "sessionsdb")
log.debug(DB)

# creating/connecting the test_db.
# "check_same_thread" turns off some false alarms from sqlite3.
# NOTE: mod_wsgi runs these global variables in *different* processes for each request.
con = db.connect(DB, check_same_thread=False)

def execute(sql, args=()):
    """ Execute *sql* statement using list *args* for sql substitution.

    PC-API was meant to be fault tolerant to all disk/database faults. This
    function tries to handle all possible errors by first regenerating missing
    tables and falling back to using a memory database if all else fails.

    Args:
        sql: SQL statement
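# Hedged usage sketch for execute(); the table and values are hypothetical,
# and sqlite's "?" placeholders match the *args* substitution described above:
execute("CREATE TABLE IF NOT EXISTS kv (k TEXT, v TEXT)")
execute("INSERT INTO kv VALUES (?, ?)", ("somekey", "somevalue"))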
import os
import sys
import unittest

from webtest import TestApp

## Also add libraries to the python path
pwd = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, os.path.join(pwd, '../'))  # takes precedence over ~/.local

from pcapi.server import application
from pcapi import config

userid = "*****@*****.**"
# where to get resources and assets
test_dir = config.get("test", "test_resources")
test1 = os.path.join(test_dir, "test1")
test2 = os.path.join(test_dir, "test 2")

# Application
app = TestApp(application)
provider = 'local'

class TestPublish(unittest.TestCase):
    """ Test Process:
    1) Upload a Form:
       POST /editors/local/uid/SID.edtr
    2) Upload a test record with asset to the local provider at any order
       POST /fs/local/uid/records/TR/record.json
       POST /fs/local/uid/records/TR/asset1.img
def runserver():
    bottle.run(host=config.get("server", "host"),
               port=config.get("server", "port"),
               debug=config.getboolean("server", "debug"))
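# Hedged sketch of the [server] section of pcapi.ini that runserver() reads;
# host/port values are illustrative, not taken from the source:
#
#   [server]
#   host = 127.0.0.1
#   port = 8080
#   debug = true
if __name__ == "__main__":
    runserver()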
from pcapi.form_validator import FormValidator, Editor

# User ID should exist in DATABASE
userid = None
#userid='aaabbbccbbdd'

# to test path ending in a directory
dirpath = "lev1/lev2/"
# to test uploading an editor
editorname = "myed.edtr"
# to test uploading a record
recordname = "myrec.rec"

textfilepath = config.get("test", "testfile")
imagefilepath = config.get("test", "imagefile")
editorfilepath = config.get("test", "editorfile")

# the contents of the file are here (full path to local file)
localfile = open(textfilepath, "r")
#schemafile = open ( schemafilepath , "r")

# Application
app = TestApp(application)

def _verify_token():
    """ Get user id from token file and validate it. If it is not there or invalid
""" public_uid = config.get("path", "public_uid") pubfs = FsProvider(public_uid) with open(self.realpath(path)) as fp: return pubfs.put_file(path, fp, True) def file_delete(self, path): """ Delete file and return parsed metadata""" m = self.metadata(path) f = self.realpath(path) if (os.path.isdir(f)): shutil.rmtree(f) else: os.remove(f) return Metadata(m) if __name__ == "__main__": userid = "testuser" fp = FsProvider(userid) #list metadata fp.metadata("/") # upload uploadData = open(config.get("test", "textfile")) print "put_file -> " + fp.put_file("/Myfile.test", uploadData) #### now list directory ## print "================================" print "ls -> " + ` fp.ls("/") ` #### now delete the file ## print "================================"