Example #1
def dbBounds(bound):
    # bound is (min_x, min_y, max_x, max_y); visit the corners in ring
    # order and repeat the first point so the exterior ring is closed.
    # srid corrected from 4336 to 4326 (WGS84), as in the other examples.
    return ppygis.Polygon([
        ppygis.LineString([
            ppygis.Point(bound[0], bound[1], 0, srid=4326),
            ppygis.Point(bound[0], bound[3], 0, srid=4326),
            ppygis.Point(bound[2], bound[3], 0, srid=4326),
            ppygis.Point(bound[2], bound[1], 0, srid=4326),
            ppygis.Point(bound[0], bound[1], 0, srid=4326)
        ])
    ]).write_ewkb()
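A minimal usage sketch for the EWKB value dbBounds returns, assuming a psycopg2 connection and a hypothetical areas table with a geometry column (both names are illustrative, not from the original code):

import psycopg2

connection = psycopg2.connect('dbname=gis')  # hypothetical DSN
cursor = connection.cursor()
# bound order is (min_lon, min_lat, max_lon, max_lat)
cursor.execute("INSERT INTO areas (geometry) VALUES (%s)",
               (dbBounds((24.0, 60.0, 25.0, 61.0)),))
connection.commit()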
Example #2
 def savePlan(self, plan):
     cursor = self.cursor()
     if 'journey_id' not in plan or plan['journey_id'] is None or plan['journey_id'] == '':
         raise BadRequestException('plan journey_id is missing')
     if 'timestamp' not in plan or plan['timestamp'] is None or plan['timestamp'] == '':
         raise BadRequestException('plan timestamp is missing')
     if 'coordinates' not in plan or not isinstance(plan['coordinates'], list):
         raise BadRequestException('plan coordinates are missing')
     linestring = []
     for point in plan['coordinates']:
         # as per http://geojson.org/geojson-spec.html
         longitude = float(point[0])
         latitude = float(point[1])
         if len(point) > 2:
             altitude = float(point[2])
         else:
             altitude = 0
         linestring.append(
             ppygis.Point(longitude, latitude, altitude, srid=4326))
     cursor.execute(
         "INSERT INTO plan (geometry, journey_id, timestamp) VALUES (%s, %s, %s) RETURNING plan_id",
         (ppygis.LineString(
             linestring, srid=4326), plan['journey_id'], plan['timestamp']))
     plan_id = cursor.fetchone()[0]
     self.connection.commit()
     return plan_id
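A sketch of the payload savePlan expects; coordinates follow the GeoJSON [longitude, latitude, altitude] order cited in the comment above, and the id and timestamp values here are invented:

plan = {
    'journey_id': 'journey-42',           # hypothetical identifier
    'timestamp': '2015-06-12 11:50:11',   # hypothetical timestamp
    'coordinates': [
        [25.47, 65.01],                   # lon, lat; altitude defaults to 0
        [25.48, 65.02, 12.5],             # lon, lat, altitude
    ],
}
plan_id = db.savePlan(plan)  # `db` stands for whatever object defines savePlan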
Example #3
 def saveTraces(self, traces):
     if not isinstance(traces, list):
         traces = [traces]
     cursor = self.cursor()
     for trace in traces:
         if 'journey_id' not in trace or trace['journey_id'] is None or trace['journey_id'] == '':
             raise BadRequestException('trace journey_id is missing')
         if 'timestamp' not in trace or trace['timestamp'] is None or trace['timestamp'] == '':
             raise BadRequestException('trace timestamp is missing')
         if 'latitude' not in trace or trace['latitude'] is None or trace['latitude'] == '':
             raise BadRequestException('trace latitude is missing')
         if 'longitude' not in trace or trace['longitude'] is None or trace['longitude'] == '':
             raise BadRequestException('trace longitude is missing')
         if 'altitude' not in trace or trace['altitude'] is None or trace['altitude'] == '':
             trace['altitude'] = 0
         cursor.execute(
             "INSERT INTO trace (geometry, journey_id, timestamp) VALUES (%s, %s, %s)",
             (ppygis.Point(
                 float(trace['longitude']),
                 float(trace['latitude']),
                 float(trace['altitude']),
                 srid=4326), trace['journey_id'], trace['timestamp']))
     self.connection.commit()
     return
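The presence checks repeated across savePlan and saveTraces could be factored into a single helper; a sketch, assuming BadRequestException is in scope (the helper name require_field is invented):

def require_field(record, field, what):
    # Raise if the field is absent, None, or an empty string.
    if field not in record or record[field] is None or record[field] == '':
        raise BadRequestException('%s %s is missing' % (what, field))

A call such as require_field(trace, 'journey_id', 'trace') would then replace each multi-line check.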
Example #4
    def insert(self, cursor, oid, userid, osver, mkMod, iWth, iHht, ipA, fLen):

        nSat = 0
        if "GPS" in self.ls:
            nSat = self.ls[self.ls.index('(') + 1:self.ls.index(')')]

        ipoint = ppygis.Point(self.lon, self.lat)
        ipoint.srid = SRID
        print self.rid
        cursor.execute(OBS_INSERT,
                       (self.rid, nSat, self.acc, self.ls, osver, mkMod, iWth,
                        iHht, fLen, self.vah, self.vav, self.ts, self.azi,
                        self.pth, self.rll, self.temp, self.press, ipoint))

        mk = str(self.mkX) + ' ' + str(self.mkY)

        imURL = ipA + self.fn
        print imURL
        #PL+","+FL+","+MRK+","+RID
        cursor.execute(IMG_INSERT, (self.pl, imURL, mk, self.rid))

        #OID+","+RID
        cursor.execute(REC_INSERT, (oid, self.rid))

        #RID, USER_ID
        cursor.execute(RU_INSERT, (self.rid, userid))
Example #5
def tweet_to_insert_string(tweet, table, psql_cursor):
    if tweet['coordinates'] is not None:
        lat = tweet['coordinates']['coordinates'][1]
        lon = tweet['coordinates']['coordinates'][0]
    else:
        lat = lon = 0
    coordinates = ppygis.Point(lon, lat, srid=4326)
    created_at = parse_date(tweet['created_at'])
    hstore_user = make_hstore(tweet['user'])
    hstore_place = make_hstore(tweet['place'])
    hstore_entities = make_hstore(tweet['entities'])

    # Sometimes there's no lang, or filter_level. Not sure why. Fix it I guess?
    if 'filter_level' not in tweet:
        tweet['filter_level'] = ''
    if 'lang' not in tweet:
        tweet['lang'] = ''

    insert_str = psql_cursor.mogrify(
        "INSERT INTO " + table + " (contributors, coordinates, created_at, "
        "entities, favorite_count, filter_level, lang, id, id_str, "
        "in_reply_to_screen_name, in_reply_to_status_id, "
        "in_reply_to_status_id_str, in_reply_to_user_id, "
        "in_reply_to_user_id_str, place, retweet_count, source, "
        "twitter_user, text, user_screen_name) "
        "VALUES (" + ','.join(['%s' for key in twitter_data_types]) + ")",
        (tweet['contributors'], coordinates, created_at, hstore_entities,
         tweet['favorite_count'], tweet['filter_level'], tweet['lang'],
         tweet['id'], tweet['id_str'], tweet['in_reply_to_screen_name'],
         tweet['in_reply_to_status_id'], tweet['in_reply_to_status_id_str'],
         tweet['in_reply_to_user_id'], tweet['in_reply_to_user_id_str'],
         hstore_place, tweet['retweet_count'], tweet['source'], hstore_user,
         tweet['text'], tweet['user']['screen_name']))
    return insert_str
Example #6
    def on_success(self, data):
        self.log("Incoming tweet!")
        #Check the tweet contains needed info
        if 'text' in data:
            self.log(data['text'].encode('utf-8'))
        if 'id' in data:
            id = data['id']
        else:
            self.log("No ID found in tweet data, ignoring!")
            return
        if 'created_at' in data:
            date = self.stringToDate(data['created_at'])
        else:
            self.log("Created_at missing")
            return

        #Dump the tweet to a json string for storage in Postgres' Json type
        json = simplejson.dumps(data)

        #If coords are populated in the tweet, use them, otherwise ignore.
        if 'coordinates' in data:
            if data['coordinates'] is not None:
                self.log("It's Geotagged!")
                c = data['coordinates']
                #Create the proper ppygis object and set srid so that it gets formatted for postgres properly
                point = ppygis.Point(c['coordinates'][0], c['coordinates'][1])
                point.srid = self.srid
                try:
                    self.cur.execute(self.insert_coords_sql,
                                     (id, json, point, date))
                except Exception as e:
                    self.log("Exception encountered wrtiting tweet")
                    self.log(e)
                return

        #Write non geotagged tweet
        try:
            self.cur.execute(self.insert_sql, (id, json, date))
        except Exception as e:
            self.log("Exception encountered wrtiting tweet")
            self.log(e)
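The insert_coords_sql and insert_sql templates are defined elsewhere on this listener; a plausible shape for them, assuming a tweet table with id, payload, geom, and created columns (all of these names are guesses, not the original schema):

# hypothetical templates, set e.g. in the listener's __init__
self.insert_coords_sql = ("INSERT INTO tweet (id, payload, geom, created) "
                          "VALUES (%s, %s, %s, %s)")
self.insert_sql = ("INSERT INTO tweet (id, payload, created) "
                   "VALUES (%s, %s, %s)")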
Example #7
 def verwerkTweets(self, data, naam):
     ntweets = len(data)
     sql = ''
     if ntweets > 50:
         ngeotagged = 0
         ngeotaggedA = 0
         locatedTweets = []
         print ""
         i = 0
         for tweet in data:
             i = i + 1
              if tweet['coordinates'] is not None:
                 ngeotagged = ngeotagged + 1
                 coordinates = tweet['coordinates']['coordinates']
                 lat = float(coordinates[1])
                 lon = float(coordinates[0])
                 if (lat >= ONDER and lat <= BOVEN) and (lon >= LINKS
                                                         and lon <= RECHTS):
                     ngeotaggedA = ngeotaggedA + 1
                     datumtijd = datetime.datetime.strptime(
                         tweet['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
                      locatedTweets.append(
                          (tweet['id_str'], naam, datumtijd,
                           # PostGIS/ppygis points take x (longitude) first
                           ppygis.Point(lon, lat).getquoted(), lat, lon,
                           datumtijd.date()))
         print "Tweet %d / %d, geotagged: %d, in Amsterdam: %d" % (
             i, ntweets, ngeotagged, ngeotaggedA)
         # float() must wrap the numerator, not the integer division result
         if ngeotaggedA > 5 and float(ngeotaggedA) / ngeotagged < 0.2:
             for tweet in locatedTweets:
                 sql = "INSERT INTO scraped_tweets (tweet_id, tweet_name, tijddatum, the_geom, lat, lon, datum) VALUES ('%s', '%s', '%s', %s, %f, %f, '%s')" % tweet
                 self.kursor.execute(sql)
             # mark the user as processed once, after the inserts
             sql = "UPDATE users SET processed=true WHERE name='%s'" % naam
             self.kursor.execute(sql)
         else:
             sql = "UPDATE users SET useless=true WHERE name='%s'" % naam
             self.kursor.execute(sql)
     else:
         sql = "UPDATE users SET useless=true WHERE name='%s'" % naam
         self.kursor.execute(sql)
     self.kursor.connection.commit()
Example #8
def instagram_to_insert_string(instagram, collection, psql_cursor):
    if instagram['caption'] is not None:
        caption_from_username = instagram['caption']['from']['username']
        caption_id = int(instagram['caption']['id'])
        caption_text = instagram['caption']['text']
    else:
        caption_from_username = caption_id = caption_text = None
    comments_count = instagram['comments']['count']
    created_time = datetime.datetime.fromtimestamp(
        int(instagram['created_time']))

    filter = instagram['filter']
    id = instagram['_id']
    image_standard_res_url = instagram['images']['standard_resolution']['url']
    likes_count = instagram['likes']['count']
    link = instagram['link']

    lat = float(instagram['location']['latitude'])
    lon = float(instagram['location']['longitude'])
    location = ppygis.Point(lon, lat, srid=4326)

    tags = instagram['tags']
    type = instagram['type']
    instagram_user = make_hstore(instagram['user'])
    user_username = instagram['user']['username']
    user_id = int(instagram['user']['id'])

    insert_str = psql_cursor.mogrify(
        "INSERT INTO " + collection + "(caption_from_username," +
        "caption_id, caption_text, comments_count, created_time, filter, id," +
        "image_standard_res_url, likes_count, link, location, tags, type," +
        "instagram_user, user_username, user_id) " + "VALUES (" +
        ','.join(['%s' for key in instagram_data_types]) + ")",
        (caption_from_username, caption_id, caption_text, comments_count,
         created_time, filter, id, image_standard_res_url, likes_count, link,
         location, tags, type, instagram_user, user_username, user_id))
    return insert_str
Example #9
def insert_nodes(nn):
    global nnc
    for n in nn:
        # Point positional arguments are (x, y, z, m, srid):
        # longitude, latitude, no z or m, WGS84
        c.execute(
            'insert into nodes values (%s,%s,%s,%s,%s,%s,%s,%s)',
            (n.id, n.version, n.user_id, n.timestamp, n.changeset_id,
             n.visible, n.tags, ppygis.Point(n.lon, n.lat, None, None, 4326)))
        cursor_nodememstore.execute(
            "insert into nodes values (?,?,?)",
            (n.id, n.timestamp.strftime("%Y%m%d%H%M%S"), n.version))
    conn.commit()
    conn_nodememstore.commit()
    nnc += len(nn)
    del nn[:]
Example #10
def pointsFromDb(gis_points):
    result = []
    for point in gis_points.points:
        result.append(ppygis.Point(point.x, point.y, point.z, srid=4326))
    return result
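pointsFromDb is the read-side counterpart of the writers above; a sketch of feeding it, assuming the geometry was stored through ppygis and is decoded with ppygis.Geometry.read_ewkb (the table and column names are illustrative):

cursor.execute("SELECT geometry FROM plan WHERE plan_id = %s", (plan_id,))
geometry = ppygis.Geometry.read_ewkb(cursor.fetchone()[0])  # -> LineString
points = pointsFromDb(geometry)  # back to a plain list of ppygis.Point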
Example #11
 if (input_year is None and y == '2011') or (input_year is not None and input_year == '2011' and y == '2011'):
   # Read locations file and insert into DB.  Clear DB first
   pp("\n[%s] Importing to stations." % y)
   db.execute("TRUNCATE TABLE stations_" + y)
   committed = conn.commit()
           
   locations_file = os.path.join(path, '../data/2011_Station_Locations.csv')
   reader = csv.reader(open(locations_file, 'rU'), delimiter=',', dialect=csv.excel_tab)
   row_count = 0
   for row in reader:
     if row_count > 0:
         if row[0]:
           # terminal_id character varying(16), common_name character varying(128), 
           # station character varying(128), lat numeric, lon numeric, install_date date,
           db.execute("INSERT INTO stations_" + y + " (terminal_id, common_name, station, lat, lon, install_date, location_geom) VALUES (%s, %s, %s, %s, %s, %s, %s)" ,
             (ct(row[0]), ct(row[1]), ct(row[2]), float(row[3]), float(row[4]), dt(row[5], 'date'), ppygis.Point(float(row[4]), float(row[3]), srid=4326)))
           committed = conn.commit()
           
     row_count += 1
     
   pp("\n[%s] Commited %s rows to stations.\n" % (y, row_count))
   
   
   # Read casual_subscriptions file and insert into DB.  Clear DB first
   """
   pp("\n[%s] Importing to casual_subscriptions." % y)
   db.execute("TRUNCATE TABLE casual_subscriptions_" + y)
   committed = conn.commit()
   
   locations_file = os.path.join(path, '../data/NRMN_2011_reports/Casual_subscriptions_2011_Season.csv')
   reader = csv.reader(open(locations_file, 'rU'), delimiter=',', dialect=csv.excel_tab)
Example #12
def export(path):
    try:
        import config, json
        file = open(config.get("path", "data_dir") + "/" + path, 'r')
        flooddata = file.read()
        geojflood = json.loads(flooddata)

        fts = geojflood[FTS]
        plat = -1000
        plon = -1000

        img = []
        polyCoord = []

        for feat in fts:
            geo = feat[GEOM]
            coord = geo[COORD]
            if geo[TYPE] == POINT:

                if plat == -1000:

                    plat = coord[0]
                    plon = coord[1]
                    timeSt = feat[PROP][TIMES]
                else:
                    prop = feat[PROP]

                    marker = prop.get(MK)

                    if marker is None:
                        mkX = ''
                        mkY = ''
                    else:
                        mkX = marker[XVAL]
                        mkY = marker[YVAL]

                    polyline = prop.get(PL)
                    if polyline is None:
                        polyline = ''
                    temperature = prop.get(TPR)
                    if temperature is None:
                        temperature = 'nan'

                    pressure = prop.get(PRSS)
                    if pressure is None:
                        pressure = 'nan'
                    viewAngle = prop[VA]
                    compass = prop[COMPASS]
                    img.append(Img(
                        prop[RIDENT], prop[FNAME], prop[TIMES], prop[ACC],
                        viewAngle[VER], viewAngle[HOR], compass[AZIMUTH],
                        compass[PITCH], compass[ROLL], prop[LS], coord[0],
                        coord[1], mkX, mkY, polyline, temperature, pressure))
            else:
                for crd in coord[0]:
                    lat = crd[0]
                    lon = crd[1]
                    polyCoord.append(ppygis.Point(lon, lat))

        fcProp = geojflood[PROP]
        note = fcProp[NOTE]
        oid = path[path.rindex('/') + 1:path.rindex('.')]
        userid = path[0:path.index('/')]

        osver = fcProp[OSV]
        mkMod = fcProp[MKM]
        iWth = fcProp[IWIDTH]
        iHht = fcProp[IHEIGHT]

        fLen = fcProp[FLENGTH]

        dec = fcProp[DEC]

        #Define connection string
        conn_string = "host='" + HOST + "' dbname='" + DBNAME + "' user='******' password='******'"

        log.debug("Connecting to database\n    ->%s" % (conn_string))

        # get a connection, if a connect cannot be made an exception will be raised here
        conn = psycopg2.connect(conn_string)

        cursor = conn.cursor()
        log.debug("Connected!\n")

        point = ppygis.Point(plon, plat)
        point.srid = SRID

        #OID+","+NT+\
        #","+TSTMP+","+GEO
        cursor.execute(OBSGP_INSERT, (oid, note, timeSt, point))

        ipA = ipaddr('eth0') + config.get(
            "imgurl", "iurl") + path[0:path.rindex('/') + 1]

        for image in img:
            image.insert(cursor, oid, userid, osver, mkMod, iWth, iHht, ipA,
                         fLen)

        #OID+","+OPT

        st = 1
        for opt in dec:
            cursor.execute(DEC_INSERT, (oid, opt, st))
            st = st + 1

        if len(polyCoord) > 0:
            ls = ppygis.LineString((polyCoord))
            pgon = ppygis.Polygon((ls, ))
            pgon.srid = SRID

            #OID+","+GEO
            cursor.execute(POL_INSERT, (oid, pgon))
        else:
            cursor.execute(POL_INSERT, (oid, None))

        conn.commit()
        cursor.close()
        conn.close()

    except Exception as e:
        import traceback
        exMsg = traceback.format_exc()
        log.exception("Exception: " + exMsg)
        #return  {"error":1 , "msg": exMsg}
        return {"error": 1, "msg": str(e)}

    return {"error": 0, "msg": "Operation successful"}
Example #13
    def buildAverageSpeedsReport(self, type='realtime'):
        if type not in ('baseline', 'realtime'):
            type = 'combined'

        min_length = 250  # shorter non-slow segments will be grouped together

        cursor = self.cursor()

        speeds = {}
        readings = {}

        # Clean up

        sql = "DELETE FROM report"
        if type == 'realtime':
            sql += " WHERE realtime=true"
        elif type == 'baseline':
            sql += " WHERE realtime=false"
        cursor.execute(sql)

        # Build average speed cache for routes

        sql = "SELECT ST_GeoHash(geometry, 30), AVG(speed), COUNT(*) FROM route"
        if type == 'realtime':
            sql += " WHERE  realtime=true"
        elif type == 'baseline':
            sql += " WHERE realtime=false"
        sql += " GROUP BY geometry HAVING COUNT(*) > 0"

        cursor.execute(sql)

        for record in cursor.fetchall():
            speeds[record[0]] = float(record[1])
            readings[record[0]] = float(record[2])

        # Group raw routes into linestring geometries

        sql = "SELECT ST_AsGeoJSON((ST_Dump(ST_LineMerge(ST_Collect(route.geometry)))).geom)::json->'coordinates' FROM (SELECT geometry FROM route GROUP BY geometry HAVING COUNT(*) > 0) AS route"

        cursor.execute(sql)
        collection = []
        for record in cursor.fetchall():
            linestring = json.loads(record[0])
            cursor.execute(
                "select ST_Length(ST_Transform(ST_GeomFromGeoJSON(%s)::geometry, 2839))",
                (json.dumps({
                    'type': 'LineString',
                    'coordinates': linestring,
                    'crs': {
                        'type': 'name',
                        'properties': {
                            'name': 'EPSG:4326'
                        }
                    }
                }), ))
            length = cursor.fetchone()[0]
            feature = {
                "coordinates": [],
                "speed": 0,
                "reading": 0,
                "remaining-length": length,
                "speeds": [],
                "readings": [],
                "lengths": []
            }
            # run through each group to detect speed category change and exclude short routes
            for i, point in enumerate(linestring):
                # start bulding route segments upon reaching second linestring point
                if len(feature['coordinates']) > 0:
                    route = []
                    route.append(
                        ppygis.Point(linestring[i - 1][0],
                                     linestring[i - 1][1],
                                     linestring[i - 1][2],
                                     srid=4326))
                    route.append(
                        ppygis.Point(linestring[i][0],
                                     linestring[i][1],
                                     linestring[i][2],
                                     srid=4326))
                    route = ppygis.LineString(route, srid=4326)
                    cursor.execute(
                        "select ST_Length(ST_Transform(%s::geometry, 2839)), ST_GeoHash(%s::geometry, 30)",
                        (route, route))
                    record = cursor.fetchone()
                    length = record[0]
                    if record[1] in speeds:
                        speed = speeds[record[1]]
                        reading = readings[record[1]]
                    else:
                        print "Route cache miss", record[1]
                        speed = -1
                        reading = 0

                    if len(feature['speeds']):
                        if self.categorizeSpeed(speed) != self.categorizeSpeed(
                                feature['speeds'][-1]):
                            if (sum(feature['lengths']) >= min_length
                                    and feature['remaining-length'] -
                                    sum(feature['lengths']) >= min_length) or (
                                        self.categorizeSpeed(speed) == 1):
                                feature['speed'] = float(sum(
                                    feature['speeds'])) / len(
                                        feature['speeds'])
                                feature['reading'] = float(
                                    sum(feature['readings'])) / len(
                                        feature['readings'])
                                collection.append([
                                    copy.deepcopy(feature['coordinates']),
                                    feature['speed'], feature['reading']
                                ])
                                # reset
                                feature['remaining-length'] -= sum(
                                    feature['lengths'])
                                feature['coordinates'] = [
                                    feature['coordinates'][-1]
                                ]
                                feature['speed'] = 0
                                feature['reading'] = 0
                                feature['lengths'] = []
                                feature['speeds'] = []
                                feature['readings'] = []

                    feature['coordinates'].append(
                        [point[0], point[1], point[2]])

                    feature['speeds'].append(speed)
                    feature['readings'].append(reading)
                    feature['lengths'].append(length)

                else:
                    feature['coordinates'].append(
                        [point[0], point[1], point[2]])

            feature['speed'] = float(sum(feature['speeds'])) / len(
                feature['speeds'])
            feature['reading'] = float(sum(feature['readings'])) / len(
                feature['readings'])
            collection.append([
                copy.deepcopy(feature['coordinates']), feature['speed'],
                feature['reading']
            ])

        now = datetime.datetime.fromtimestamp(time.time())
        for i, feature in enumerate(collection):
            cursor.execute(
                "INSERT INTO report (geometry, speed, reading, type, timestamp) VALUES (ST_GeomFromGeoJSON(%s), %s, %s, %s, %s)",
                (json.dumps({
                    'type': 'LineString',
                    'coordinates': feature[0],
                    'crs': {
                        'type': 'name',
                        'properties': {
                            'name': 'EPSG:4326'
                        }
                    }
                }), feature[1], feature[2], type, now))
            collection[i][0] = json.dumps({
                'type': 'LineString',
                'coordinates': feature[0]
            })  # just as the database would have returned it
            self.connection.commit()

        return collection
Example #14
    def send_off_sail_data_abstract(self, dataset):
        if dataset.transekt_id == u'B':
            current_highest_index = self.__get_latest_fahrt_id()
            last_transect_end = self.__get_last_transect_end(
                current_highest_index)
            if last_transect_end is None:
                self.dialogs['missing_transect_end_dialog'].exec_()
                if self.dialogs['missing_transect_end_dialog'].delete:
                    self.__delete_old_transect(current_highest_index)
                else:
                    return
            fahrt_id = current_highest_index + 1
            ins = self.transects.insert().values(fahrt_id=fahrt_id,
                                                 datum_b=dataset.datum,
                                                 zeit_b=dataset.zeit,
                                                 kurswoche=dataset.kurswoche,
                                                 guide=dataset.guide,
                                                 schiff=dataset.schiff,
                                                 akustik=dataset.akustik)
            self.engine.execute(ins)

        elif dataset.transekt_id == u'E':
            current_highest_index = self.__get_latest_fahrt_id()
            last_transect_end = self.__get_last_transect_end(
                current_highest_index)
            if last_transect_end is not None:
                self.dialogs['missing_transect_beginn_dialog'].exec_()
                return
            else:
                fahrt_id = current_highest_index
            begin_transect = self.engine.execute(
                select([
                    self.points
                ]).where(self.points.c.fahrt_id == fahrt_id)).fetchone()
            result = self.engine.execute(
                select([self.points
                        ]).where(self.points.c.fahrt_id == fahrt_id).where(
                            self.points.c.transekt_id == 'KW '))
            polyline = []
            point = ppygis.Point(begin_transect.laenge,
                                 begin_transect.breite,
                                 srid=4326)
            polyline.append(point)
            for row in result:
                point = ppygis.Point(row.laenge, row.breite, srid=4326)
                polyline.append(point)
            point = ppygis.Point(dataset.laenge, dataset.breite, srid=4326)
            # append the end-of-transect point (it was built but never added)
            polyline.append(point)
            linestring = ppygis.LineString(polyline, srid=4326)
            fahrt_id = current_highest_index
            self.connection.execute(self.transects.update().where(
                self.transects.c.fahrt_id == fahrt_id).values(
                    datum_e=dataset.datum,
                    zeit_e=dataset.zeit,
                    geom=linestring))

        elif dataset.transekt_id == 'KW':
            fahrt_id = self.__get_latest_fahrt_id()
            last_transect_end = self.__get_last_transect_end(fahrt_id)
            if last_transect_end is not None:
                self.dialogs['invalid_course_dialog'].exec_()
                return
        result = self.engine.execute(select([func.max(self.points.c.punkt_id)
                                             ]))
        for row in result:
            punkt_id = row[0] + 1
        ins = self.points.insert().values(
            punkt_id=punkt_id,
            fahrt_id=fahrt_id,
            transekt_id=dataset.transekt_id,
            sicht_id=dataset.sicht_id,
            bewoelkung_id=dataset.bewoelkung_id,
            windrichtung_id=dataset.windrichtung_id,
            wind_id=dataset.wind_id,
            meer_id=dataset.meer_id,
            kurs=dataset.kurs,
            kurswoche=dataset.kurswoche,
            breite=dataset.breite,
            laenge=dataset.laenge,
            schiff=dataset.schiff,
            guide=dataset.guide,
            segel=dataset.segel,
            akustik=dataset.akustik,
            militaer=dataset.militaer,
            zeit=dataset.zeit,
            datum=dataset.datum,
            fotos=dataset.fotos,
            aufnahme=dataset.aufnahme)
        self.engine.execute(ins)
        self.set_main_frame_post_entry()
Example #15
		def importGTFSData(self):

			city="Oulu"
			country="Finland"

			cursor = self.cursor()

			try:
				cursor.execute("INSERT INTO mt_city (name, country) VALUES (%s, %s)",
											 (city, country))

				file=unicodecsv.reader(open('gtfs/agency.txt'), encoding='utf-8')
				file.next()
				for line in file:
					if len(line) >= 4:
						cursor.execute("INSERT INTO mt_agency (agency_id, city_id, name, url, timezone, language, phone) VALUES (%s, (SELECT city_id FROM mt_city WHERE name=%s AND country=%s), %s, %s, %s, %s, %s)",
													 (line[0],
														city,
														country,
														line[1]     if len(line) > 1 else None,
														line[2]     if len(line) > 2 else None,
														line[3]     if len(line) > 3 else None,
														line[4]     if len(line) > 4 else None,
														line[5]     if len(line) > 5 else None,
													 ))

				file=unicodecsv.reader(open('gtfs/stops.txt'), encoding='utf-8')
				file.next()
				for line in file:
					if len(line) >= 6:
						cursor.execute("INSERT INTO mt_stop (stop_id, city_id, code, name, description, timezone, geometry) VALUES (%s, (SELECT city_id FROM mt_city WHERE name=%s AND country=%s), %s, %s, %s, %s, ST_GeomFromText('POINT(%s %s 0)', 4326))",
													 (line[0],
														city,
														country,
														line[1],
														line[2],
														line[3],
														line[10]    if len(line) > 10 else None,
														float(line[5]),
														float(line[4]),
														))
				file=unicodecsv.reader(open('gtfs/shapes.txt'), encoding='utf-8')
				file.next()
				shapes={}
				for line in file:
					if len(line) >= 4:
						if line[0] not in shapes:
							shapes[line[0]]={}
						shapes[line[0]][int(line[3])+1]=[float(line[2]), float(line[1]), 0]

				# sort according to sequence number
				for shape_id in shapes:
					shape=[]
					for i in sorted(shapes[shape_id].iterkeys()):
						shape.append(shapes[shape_id][i])
					shapes[shape_id]=shape


				file=unicodecsv.reader(open('gtfs/trips.txt'), encoding='utf-8')
				file.next()
				routes={}
				for line in file:
					if len(line) >= 7:
						if line[0] not in routes:
							routes[line[0]]={'shapes': {}}
						if line[6] not in routes[line[0]]['shapes']:
							if line[6] in shapes:
								routes[line[0]]['shapes'][line[6]]=shapes[line[6]]

				file=unicodecsv.reader(open('gtfs/routes.txt'), encoding='utf-8')
				file.next()
				for line in file:
					if len(line) >= 7:
						if line[0] not in routes:
							routes[line[0]]={'shapes': {}}
						routes[line[0]]['agency_id']=line[1]
						routes[line[0]]['short_name']=line[2]
						routes[line[0]]['full_name']=line[3]
						routes[line[0]]['description']=line[4]
						routes[line[0]]['type']=line[5]
						routes[line[0]]['url']=line[6]

				for route_id in routes:
					route=routes[route_id]
					linestring=[]
					for shape_id in route['shapes']:
						for point in route['shapes'][shape_id]:
							linestring.append(ppygis.Point(point[0], point[1], point[2], srid=4326))
					if len(linestring)>1:
						cursor.execute("INSERT INTO mt_route (route_id, agency_id, short_name, full_name, type, description, url, geometry) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)",
											 (route_id,
												route['agency_id'],
												route['short_name'],
												route['full_name'],
												int(route['type']),
												route['description'],
												route['url'],
												ppygis.LineString(linestring, srid=4326),
											 ))

				file=unicodecsv.reader(open('gtfs/shapes.txt'), encoding='utf-8')
				file.next()
				shapes={}
				for line in file:
					if len(line) >= 2:
						if line[0] not in shapes:
							shapes[line[0]]=[]
						shapes[line[0]].append([line[2], line[1]])

			except Exception as e:
				print "Error: ", e
				raise

			self.connection.commit()
			return
Example #16
def importXpt(xptfile,verbose=False):
    #verbose = True
    #xptfile = '/Users/igswahwsmcevan/Altimetry/xpd/Gulkana.1995.137.2000.153.selected.xpt'
    conn,cur = ConnectDb()
    
 
    #FINDING DATES OF FILE AND GLACIER NAME
    if verbose:print "FINDING DATES OF FILE AND GLACIER NAME"
    try:
        name,yr1,doy1,yr2,doy2 = re.findall('(\w+)\.(\d{4})\.(\d{3})\.(\d{4})\.(\d{3})\.selected',os.path.basename(xptfile))[0]
    except:
        print "Something is wrong with the filename: "+xptfile
        return
    
    yr1 = int(yr1)
    yr2 = int(yr2)
    doy1 = int(doy1)
    doy2 = int(doy2)
    
    date1 = doy_to_datetime(doy1,yr1)
    date2 = doy_to_datetime(doy2,yr2)
    
    #FINDING UTM ZONE
    if verbose:print "FINDING UTM ZONE"
    try:
        zone = re.findall('zone(\d{1,2})',glob.glob(cfg.get('Section One', 'analysispath')+name+'/shp/*zone*')[0],re.IGNORECASE)[0]
    except: 
        print "Could not identify zone for "+xptfile
        return
    
    #GLID AND LAMBID
    if verbose:print "GLID AND LAMBID"
    #print "select gid from glnames where name = '"+name+"';"
    #print 'here1'
    #sys.stdout.flush()
    try:
        glid = GetSqlData("select gid from glnames where name = '"+name+"';")[0]['gid']
    except:
        print xptfile+" does not have a glacier entry in glnames!"
        return
        
    #print "select gid from lamb where glid = "+str(glid)+" and date1 = '"+date1.isoformat()+"' and date2 = '"+date2.isoformat()+"';"
    try:
        lambid = GetSqlData("select gid from lamb where glid = "+str(glid)+" and date1 = '"+date1.isoformat()+"' and date2 = '"+date2.isoformat()+"';")[0]['gid']
    except:
        lambid = 'NULL'
        print "     WARNING: "+xptfile+" does not have an associated lambfile!"
        return
        
    if verbose:
        t = GetSqlData('select glnames.name as name,lamb.date1 as date1,lamb.date2 as date2 from lamb inner join glnames on lamb.glid=glnames.gid where lamb.gid ='+str(lambid)+';')[0]
        print t['name'],t['date1'],t['date2'],xptfile
    #READING FILE INTO ARRAYS
    #print 'here2'
    #sys.stdout.flush()     
    if verbose:print "load xptfile"
    time1 = time.time()

    x,y,z1,z2,dz = N.loadtxt(xptfile,usecols=[0,1,2,3,4],unpack=True)
    time2 = time.time()

    
    #print temp.size
    
    #f = open(xptfile)
    #x=N.array([],dtype=N.float64)
    #y=N.array([],dtype=N.float64)
    #z1=N.array([],dtype=N.float64)
    #z2=N.array([],dtype=N.float64)
    #dz=N.array([],dtype=N.float64)
    #
    #for line in f:
    #    (x_add,y_add,z1_add,z2_add,dz_add,trash,trash,trash,trash,trash) = [float(field) for field in line.split()]
    #    x = N.append(x,x_add)
    #    y = N.append(y,y_add)
    #    z1 = N.append(z1,z1_add)
    #    z2 = N.append(z2,z2_add)
    #    dz = N.append(dz,dz_add)
    
    if verbose:print 'Reprojecting'
    #sys.stdout.flush()      
    #REFERENCE SYSTEM OF GRID
    old_cs = osr.SpatialReference()
    old_cs.ImportFromProj4('+proj=utm +zone=%s +ellps=WGS84 +datum=WGS84 +units=m +no_defs' % zone)
    
    #WGS84 REFERENCE SYSTEM
    wgs84_wkt = """GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]"""
    new_cs = osr.SpatialReference()
    new_cs.ImportFromWkt(wgs84_wkt)
    
    #ALBERS REFERENCE SYSTEM
    #albers_cs = osr.SpatialReference()
    #albers_cs.ImportFromProj4("""+proj=aea +lat_1=55 +lat_2=65 +lat_0=50 +lon_0=-154 +x_0=0 +y_0=0 +ellps=GRS80 +datum=WGS84 +units=m +no_defs """)
        
    # create a transform object to convert between coordinate systems
    transformtowgs = osr.CoordinateTransformation(old_cs, new_cs) 
    #transformtoalbers = osr.CoordinateTransformation(old_cs, albers_cs)
    coordswgs=[]
    #coordsalb=[]
    
    
    sys.stdout.flush() 
    for i in xrange(len(x)):
        coordswgs.append(transformtowgs.TransformPoint(x[i],y[i]))
        #coordsalb.append(transformtoalbers.TransformPoint(x[i],y[i]))

    #lon,lat,trash = zip(*coords)
    
    time3 = time.time()
    
    
    #values = str(glid)+','+str(lambid)+",'"+date1.isoformat()+"','"+date2.isoformat()+"'"
    #values2 = #',"+z1+'::real,'+str(z2[0])+'::real,'+str(dz[0])+'::real,POINT'+re.sub(', 0.0','',str(coords[0]))
    #print "INSERT INTO xpt (glid, lambid,date1,date2) VALUES ("+values+");"
    #cur.execute("INSERT INTO xpt (glid, lambid,date1,date2) VALUES ("+values+");")
    
    if verbose:print 'Updating database'
    #sys.stdout.flush()
    

    maxgid = GetSqlData2("SELECT max(gid) FROM xpts;")['max'][0]
    if maxgid is None: maxgid = -1
    print type(maxgid)
    #print "%s\t%s\t%s\t%s\t%s\t%s\n" % (i,lambid,z1[i],z2[i],dz[i],ppygis.Point(coordswgs[i][0],coordswgs[i][1]).write_ewkb()) 
    buffer = StringIO.StringIO()
    for i in xrange(len(coordswgs)):
        #print "%s\t%s\t%s\t%s\t%s\t%s\n" % (i,lambid,z1[i],z2[i],dz[i],ppygis.Point(coordswgs[i][0],coordswgs[i][1]).write_ewkb())
        buffer.write("%s\t%s\t%s\t%s\t%s\t%s\n" % ((i+maxgid+1),lambid,z1[i],z2[i],dz[i],ppygis.Point(coordswgs[i][0],coordswgs[i][1]).write_ewkb()))
    buffer.seek(0)   
    cur.copy_from(buffer, 'xpts')   

    #for i in xrange(len(coordswgs)):
    #    cur.execute("INSERT INTO xpts (lambid,z1,z2,dz,geog) VALUES (%s,%s,%s,%s,ST_GeogFromText('POINT(%10.7f %10.7f)'));" % (lambid,z1[i],z2[i],dz[i],coordswgs[i][0],coordswgs[i][1]))
        #cur.execute(insertxpts2)
        #if i % 50000 == 0: print i
    #print 'here6'
    #sys.stdout.flush() 
    ####AddGeometryColumn********
    time4 = time.time()
    if verbose:print "Readfile", time2-time1
    if verbose:print "Reproject", time3-time2
    if verbose:print "Insert to table", time4-time3
    
    
    conn.commit()
    cur.close()
    conn.close()
                                
Example #17
def computeCentroid(points):
    xs = map(lambda p: p.x, points)
    ys = map(lambda p: p.y, points)
    centroid = ppygis.Point(np.mean(xs), np.mean(ys), 0, srid=4326)
    return centroid
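A quick check of computeCentroid (np is assumed to be numpy, as elsewhere in these examples; the coordinates are arbitrary):

points = [ppygis.Point(25.46, 65.00, 0, srid=4326),
          ppygis.Point(25.50, 65.04, 0, srid=4326)]
centroid = computeCentroid(points)
print centroid.x, centroid.y  # roughly 25.48 65.02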
Example #18
    def migrate(self):

        from_conn = spatialite.connect('navirec.sqlite')
        from_cursor = from_conn.cursor()
        to_cursor = self.cursor()

        # traces

        print "Migrating traces"

        sql = "select session_id, X(geom) as longitude, Y(geom) as latitude, altitude, timestamp from Traces"
        from_cursor.execute(sql)
        traces = from_cursor.fetchall()
        if traces is not None and len(traces) > 0:
            for trace in traces:
                journey_id = trace[0]
                longitude = trace[1]
                latitude = trace[2]
                altitude = trace[3]
                timestamp = trace[4]
                if altitude is None:
                    altitude = 0
                to_cursor.execute(
                    "SELECT * FROM trace WHERE journey_id=%s AND timestamp=%s AND ST_Equals(geometry, %s)",
                    (journey_id, timestamp,
                     ppygis.Point(longitude, latitude, altitude, srid=4326)))
                matching_traces = to_cursor.fetchall()
                if len(matching_traces) == 0:
                    sys.stdout.write('.')
                    to_cursor.execute(
                        "INSERT INTO trace (journey_id, timestamp, geometry) VALUES  (%s, %s, %s)",
                        (journey_id, timestamp,
                         ppygis.Point(longitude, latitude, altitude,
                                      srid=4326)))
                else:
                    sys.stdout.write('!')

        # routes

        print "Migrating routes"

        sql = "select session_id, X(PointN(geom, 1)) as longitude1, Y(PointN(geom, 1)) as latitude1, X(PointN(geom, 2)) as longitude2, Y(PointN(geom, 2)) as latitude2, timestamp, speed, mode from Routes"
        from_cursor.execute(sql)
        routes = from_cursor.fetchall()
        if routes is not None and len(routes) > 0:
            for route in routes:
                journey_id = route[0]
                longitude1 = route[1]
                latitude1 = route[2]
                longitude2 = route[3]
                latitude2 = route[4]
                timestamp = route[5]
                speed = route[6] / 3.6
                mode = route[7]
                altitude = 0
                point1 = ppygis.Point(longitude1,
                                      latitude1,
                                      altitude,
                                      srid=4326)
                point2 = ppygis.Point(longitude2,
                                      latitude2,
                                      altitude,
                                      srid=4326)
                line = ppygis.LineString((point1, point2), srid=4326)
                to_cursor.execute(
                    "SELECT * FROM route WHERE journey_id=%s AND timestamp=%s AND ST_Equals(geometry, %s)",
                    (journey_id, timestamp, line))
                matching_routes = to_cursor.fetchall()
                if len(matching_routes) == 0:
                    sys.stdout.write('.')
                    to_cursor.execute(
                        "INSERT INTO route (journey_id, timestamp, geometry, speed, mode) VALUES  (%s, %s, %s, %s, %s)",
                        (journey_id, timestamp, line, speed, mode))
                else:
                    sys.stdout.write('!')

        self.connection.commit()
        from_conn.close()
        return
Example #19
		def pullPublicTransportVehicleSpeeds(self, offset=0):

				limit=1

				cursor = self.cursor()
				url='http://92.62.36.215:8080/siri-vm-ws/NotificationProducer'
#				url='http://127.0.0.1:8080/siri-vm-ws/NotificationProducer'
				headers = {
								"Content-type": "application/xml; charset=utf-8",
								}

				sql = """SELECT distinct route_id, short_name, ST_AsGeoJSON(geometry) FROM mt_route r ORDER BY route_id ASC LIMIT %s OFFSET %s"""
				cursor.execute(sql, (limit, offset,))
				records = cursor.fetchall()

				opener = urllib2.build_opener()


				for record in records:
					#print "Updating line " + str(record[1])
					body ="""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:siri="http://wsdl.siri.org.uk/siri" xmlns:siri1="http://www.siri.org.uk/siri">
										 <soapenv:Header/>
										 <soapenv:Body>
												<siri:GetVehicleMonitoring>
													 <ServiceRequestInfo>
															<siri1:RequestTimestamp>2015-06-12T11:50:11.406+02:00</siri1:RequestTimestamp>
															<siri1:RequestorRef>okffi</siri1:RequestorRef>
													 </ServiceRequestInfo>
													 <Request version="2.0">
															<siri1:RequestTimestamp>2015-06-12T11:50:11.406+02:00</siri1:RequestTimestamp>
															<siri1:LineRef>""" + record[1] + """</siri1:LineRef>
													 </Request>
													 <RequestExtension>
															<!--You may enter ANY elements at this point-->
													 </RequestExtension>
												</siri:GetVehicleMonitoring>
										 </soapenv:Body>
									</soapenv:Envelope>"""

					req = urllib2.Request(url, body, headers)
					res = opener.open(req)
					xml = res.read()

					root = ET.fromstring(xml)

					for activity in root.findall('.//{http://www.siri.org.uk/siri}MonitoredVehicleJourney[{http://www.siri.org.uk/siri}PreviousCalls][{http://www.siri.org.uk/siri}DirectionRef][{http://www.siri.org.uk/siri}LineRef]'):

						vehicleRef=activity.find('.//{http://www.siri.org.uk/siri}VehicleRef')
						lineRef=activity.find('.//{http://www.siri.org.uk/siri}LineRef')
						directionRef=activity.find('.//{http://www.siri.org.uk/siri}DirectionRef')
						journeyStartTime=activity.find('.//{http://www.siri.org.uk/siri}PreviousCalls/{http://www.siri.org.uk/siri}PreviousCall[last()]/{http://www.siri.org.uk/siri}AimedArrivalTime')
						journeyID = "oulubuses.line:" + lineRef.text + ".direction:" + directionRef.text + ".vehicle:" + vehicleRef.text + ".start:" + journeyStartTime.text

						measurements=[]

						stops=activity.findall('.//{http://www.siri.org.uk/siri}PreviousCalls/{http://www.siri.org.uk/siri}PreviousCall[{http://www.siri.org.uk/siri}StopPointRef][{http://www.siri.org.uk/siri}ActualArrivalTime][{http://www.siri.org.uk/siri}ActualDepartureTime]')

						for i, stop in enumerate(stops):
							measurements.append({
								'stopRef': stop.find('{http://www.siri.org.uk/siri}StopPointRef').text,
								'arrivalTime': stop.find('{http://www.siri.org.uk/siri}ActualArrivalTime').text,
								'departureTime': stop.find('{http://www.siri.org.uk/siri}ActualDepartureTime').text
							})
							if i>0:
								sql = """SELECT ST_Length(ST_Transform(geometry, 2839)) as distance,
																EXTRACT(EPOCH FROM (%s::timestamptz - %s::timestamptz)) AS duration,
																ST_AsGeoJSON(geometry) AS geometry
													FROM (SELECT
																		ST_Line_Substring(
																			r.geometry,
																			LEAST(ST_Line_Locate_Point(r.geometry, (SELECT geometry FROM mt_stop WHERE stop_id=%s)),
																					ST_Line_Locate_Point(r.geometry, (SELECT geometry FROM mt_stop WHERE stop_id=%s))),
																			GREATEST(ST_Line_Locate_Point(r.geometry, (SELECT geometry FROM mt_stop WHERE stop_id=%s)),
																					ST_Line_Locate_Point(r.geometry, (SELECT geometry FROM mt_stop WHERE stop_id=%s)))
																		) AS geometry
																FROM mt_route r
																WHERE short_name=%s) s"""
								cursor.execute(sql, (measurements[i-1]['arrivalTime'],
																		 measurements[i]['departureTime'],
																		 measurements[i-1]['stopRef'],
																		 measurements[i]['stopRef'],
																		 measurements[i-1]['stopRef'],
																		 measurements[i]['stopRef'],
																		 record[1]
																		 ,))
								for route in cursor.fetchall():
									distance=route[0]
									duration=route[1]
									linestring=json.loads(route[2])['coordinates']
									for j, point in enumerate(linestring):
										if j>0:
											route=[]
											route.append(ppygis.Point(linestring[j-1][0], linestring[j-1][1], linestring[j-1][2], srid=4326))
											route.append(ppygis.Point(linestring[j][0], linestring[j][1], linestring[j][2], srid=4326))
											route=ppygis.LineString(route, srid=4326)
											try:
												cursor.execute("INSERT INTO route (geometry, journey_id, timestamp, speed, mode, realtime) VALUES (%s, %s, %s, %s, %s, %s)",
																			(route, journeyID, measurements[i]['departureTime'], 0 if duration == 0 else distance/duration, "BUS", False, ))
											except db.IntegrityError as e:
												self.connection.rollback()
											except:
												raise
											else:
												self.connection.commit()
					#print "done updating line"

				if not records:
					offset=0
				else:
					offset+=limit

				threading.Timer(5.0, lambda: MaaS().pullPublicTransportVehicleSpeeds(offset)).start()
				return
Example #20
def dbPoint(point):
    # ppygis.Point takes x (longitude) first, then y (latitude)
    return ppygis.Point(point.longitude,
                        point.latitude,
                        point.elevation,
                        srid=4326).write_ewkb()
Example #21
                out = commands.getstatusoutput(route_this)

                # Check if route was alright, if not mark and try new one.
                if ('Routed OK' in out[1]
                        and check_route_output) or not check_route_output:
                    pp('[Route found]    ')
                    # Read data from gpx file
                    gpx_dom = parse(out_gpx)
                    points = gpx_dom.getElementsByTagName('trkpt')
                    line_points = []
                    for p in points:
                        if p.hasAttribute('lat') and p.hasAttribute('lon'):
                            line_points.append(
                                ppygis.Point(float(p.getAttribute('lon')),
                                             float(p.getAttribute('lat')),
                                             srid=srid))
                        else:
                            pp('[missing lat/lon] ')

                    if len(line_points) == 0:
                        pp('[no points]    ')
                        routes_not_committed += 1
                    else:
                        route = ppygis.LineString(line_points, srid=srid)
                        # Put into DB
                        db.execute(
                            "INSERT INTO routes_" + y +
                            " (terminal_id_start, terminal_id_end, start_geom, end_geom, route_geom) VALUES (%s, %s, %s, %s, %s)",
                            (v['start'], v['end'],
                             ppygis.Point(float(v['start_lon']),
Example #22
def dbPoints(points):
    # ppygis.Point takes x (longitude) first, then y (latitude)
    return ppygis.LineString(
        map(
            lambda p: ppygis.Point(
                p.longitude, p.latitude, p.elevation, srid=4326), points),
        4326).write_ewkb()
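A sketch of feeding dbPoints, assuming a simple record type with latitude, longitude, and elevation attributes (the namedtuple here is invented for illustration):

import collections

TrackPoint = collections.namedtuple('TrackPoint',
                                    ['latitude', 'longitude', 'elevation'])
track = [TrackPoint(65.01, 25.47, 10.0),
         TrackPoint(65.02, 25.48, 12.0)]
ewkb = dbPoints(track)  # EWKB, ready to bind as a query parameter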
Example #23
def sweg_vs_retrieve_db_data(cur_rli_db, i_dict_boundary, i_dict_register_info,
                             i_str_weather_dataset, i_dict_weather_grid_info,
                             i_dict_performance_curves_info,
                             i_dict_powercurve_mix, i_dict_windspeed_corr,
                             i_dict_z0_info, i_dict_random_input,
                             dict_console_info):
    '''
    Retrieves simulation input data from a local database.
    
    Copyright (c)2013 Marcus Biank, Reiner Lemoine Institut, Berlin
    
    Keyword arguments:        
    
        cur_rli_db -- cursor on database connection
        
        i_dict_boundary -- dictionary containing information on 
                           existing boundary conditions 
        
        i_dict_register_info -- dictionary containing information on the wind 
                                installation register to be used
        
        i_str_weather_dataset -- string specifying which dataset is to be used
        
        i_dict_weather_grid_info -- dictionary containing information 
                                    on the weather datasets
        
        i_dict_performance_curves_info -- dictionary containing information on 
                                          the performance curves to be used
                                          
        i_dict_powercurve_mix -- dictionary containing information on source
                                 for power curve mixing
        
        i_dict_windspeed_corr -- dict specifying which method for windspeed 
                                 correction is to be used
        
        i_dict_z0_info -- dictionary containing information on z0
        
        i_dict_random_input -- dictionary containing information on the usage 
                               of randomly generated data
        
        dict_console_info -- dictionary containing information on the console 
                             logging functionality
    '''

    #import additional modules
    import shapefile
    import sys
    import ppygis

    from Modules.sweg_vs_additional_modules import console_info as Info
    from Modules.sweg_vs_additional_modules import loadmat

    #declare local variables
    arr_error = []
    arr_register = []
    arr_administration = []
    arr_gridcell_id = []
    arr_shp_points = []
    arr_cp = []
    arr_P = []
    arr_P_shares = []
    arr_z0 = []
    dict_random_input = {}

    #detect source for boundary polygon
    if i_dict_boundary['source'] == "GeoDB":
        #compose sql statement for additional boundary query
        str_sql_statement_bound_src = ", " + \
          i_dict_boundary['dict_boundary_geodb']['str_schema'] + "." + \
          i_dict_boundary['dict_boundary_geodb']['str_data'] + " AS boundary "
        if i_dict_boundary['dict_boundary_geodb'][
                'str_geom_adminlvl'] == "Land":
            str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_column'] + \
              ", register.geom) AND boundary.bez_nat = '" + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_name'] + \
              '''' AND boundary.bez_lan is NULL AND 
                   boundary.bez_rbz is NULL AND 
                   boundary.bez_krs is NULL AND 
                   boundary.bez_vwg is NULL AND 
                   boundary.bez_gem is NULL '''
        elif i_dict_boundary['dict_boundary_geodb'][
                'str_geom_adminlvl'] == "Bundesland":
            str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_column'] + \
              ", register.geom) AND boundary.bez_lan = '" + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_name'] + \
              '''' AND boundary.bez_rbz is NULL AND 
                   boundary.bez_krs is NULL AND 
                   boundary.bez_vwg is NULL AND 
                   boundary.bez_gem is NULL '''
        elif i_dict_boundary['dict_boundary_geodb'][
                'str_geom_adminlvl'] == "Regierungsbezirk":
            str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_column'] + \
              ", register.geom) AND boundary.bez_rbz = '" + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_name'] + \
              '''' AND boundary.bez_krs is NULL AND 
                   boundary.bez_vwg is NULL AND 
                   boundary.bez_gem is NULL '''
        elif i_dict_boundary['dict_boundary_geodb'][
                'str_geom_adminlvl'] == "Kreis":
            str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_column'] + \
              ", register.geom) AND boundary.bez_krs = '" + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_name'] + \
              '''' AND boundary.bez_vwg is NULL AND 
                   boundary.bez_gem is NULL '''
        elif i_dict_boundary['dict_boundary_geodb'][
                'str_geom_adminlvl'] == "Verwaltungsgemeinschaft":
            str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_column'] + \
              ", register.geom) AND boundary.bez_vwg = '" + \
              i_dict_boundary['dict_boundary_geodb']['str_geom_name'] + \
              "' AND boundary.bez_gem is NULL "
        elif i_dict_boundary['dict_boundary_geodb'][
                'str_geom_adminlvl'] == "Gemeinde":
            str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
            i_dict_boundary['dict_boundary_geodb']['str_geom_column'] + \
            ", register.geom) AND boundary.bez_gem = '" + \
            i_dict_boundary['dict_boundary_geodb']['str_geom_name'] + "' "

    elif i_dict_boundary['source'] == "Shapefile":
        #compose sql statement for additional boundary query
        #set database relevant variables to ""
        str_sql_statement_bound_src = ""
        try:
            shp_file = shapefile.Reader(
                str(i_dict_boundary['str_shp_filepath']))
            Info("Shapefile successfully read.", 2, dict_console_info)
            shp_shapes = shp_file.shapes()
            Info("Number of shapes in shp_file: " +  \
              str(len(shp_shapes)), 2,dict_console_info)

            if len(shp_shapes) == 1:
                #define points
                for point in shp_shapes[0].points:
                    # the third positional argument of Point is z, not srid
                    # (see Example #9), so the srid must be passed by keyword
                    arr_shp_points.append(
                        ppygis.Point(point[0], point[1], srid=4326))
                # close the ring by repeating the first point
                arr_shp_points.append(
                    ppygis.Point(shp_shapes[0].points[0][0],
                                 shp_shapes[0].points[0][1], srid=4326))
                #define linestring ring
                postgis_shp_linestring = [
                    ppygis.LineString(arr_shp_points, 4326)
                ]
                #define polygon
                postgis_shp_polygon = ppygis.Polygon(postgis_shp_linestring,
                                                     4326)
                #define ewkb (extended well-known binary) of polygon
                str_shpgeom = ppygis.Geometry.getquoted(postgis_shp_polygon)

                str_sql_statement_bound_cond = "AND ST_CONTAINS(" + \
                  str_shpgeom + ", register.geom) "
                #print str_sql_statement_bound_cond
            else:
                arr_error.append("Error: " + str(len(shp_shapes)) + \
                  " shapes in the provided shapefile! Only one allowed.")
        except Exception:
            arr_error.append("Error: " + str(sys.exc_info()))

    elif i_dict_boundary['source'] == "DBtable":
        #compose sql statement for additional boundary query
        str_sql_statement_bound_src = ", " + \
          i_dict_boundary['dict_boundary_dbtable']['str_schema'] + "." + \
          i_dict_boundary['dict_boundary_dbtable']['str_data'] + " AS boundary "

        str_sql_statement_bound_cond = "AND ST_CONTAINS(boundary." + \
              i_dict_boundary['dict_boundary_dbtable']['str_geom_column'] + \
              ", register.geom) AND boundary." + i_dict_boundary[
                'dict_boundary_dbtable']['str_condition_col'] + " = '" + \
              i_dict_boundary['dict_boundary_dbtable']['str_condition_val'] + "'"
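
        #note: the boundary clause is assembled by plain string
        #concatenation, so the configured column and value strings are
        #assumed to be trusted input (nothing is passed as a bound parameter)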

    else:
        #no boundary filter configured
        str_sql_statement_bound_src = ""
        str_sql_statement_bound_cond = ""

    if str_sql_statement_bound_cond != "":

        #compose sql statement for register retrieval
        str_sql_statement = '''
          SELECT wp.*, adm.bez_nat, adm.bez_lan, adm.bez_krs 
          FROM (
              SELECT register.*, grid.gridcell_id, grid.x, grid.y 
              FROM ''' + \
                  i_dict_register_info['str_schema'] + "." + \
                  i_dict_register_info['str_data'] + " AS register, " + \
                  i_dict_weather_grid_info[
                    i_str_weather_dataset]['str_schema'] + '.' + \
                  i_dict_weather_grid_info[
                    i_str_weather_dataset]['str_data'] + " AS grid " + \
                  str_sql_statement_bound_src + \
              "WHERE inbetriebn IS NOT NULL AND ST_CONTAINS(grid." + \
              i_dict_weather_grid_info[i_str_weather_dataset][
                'str_geom_column'] + ", register.geom) " + \
              str_sql_statement_bound_cond + ") AS wp, " + \
              i_dict_boundary['dict_boundary_geodb']['str_schema'] + "." + \
              i_dict_boundary['dict_boundary_geodb']['str_data'] + " AS adm " +\
          '''WHERE ST_CONTAINS(adm.geom,wp.geom) AND
                   adm.bez_gem IS NOT NULL
          ORDER BY wp.gridcell_id;'''
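
        #inner query: register entries joined to their weather grid cell
        #under the boundary condition; outer query: attach administrative
        #names from the geodb boundary table (i_dict_boundary is expected
        #to carry 'dict_boundary_geodb' for every source type here)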

        #print str_sql_statement

        #retrieve register data
        try:
            cur_rli_db.execute(str_sql_statement)
            arr_register = [row for row in cur_rli_db.fetchall()]
            Info("Register successfully retrieved.", 1, dict_console_info)
            Info("Number of rows in register:   " + \
              str(len(arr_register)), 3, dict_console_info)
        except Exception as e:
            arr_error.append("Error: " + e.pgerror)
        else:
            if arr_register == []:
                arr_error.append(
                    "Error: Selection of register data yielded no results!")

        #print [x[15] for x in arr_register]

        #compose sql statement for administration retrieval
        str_sql_statement = '''
          SELECT adm.bez_nat, adm.bez_lan, adm.bez_krs, adm.bez_gem 
          FROM (
              SELECT register.* 
              FROM ''' + i_dict_register_info['str_schema'] + "." + \
                  i_dict_register_info['str_data'] + " AS register" + \
                  str_sql_statement_bound_src + \
              "WHERE inbetriebn IS NOT NULL " + str_sql_statement_bound_cond + \
              ") AS wp, " + \
          i_dict_boundary['dict_boundary_geodb']['str_schema'] + \
            "." + i_dict_boundary['dict_boundary_geodb']['str_data'] + ''' AS adm 
          WHERE ST_CONTAINS(adm.geom,wp.geom) AND
                adm.bez_gem IS NOT NULL
          GROUP BY adm.bez_nat, adm.bez_lan, adm.bez_krs, adm.bez_gem;'''
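
        #same selection again, reduced to the distinct administrative areas
        #(nation, state, district, municipality) that it touches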

        #print str_sql_statement

        #retrieve administration data
        try:
            cur_rli_db.execute(str_sql_statement)
            arr_administration = [row for row in cur_rli_db.fetchall()]
            Info("Administration areas successfully retrieved.", 1,
                 dict_console_info)
            Info("Number of rows in administration areas:   " + \
              str(len(arr_administration)), 3, dict_console_info)
        except Exception as e:
            arr_error.append("Error: " + e.pgerror)
        else:
            if arr_administration == []:
                arr_error.append(
                    "Error: Selection of administration data yielded no results!"
                )

    else:
        #without a boundary filter no administration query is run
        arr_administration = []

        #compose sql statement for register retrieval
        str_sql_statement = '''
          SELECT register.*, grid.gridcell_id, grid.x, grid.y 
          FROM ''' + \
              i_dict_register_info['str_schema'] + "." + \
              i_dict_register_info['str_data'] + " AS register, " + \
              i_dict_weather_grid_info[
                i_str_weather_dataset]['str_schema'] + '.' + \
              i_dict_weather_grid_info[
                i_str_weather_dataset]['str_data'] + " AS grid " + \
              str_sql_statement_bound_src + \
          "WHERE inbetriebn IS NOT NULL AND ST_CONTAINS(grid." + \
          i_dict_weather_grid_info[i_str_weather_dataset][
            'str_geom_column'] + ", register.geom) " + \
          str_sql_statement_bound_cond + \
          "ORDER BY wp.gridcell_id;"

        #print str_sql_statement

        #retrieve register data
        try:
            cur_rli_db.execute(str_sql_statement)
            arr_register = [row for row in cur_rli_db.fetchall()]
            Info("Register successfully retrieved.", 1, dict_console_info)
            Info("Number of rows in register:   " + \
              str(len(arr_register)), 3, dict_console_info)
        except Exception as e:
            arr_error.append("Error: " + e.pgerror)
        else:
            if arr_register == []:
                arr_error.append(
                    "Error: Selection of register data yielded no results!")

        #print [x[15] for x in arr_register]

    #compose sql statement for gridcell_id retrieval
    str_sql_statement = '''
      SELECT grid.gridcell_id, grid.x, grid.y 
      FROM ''' + \
          i_dict_register_info['str_schema'] + "." + \
          i_dict_register_info['str_data'] + " AS register, " + \
          i_dict_weather_grid_info[
            i_str_weather_dataset]['str_schema'] + '.' + \
          i_dict_weather_grid_info[
            i_str_weather_dataset]['str_data'] + " AS grid " + \
          str_sql_statement_bound_src + \
      "WHERE  inbetriebn IS NOT NULL AND ST_CONTAINS(grid." + \
        i_dict_weather_grid_info[i_str_weather_dataset][
          'str_geom_column'] +", register.geom) " + \
          str_sql_statement_bound_cond + \
      '''GROUP BY grid.gridcell_id, grid.x, grid.y  
      ORDER BY grid.gridcell_id;'''
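
    #the same register/grid join once more, collapsed to the distinct
    #weather grid cells of the selection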

    #print str_sql_statement

    #retrieve array of unique gridcell_ids
    try:
        cur_rli_db.execute(str_sql_statement)
        arr_gridcell_id = [row for row in cur_rli_db.fetchall()]
        Info("Gridcell Ids successfully retrieved.", 1, dict_console_info)
        Info("Number of unique gridcell_id: " + \
          str(len(arr_gridcell_id)), 3, dict_console_info)
    except Exception as e:
        arr_error.append("Error: " + e.pgerror)
    else:
        if arr_gridcell_id == []:
            arr_error.append(
                "Error: Selection of gridcell_id data yielded no results!")

    #print [x[0] for x in arr_gridcell_id]

    #compose sql statement for power curves retrieval
    str_sql_statement = '''
      SELECT rli_anlagen_id, p_nenn, "1", "2", "3", "4", "5", "6", "7",
          "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", 
          "19", "20", "21", "22", "23", "24", "25" 
      FROM ''' + \
          i_dict_performance_curves_info['power_curves']['str_schema'] + "." +\
          i_dict_performance_curves_info['power_curves']['str_data'] + \
      ' WHERE "0" IS NOT NULL ORDER BY rli_anlagen_id'

    #retrieve power curve data
    try:
        cur_rli_db.execute(str_sql_statement)
        arr_P = [row for row in cur_rli_db.fetchall()]
    except Exception as e:
        arr_error.append("Error: " + e.pgerror)
    else:
        if arr_P == []:
            arr_error.append(
                "Error: Selection of power curve data yielded no results!")

    #retrieve turbine type specific yearly shares
    if i_dict_powercurve_mix['source']['method'] == 'weighted_average':
        #compose sql statement for powercurves shares retrieval
        str_sql_statement = '''
          SELECT * 
          FROM ''' + \
              i_dict_powercurve_mix['source']['str_schema'] + "." +\
              i_dict_powercurve_mix['source']['str_data'] + \
          ' ORDER BY rli_anlagen_id'
        #retrieve powercurve shares data; initialise first so the return
        #value is defined even if the query fails
        arr_P_shares = []
        try:
            cur_rli_db.execute(str_sql_statement)
            arr_P_shares = [row for row in cur_rli_db.fetchall()]
        except Exception as e:
            arr_error.append("Error: " + e.pgerror)
    else:
        arr_P_shares = []
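
    #the shares presumably weight the turbine-type power curves in the
    #'weighted_average' mix; for any other method no shares are needed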

    #compose sql statement for cp curves retrieval
    str_sql_statement = '''
      SELECT 
          rli_anlagen_id, p_nenn, "1", "2", "3", "4", "5", "6", "7", "8", 
          "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", 
          "20", "21", "22", "23", "24", "25" 
      FROM ''' + \
          i_dict_performance_curves_info['cp_curves']['str_schema'] + \
          "." + i_dict_performance_curves_info['cp_curves']['str_data'] + \
      ' WHERE "0" IS NOT NULL ORDER BY rli_anlagen_id'

    #retrieve cp curve data
    try:
        cur_rli_db.execute(str_sql_statement)
        arr_cp = [row for row in cur_rli_db.fetchall()]
        Info("Performance curves successfully retrieved.", 1,
             dict_console_info)
        Info("Number of cp-curves:    " + str(len(arr_cp)), 3,
             dict_console_info)
        Info("Number of Power-curves: " + str(len(arr_P)), 3,
             dict_console_info)
    except Exception as e:
        arr_error.append("Error: " + e.pgerror)
    else:
        if arr_cp == []:
            arr_error.append(
                "Error: Selection of cp curve data yielded no results!")

    #compose sql statement for roughness length retrieval
    if i_dict_windspeed_corr['height_corr'] != 'linear':
        str_sql_statement = '''
          SELECT 
              grid.gridcell_id, z0.z0 
          FROM (
              SELECT grid.gridcell_id 
              FROM ''' + \
                  i_dict_register_info['str_schema'] + "." + \
                  i_dict_register_info['str_data'] + " AS register, " +\
                  i_dict_weather_grid_info[i_str_weather_dataset][
                    'str_schema'] + '.' + \
                  i_dict_weather_grid_info[i_str_weather_dataset][
                    'str_data'] + " AS grid " + str_sql_statement_bound_src + \
              "WHERE ST_CONTAINS(grid." + \
                i_dict_weather_grid_info[i_str_weather_dataset][
                  'str_geom_column'] +", register.geom) " + \
                str_sql_statement_bound_cond + \
              '''GROUP BY grid.gridcell_id) as grid 
          INNER JOIN ''' + \
              i_dict_z0_info['str_schema'] + "." + i_dict_z0_info['str_data'] +\
          ''' as z0 ON z0.gridcell_id = grid.gridcell_id 
          ORDER BY grid.gridcell_id;'''
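        #join the selected grid cells with the z0 table on gridcell_id to
        #obtain one roughness length per weather cell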
        arr_z0 = []
        try:
            cur_rli_db.execute(str_sql_statement)
            arr_of_rows = [row for row in cur_rli_db.fetchall()]
            grid_cell_id = [x[0] for x in arr_of_rows]
            z_0 = [x[1] for x in arr_of_rows]
            arr_z0 = [grid_cell_id, z_0]  #array of columns
            Info("Roughness lengths successfully retrieved.", 1,
                 dict_console_info)
        except Exception as e:
            arr_error.append("Error: " + e.pgerror)
        else:
            #arr_z0 itself is never [] after a successful query, so check
            #the fetched rows instead
            if arr_of_rows == []:
                arr_error.append(
                    "Error: Selection of roughness length data yielded no results!"
                )
    else:
        #no roughness lengths are needed for a linear height correction
        arr_z0 = []

    #retrieve random input (technical availability)
    if i_dict_random_input['bol_load'] == True:
        try:
            #loadmat is expected to come from scipy.io (module-level import)
            dict_random_input = loadmat(i_dict_random_input['str_load_path'])
            Info("Timeseries of random input was retrieved.", 1,
                 dict_console_info)
        except Exception:
            dict_random_input = {}
    else:
        dict_random_input = {}

    if arr_error != []:
        print arr_error[0]

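    #hand back shallow copies of all result arrays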
    return (arr_error, list(arr_register), list(arr_administration),
            list(arr_gridcell_id), list(arr_cp), list(arr_P),
            list(arr_P_shares), list(arr_z0), dict_random_input)