コード例 #1
0
ファイル: gpximport.py プロジェクト: peletiah/poab_pyramid
def gpxprocess(request):
    """Parse an uploaded GPX file, persist it as a Track and return a link to it."""
    class trkpt:
        # Minimal lat/lon holder for points parsed out of the GPX tree.
        def __init__(self, latitude, longitude):
            self.latitude = latitude
            self.longitude = longitude

    gpx_ns = "http://www.topografix.com/GPX/1/1"
    upload = request.POST['gpx']
    filename = upload.filename
    input_file = upload.file

    root = etree.parse(input_file).getroot()
    # Collect every <trkpt> of every <trkseg> in document order.
    trkptlist = [
        trkpt(point.attrib['lat'], point.attrib['lon'])
        for segment in root.getiterator("{%s}trkseg" % gpx_ns)
        for point in segment
    ]

    reduced_trkpts = reduce_trackpoints(trkptlist)
    json_string = create_json_for_db(reduced_trkpts)
    track = Track(timetools.now(), len(trkptlist), 0, '00:00:00',
                  None, None, None, None, None, None, None, json_string)
    DBSession.add(track)
    DBSession.flush()
    #raise HTTPFound(request.route_url('track','fromgpx',track.id))
    route = request.route_url('track', 'fromgpx', track.id)
    response = Response('<a href="%s">%s</a>' % (route, route))
    response.content_type = 'text/html'
    return(response)
コード例 #2
0
ファイル: offline_sync.py プロジェクト: peletiah/poab_pyramid
def imagesync(request):
    sync_status='sync_error'
    print request.POST.keys()
    image_json = request.POST.get('image_json')
    log_json = json.loads(request.POST.get('log').value)
    image_bin = request.POST.get('image_bin')

    image_json = json.loads(image_json.value)
    author = Author.get_author(image_json['author']['name'])


    image = Image.get_image_by_uuid(image_json['uuid']) #does image with this uuid(from json-info) already exist in our db
    if not image:

        basedir = '/srv/trackdata/bydate'
        img_prvw_w='500'
        img_large_w='990'
        created=datetime.strptime(log_json['created'], "%Y-%m-%d %H:%M:%S") #we use the timestamp from log_json['created'] for the image-location
        datepath=created.strftime("%Y-%m-%d")
        filedir = filetools.createdir(basedir, author.name, datepath)
        imgdir = filedir+'images/sorted/'

        filehash = filetools.safe_file(imgdir, image_json['name'], image_bin.value)

        imagetools.resize(imgdir, imgdir+img_prvw_w+'/', image_json['name'], img_prvw_w)
        imagetools.resize(imgdir, imgdir+img_large_w+'/',image_json['name'] , img_large_w) #TODO: what happens when a 990px-wide img was uploaded?


        hash_large=hashlib.sha256(open(imgdir+img_large_w+'/'+image_json['name'], 'rb').read()).hexdigest() #TODO
        filehash=hashlib.sha256(open(imgdir+'/'+image_json['name'], 'rb').read()).hexdigest() #TODO
        image = Image(
                    name = image_json['name'], 
                    location = imgdir, 
                    title = image_json['title'],
                    comment = image_json['comment'],
                    alt = image_json['alt'],
                    aperture = image_json['aperture'],
                    shutter = image_json['shutter'],
                    focal_length = image_json['focal_length'],
                    iso = image_json['iso'],
                    timestamp_original = image_json['timestamp_original'],
                    hash = filehash,
                    hash_large = hash_large, #TODO: we need the real file's hash if 990px was uploaded and not converted
                    author = author.id,
                    trackpoint = None,
                    last_change = timetools.now(),
                    published = timetools.now(),
                    uuid = image_json['uuid']
                    )
        DBSession.add(image)
        DBSession.flush()
        sync_status = 'is_synced'

    else:
        #TODO: So our image is actually in the db - why has this been found earlier in sync?type=status??? 
        print 'ERROR: Image found in DB, but this should have happened in /sync?type=status'
        sync_status='sync_error'
    
    return Response(json.dumps({'log_id' : log_json['id'], 'type':'image', 'item_uuid':image_json['uuid'], 'sync_status':sync_status})) #Something went very wrong
コード例 #3
0
ファイル: gpxtools.py プロジェクト: peletiah/poab_pyramid
def parse_trackpoints(trackpoints, gpx_ns):
    """Persist each GPX trackpoint element as a bare Trackpoint row (lat/lon/time)."""
    for element in trackpoints:
        latitude = element.attrib['lat']
        longitude = element.attrib['lon']
        elevation = element.find('{%s}ele' % gpx_ns).text
        # GPX times look like "2010-09-02T12:34:56Z"; normalize to
        # "YYYY-MM-DD HH:MM:SS" before parsing.
        raw_time = element.find('{%s}time' % gpx_ns).text
        timestamp = datetime.datetime.strptime(raw_time.replace('T', ' ')[:-1],
                                               "%Y-%m-%d %H:%M:%S")
        row = Trackpoint(track_id=None, latitude=latitude, longitude=longitude,
                         altitude=None, velocity=None, temperature=None,
                         direction=None, pressure=None, timestamp=timestamp)
        DBSession.add(row)
        DBSession.flush()
コード例 #4
0
ファイル: log.py プロジェクト: peletiah/poab_pyramid
def get_logs_by_trackpoints(trackpoints, exclude_log_id=29):
    """Return Log rows whose infomarker is one of *trackpoints*, oldest first.

    :param trackpoints: iterable of objects with an ``id`` attribute
    :param exclude_log_id: log id to skip; the default 29 preserves the
        historical hard-coded exclusion #TODO: document why log 29 is special
    :return: list of Log rows ordered by ``published`` ascending
    """
    trkpt_ids = [trackpoint.id for trackpoint in trackpoints]
    q = DBSession.query(Log).filter(and_(Log.infomarker_id.in_(trkpt_ids),
                                         Log.id != exclude_log_id))
    return q.order_by(asc(Log.published)).all()
コード例 #5
0
ファイル: log.py プロジェクト: peletiah/poab_pyramid
def svg_view(request):
    """Resolve the country for the SVG map view; country_id 0 means the whole world."""
    country_id = int(request.matchdict['country_id'])
    if country_id == 0:
        return {'country': 'world'}
    country = DBSession.query(Country).filter(
        Country.iso_numcode == country_id).one()
    return {'country': country.iso_numcode}
コード例 #6
0
ファイル: offline_sync.py プロジェクト: peletiah/poab_pyramid
def interlink_log(request):
    log_json =  json.loads(request.POST.get('log_json'))
    log = Log.get_log_by_uuid(log_json['uuid'])
    latest_timestamp = datetime.strptime('1970-01-01', '%Y-%m-%d')
    #Link to Tracks
    for track in log_json['tracks']:
        track = Track.get_track_by_uuid(track['uuid'])
        print '############### track.id #########'
        print track.id
        print '################ track.id end #########'
        log.track.append(track)
        #find the latest trackpoint-timestamp related to this log
        #this will be the trackpoint linked to log as infomarker
        if track.trackpoints[0].timestamp > latest_timestamp:
            log.infomarker = track.trackpoints[0].id
            latest_timestamp = track.trackpoints[0].timestamp
    print log_json['tracks']
    if not log_json['tracks']:
        log.infomarker = 3572 #TODO
 
    #Get location for infomarker
    location = Location(name = None, trackpoint_id = None, country_id = None)
    location.name = flickrtools.findplace(log.trackpoint_log_ref.latitude, log.trackpoint_log_ref.longitude, 11, log.author_log_ref)
    location.trackpoint_id = log.trackpoint_log_ref.id,
    location.country_id = flickrtools.get_country_by_lat_lon(log.trackpoint_log_ref.latitude, log.trackpoint_log_ref.longitude, log.author_log_ref).iso_numcode
    #print '\n\n\n\n\n\n'+location.name
    print '\n\n\n\n\n'
    DBSession.add(location)
 
    #Link to Images
    for image in log_json['images']:
        image = Image.get_image_by_uuid(image['uuid'])
        log.image.append(image)
 
    content_with_uuid_tags = log.content
    #print content_with_uuid_tags
    img_uuid_list = re.findall("(\[img_uuid=[0-9A-Za-z-]{1,}\])", content_with_uuid_tags)
    #regex matches A-Z, a-z, 0-9 and "-", e.g. "0eb92a91-3a92-4707-be6e-1907f6c0829"
    print img_uuid_list
    for img_uuid_tag in img_uuid_list:
        img_uuid = re.search("^\[img_uuid=([0-9A-Za-z-]{1,})\]$",img_uuid_tag).group(1)
        image = Image.get_image_by_uuid(img_uuid)
        if image:
            content_with_uuid_tags=content_with_uuid_tags.replace(img_uuid_tag,('[imgid=%s]') % image.id)
    log.content = content_with_uuid_tags
    DBSession.add(log)
    return Response(json.dumps({'link_status':'linked', 'item_uuid': log.uuid}))
コード例 #7
0
ファイル: view.py プロジェクト: peletiah/poab_pyramid
def fetch_images_for_trackpoints(q):
    """Return all Image rows attached to the trackpoints produced by query *q*,
    ordered by original capture time."""
    trkpt_ids = [trackpoint.id for trackpoint in q.all()]
    image_query = DBSession.query(Image).filter(and_(Image.trackpoint.in_(trkpt_ids)))
    return image_query.order_by(asc(Image.timestamp_original)).all()
コード例 #8
0
ファイル: gpxtools.py プロジェクト: peletiah/poab_pyramid
def parse_trackpoints(trackpoints, gpx_ns):
    """Store each GPX <trkpt> element as a minimal Trackpoint row."""
    ele_tag = '{%s}ele' % gpx_ns
    time_tag = '{%s}time' % gpx_ns
    for node in trackpoints:
        elevation = node.find(ele_tag).text
        # "2010-09-02T12:34:56Z" -> "2010-09-02 12:34:56"
        cleaned = node.find(time_tag).text.replace('T', ' ')[:-1]
        parsed_time = datetime.datetime.strptime(cleaned, "%Y-%m-%d %H:%M:%S")
        record = Trackpoint(track_id=None,
                            latitude=node.attrib['lat'],
                            longitude=node.attrib['lon'],
                            altitude=None,
                            velocity=None,
                            temperature=None,
                            direction=None,
                            pressure=None,
                            timestamp=parsed_time)
        DBSession.add(record)
        DBSession.flush()
コード例 #9
0
ファイル: offline_sync.py プロジェクト: peletiah/poab_pyramid
def interlink_image(request):
    image_json = json.loads(request.POST.get('image_json'))
    print image_json['id']
    print image_json['name']
    print image_json['trackpoint']
    image = Image.get_image_by_uuid(image_json['uuid'])
    try:
        trackpoint = Trackpoint.get_trackpoint_by_uuid(image_json['trackpoint']['uuid'])
    except:
        trackpoint = None
    location = None
    if trackpoint:
        print trackpoint
        print image
        image.trackpoint = trackpoint.id
    #Get location for image.trackpoint
        location = Location(name = None, trackpoint_id = None, country_id = None)
        location.name = flickrtools.findplace(image.trackpoint_img_ref.latitude, image.trackpoint_img_ref.longitude, 11, image.author_img_ref)
        location.trackpoint_id = image.trackpoint_img_ref.id,
        location.country_id = flickrtools.get_country_by_lat_lon(image.trackpoint_img_ref.latitude, image.trackpoint_img_ref.longitude, image.author_img_ref).iso_numcode
    if not image.image_flickr_ref:
        print '\n\n\n\n\n\n\n'+str(image.id)
        print '\n\n\n\n\n\n\n'
        farm,server,photoid,secret,originalsecret,originalformat = flickrtools.uploadimage(image, image.author_img_ref, '')
        flickrimage = FlickrImage(image = image.id, farm = farm, server = server, photoid = photoid, secret = secret)
        DBSession.add(flickrimage)
    DBSession.add(image)
    if location: #TODO(Ugly?)
        DBSession.add(location)
    

    return Response(json.dumps({'link_status':'linked', 'item_uuid': image.uuid}))
コード例 #10
0
ファイル: flickrtools.py プロジェクト: peletiah/poab_pyramid
def get_country_by_lat_lon(lat,lon,author):
    accuracy=1 #level of region-detail in flickr, 1 is world, 8 is district
    flickr_countryname=findplace(lat,lon,accuracy,author)
    if len(flickr_countryname.split(',')) > 1:
            flickr_countryname=flickr_countryname.split(',')[-1].replace(' ','')
    print "flickr_countryname: "+str(flickr_countryname)
    if flickr_countryname !=None:
        country=DBSession.query(Country).filter(Country.flickr_countryname==flickr_countryname).one()
        print 'country found - id:'+ str(country.iso_numcode) + ' - details:' + str(country)
    else:
        print "no country found, returning dummy country!"
        query_country=session.query(db_country).filter(db_country.iso_numcode==1)
        country=query_country.one()
    return country
コード例 #11
0
ファイル: offline_sync.py プロジェクト: peletiah/poab_pyramid
def logsync(request):
    sync_status='sync_error'
    log_json = json.loads(request.POST.get('log_json').value)
    print log_json
    etappe_json = log_json['etappe']
    #TODO: might be better with dates instead of uuid
    etappe = Etappe.get_etappe_by_uuid(etappe_json['uuid']) 
    if not etappe:
        etappe = Etappe(
                start_date = etappe_json['start_date'],
                end_date = etappe_json['end_date'],
                name = etappe_json['name'],
                uuid = etappe_json['uuid']
                )
        DBSession.add(etappe)
        DBSession.flush()

    log = Log.get_log_by_uuid(log_json['uuid'])
    if not log:
        print 'No log found, adding new log.'
        print 'Author: '+log_json['author']
        author = Author.get_author(log_json['author'])
        print author
        log = Log(
                infomarker = None,
                topic=log_json['topic'],
                content=log_json['content'],
                author=author.id,
                etappe=etappe.id,
                created=log_json['created'],
                published=timetools.now(),
                uuid=log_json['uuid']
                )
        DBSession.add(log)
        DBSession.flush()
        sync_status = 'is_synced'; #Item was not synced before we started
    elif log: #TODO: Updating log, needs last_change comparison and stuff
        print 'Log already exists on server'
        sync_status = 'was_synced' #Item was already on the server earlier        
    else:
        sync_status = 'sync_error' #something is wrong here!
    return Response(json.dumps({'log_id':log_json['id'], 'type':'log', 'item_uuid':log_json['uuid'], 'sync_status':sync_status}))
コード例 #12
0
ファイル: track.py プロジェクト: peletiah/poab_pyramid
def json_track_view(request):
    """Render tracks (or a single trackpoint) as a JSON response.

    Dispatches on the ``action`` matchdict entry:
      * ``c`` with id 0 -- all tracks after 2010-09-02 (except track 141)
      * ``c``           -- tracks having an infomarker in country *id*
      * ``infomarker``  -- the track the given infomarker trackpoint belongs to
      * ``fromgpx``     -- the track with the given id
      * ``simple``      -- a single trackpoint
      * anything else   -- tracks between 2010-09-02 and 2010-12-07
    """
    # Narrowed from bare except: only missing/unparseable matchdict entries
    # fall back to the defaults.
    try:
        action = request.matchdict['action']
    except KeyError:
        action = 'c'
    try:
        item_id = int(request.matchdict['id'])  # renamed from `id` (shadowed builtin)
    except (KeyError, TypeError, ValueError):
        item_id = 0
    if action == 'c' and item_id == 0:
        after_date = datetime.datetime.strptime('2010-09-02', "%Y-%m-%d")
        tracks = DBSession.query(Track).filter(and_(Track.start_time > after_date, Track.id != 141)).all()
        response = Response(generate_json_from_tracks(tracks))
    elif action == 'c':
        trackpoints = DBSession.query(Trackpoint).filter(and_(Trackpoint.country_id == item_id, Trackpoint.infomarker == True)).all()
        track_ids = [trackpoint.track_id for trackpoint in trackpoints]
        tracks = DBSession.query(Track).filter(Track.id.in_(track_ids)).all()
        response = Response(generate_json_from_tracks(tracks))
    elif action == 'infomarker':
        infomarker = DBSession.query(Trackpoint).filter(Trackpoint.id == item_id).one()
        tracks = DBSession.query(Track).filter(Track.id == infomarker.track_id).all()
        response = Response(generate_json_from_tracks(tracks))
    elif action == 'fromgpx':
        tracks = DBSession.query(Track).filter(Track.id == item_id).all()
        response = Response(generate_json_from_tracks(tracks))
    elif action == 'simple':
        trackpoint = DBSession.query(Trackpoint).filter(Trackpoint.id == item_id).one()
        response = Response(generate_json_from_trackpoint(trackpoint))
    else:
        after_date = datetime.datetime.strptime('2010-09-02', "%Y-%m-%d")
        before_date = datetime.datetime.strptime('2010-12-07', "%Y-%m-%d")
        tracks = DBSession.query(Track).filter(and_(Track.date > after_date, Track.date < before_date)).all()
        response = Response(generate_json_from_tracks(tracks))
    response.content_type = 'application/json'
    return(response)
コード例 #13
0
ファイル: offline_sync.py プロジェクト: peletiah/poab_pyramid
def tracksync(request):
    sync_status='sync_error'
    print request.POST.keys()
    track_json = json.loads(request.POST.get('track'))
    print track_json['distance']
    log_json = json.loads(request.POST.get('log_json'))
    print '\n'
    print track_json['author']

    author = Author.get_author(track_json['author'])


    track = Track.get_track_by_uuid(track_json['uuid']) #does track with this uuid(from json-info) already exist in our db
    if not track:
        print '\n\n\n'
        print 'Track not found by uuid %s!' %track_json['uuid']
        print '\n\n\n'
        track = Track(
                    reduced_trackpoints = json.loads(track_json['reduced_trackpoints']),
                    distance = track_json['distance'],
                    timespan = track_json['timespan'],
                    trackpoint_count = track_json['trackpoint_count'],
                    start_time = track_json['start_time'],
                    end_time = track_json['end_time'],
                    color = track_json['color'],
                    author = author.id,
                    etappe = None,
                    uuid = track_json['uuid']
                    )
        DBSession.add(track)
        DBSession.flush()
        for trackpoint_json in track_json['trackpoints']:
            trackpoint_in_db = Trackpoint.get_trackpoint_by_lat_lon_time(trackpoint_json['latitude'], \
                                        trackpoint_json['longitude'], trackpoint_json['timestamp'])
            if not trackpoint_in_db:
                print trackpoint_json
                trackpoint = Trackpoint(
                                    track_id = track.id,
                                    latitude = trackpoint_json['latitude'],
                                    longitude = trackpoint_json['longitude'],
                                    altitude = trackpoint_json['altitude'],
                                    velocity = trackpoint_json['velocity'],
                                    temperature = trackpoint_json['temperature'],
                                    direction = trackpoint_json['direction'],
                                    pressure = trackpoint_json['pressure'],
                                    timestamp = trackpoint_json['timestamp'],
                                    uuid = trackpoint_json['uuid']
                                    )
                DBSession.add(trackpoint)
                DBSession.flush()

        sync_status = 'is_synced'

    elif track:
        print 'was_synced'
        sync_status = 'was_synced'
    else:
        print 'sync_error'
        sync_status = 'sync_error'
    
    return Response(json.dumps({'log_id':log_json['id'], 'type':'track', 'item_uuid':track_json['uuid'], 'sync_status':sync_status}))
コード例 #14
0
        self.completer = rlcompleter.Completer(namespace=self.locals)
        readline.set_completer(self.completer.complete)
        if 'libedit' in readline.__doc__:
            readline.parse_and_bind("bind ^I rl_complete")
        else:
            readline.parse_and_bind("tab: complete")
 
    def init_history(self, histfile):
        """Load readline history from *histfile* (if the readline build
        supports it) and arrange for it to be saved at interpreter exit."""
        if not hasattr(readline, "read_history_file"):
            return
        try:
            readline.read_history_file(histfile)
        except IOError:
            # No history file yet -- that's fine on first run.
            pass
        atexit.register(self.save_history, histfile)
 
    def save_history(self, histfile):
        """Persist the current readline history to *histfile* (registered via
        atexit in init_history)."""
        readline.write_history_file(histfile)


# Bootstrap the Pyramid environment from the development config and bind the
# SQLAlchemy engine so the interactive shell can use the application's models.
env = bootstrap('../development.ini')
engine=engine_from_config(env['registry'].settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
ic = SQLAlchemyShell()
# Pre-import the models into the shell's namespace, echoing the command as if
# the user had typed it.
cmd = "from poab.models import *"
print ">>>", cmd
ic.push(cmd)
ic.interact(banner="Use quit() or Ctrl-D (i.e. EOF) to exit")



コード例 #15
0
ファイル: copy_db.py プロジェクト: peletiah/poab_pyramid
def copy_db(request):
    """One-off migration that copies rows from the old schema into the new one.

    The TRACK/TRACKPOINT and LOG sections have already been run and are kept
    commented out for reference; only the IMAGE section below is active.  For
    each old Imageinfo row it copies/resizes the image files into the new
    directory layout, creates Image and FlickrImage rows, and re-links the
    image to its log.
    """
    #tracks_old = DBSession.query(TrackOld).filter(TrackOld.id == 141).all()

#TRACK/TRACKPOINT

#    #tracks_old = DBSession.query(TrackOld).filter(TrackOld.id == 141).all()
#    tracks_old = DBSession.query(TrackOld).all()
#    print tracks_old
#    for track_old in tracks_old:
#        track_query = DBSession.query(Track).filter(Track.id == track_old.id)
#        if track_query.count() < 1:
#            start_time = track_old.date
#            end_time = track_old.date
#            trackpoint_count = track_old.trkptnum
#            distance = str(track_old.distance)
#            timespan = track_old.timespan
#            color = track_old.color 
#            track = Track(
#                id = track_old.id,
#                start_time = start_time,
#                end_time = end_time,
#                trackpoint_count = trackpoint_count,
#                distance = distance,
#                timespan = timespan,
#                color = color,
#                reduced_trackpoints = None,
#                author = 1,
#                etappe = 1,
#                uuid = None
#                )
#            DBSession.add(track)
#            print track_old.distance
#            print track.distance
#            trackpoints_old = DBSession.query(TrackpointOld).filter(TrackpointOld.track_id == track.id).all()
#            trackpoint_list=list()
#            for trackpoint_old in trackpoints_old:
#                #print trackpoint_old.altitude,trackpoint_old.velocity,trackpoint_old.temperature,trackpoint_old.direction,trackpoint_old.pressure
#                trackpoint = Trackpoint(
#                                id = trackpoint_old.id,
#                                track_id = track.id,
#                                latitude = trackpoint_old.latitude,
#                                longitude = trackpoint_old.longitude,
#                                altitude = trackpoint_old.altitude,
#                                velocity = trackpoint_old.velocity,
#                                temperature = trackpoint_old.temperature,
#                                direction = trackpoint_old.direction,
#                                pressure = trackpoint_old.pressure,
#                                timestamp = trackpoint_old.timestamp,
#                                uuid = None
#                                )
#                DBSession.add(trackpoint)
#                if trackpoint_old.location or trackpoint_old.country_id:
#                    location = Location(
#                            trackpoint_id = trackpoint.id,
#                            country_id = trackpoint_old.country_id,
#                            name = trackpoint_old.location
#                            )
#                    DBSession.add(location)
#                trackpoint_list.append(trackpoint)
#            
#            trackpoint_list.sort(key = lambda trackpoint: trackpoint.timestamp)
#            if track_old.id != 140 and track_old.id != 145:
#                reduced_trkpts=reduce_trackpoints(trackpoint_list, 0.0002)
#                track.reduced_trackpoints = reduced_trkpts
#                print reduced_trkpts
#            DBSession.add(track)
#        else:
#            track = track_query.one()
#
#    #TRACKPOINTS with Track
#    trackpoints_unlinked = DBSession.query(TrackpointOld).filter(TrackpointOld.track_id == None).all()
#    trackpoint_list=list()
#    for trackpoint_unlinked in trackpoints_unlinked:
#        #print trackpoint_old.altitude,trackpoint_old.velocity,trackpoint_old.temperature,trackpoint_old.direction,trackpoint_old.pressure
#        trackpoint = Trackpoint(
#                        id = trackpoint_unlinked.id,
#                        track_id = track.id,
#                        latitude = trackpoint_unlinked.latitude,
#                        longitude = trackpoint_unlinked.longitude,
#                        altitude = trackpoint_unlinked.altitude,
#                        velocity = trackpoint_unlinked.velocity,
#                        temperature = trackpoint_unlinked.temperature,
#                        direction = trackpoint_unlinked.direction,
#                        pressure = trackpoint_unlinked.pressure,
#                        timestamp = trackpoint_unlinked.timestamp,
#                        uuid = None
#                        )
#        DBSession.add(trackpoint)
#        if trackpoint_unlinked.location or trackpoint_unlinked.country_id:
#            location = Location(
#                    trackpoint_id = trackpoint.id,
#                    country_id = trackpoint_unlinked.country_id,
#                    name = trackpoint_unlinked.location
#                    )
#            DBSession.add(location)
#
#
##LOG
#
#    #logs_old = DBSession.query(LogOld).filter(LogOld.id==446).all()
#    logs_old = DBSession.query(LogOld).all()
#    
#    for log_old in logs_old:
#        log_query = DBSession.query(Log).filter(Log.id == log_old.id)
#        if log_query.count() < 1:
#            content = log_old.content
#            imgid_tag_list = re.findall("(\[imgid[0-9A-Za-z-]{1,}\])", content)
#            for tag in imgid_tag_list:
#                print tag
#                imgid=re.search("^\[imgid([0-9A-Za-z-]{1,})\]$",tag).group(1)
#                content=content.replace(tag,('[imgid=%s]') % imgid)
#            log = Log(
#                    id = log_old.id,
#                    infomarker = log_old.infomarker_id,
#                    topic = log_old.topic,
#                    content = content,
#                    created = log_old.createdate,
#                    etappe = 1,
#                    author = 1,
#                    uuid = None
#                )
#            DBSession.add(log)
#            DBSession.flush()
#        else:
#            log=log_query.one()
#        track = DBSession.query(Track).filter(Track.id == log.trackpoint_log_ref.track_id).one()
#        log.track.append(track)
#        
#
#IMAGE

    #images_old = DBSession.query(Imageinfo).filter(Imageinfo.log_id==446).all() 
    images_old = DBSession.query(Imageinfo).all() 
    for image_old in images_old:
        # Only migrate images that do not already exist in the new schema.
        image_query = DBSession.query(Image).filter(Image.id == image_old.id)
        if image_query.count() < 1:

            name = image_old.imgname.split('/')[-1]
            location_old = image_old.imgname
            location_old_prefix = '/srv'
            #location_old_prefix = '/media/backup2/images/images_backup2/srv'
            basedir = '/srv/trackdata/bydate'
            img_large_w='990' #width of images in editor-preview
            img_medium_w='500' #width of images in editor-preview
            img_thumb_w='150' #width of images in editor-preview

            print location_old.split('/')[-5]
            # NOTE(review): os.popen builds shell commands from file paths --
            # acceptable for this one-off migration over trusted data, but
            # subprocess with an argument list would be safer.
            if re.findall("best",location_old):
                location_old_fullsize = location_old_prefix+location_old.replace(location_old.split('/')[-2]+'/','best/')
                location_new = filetools.createdir('/srv/trackdata/bydate','christian',location_old.split('/')[-4])+'images/sorted/'
                os.popen('/bin/cp %s %s' %(location_old_fullsize, location_new))
                imagetools.resize(location_new, location_new+img_large_w+'/', name, img_large_w)
            else:
                location_old_fullsize = location_old_prefix+location_old.replace(location_old.split('/')[-2]+'/','')
                location_new = filetools.createdir('/srv/trackdata/bydate','christian',location_old.split('/')[-5])+'images/sorted/'
                os.popen('/bin/cp %s%s %s%s/' %(location_old_prefix, location_old, location_new, img_large_w))
                os.popen('/bin/cp %s %s' %(location_old_fullsize, location_new))
                
            print location_old_prefix+location_old
            print location_old_fullsize
            print location_new
            imagetools.resize(location_new, location_new+img_medium_w+'/', name, img_medium_w)
            imagetools.resize(location_new, location_new+img_thumb_w+'/', name, img_thumb_w)
            image = Image(
                    id = image_old.id,
                    name = name,
                    location = location_new,
                    title = image_old.flickrtitle,
                    comment = image_old.flickrdescription,
                    alt = None,
                    aperture = image_old.aperture,
                    shutter = image_old.shutter,
                    focal_length = image_old.focal_length,
                    iso = image_old.iso,
                    timestamp_original = image_old.flickrdatetaken,
                    hash = image_old.photohash,
                    hash_large = image_old.photohash_990, #TODO: we need the real file's hash if 990px was uploaded and not converted
                    author = 1,
                    trackpoint = image_old.trackpoint_id,
                    last_change = timetools.now(),
                    published = timetools.now(),
                    uuid = None
                    )
            DBSession.add(image)
            DBSession.flush()
            flickr = FlickrImage(
                image = image.id,
                farm = image_old.flickrfarm,
                server = image_old.flickrserver,
                photoid = image_old.flickrphotoid,
                secret = image_old.flickrsecret
            )
            DBSession.add(flickr)
            DBSession.flush()
        else:
            image = image_query.one()
        # Re-link the (new or pre-existing) image to its log.
        log=DBSession.query(Log).filter(Log.id == image_old.log_id).one()
        log.image.append(image)
        

    return Response('ok')
コード例 #16
0
ファイル: log.py プロジェクト: peletiah/poab_pyramid
def log_view(request):
    """Render a paginated listing of travel-log entries.

    Route parameters (read from ``request.matchdict``, each optional):
      - ``action``: 'c' selects logs by country id, 'id' a single log;
        defaults to 'c' when absent.
      - ``id``: country id (for action 'c') or log id (for action 'id');
        defaults to 0, which means "all logs".
      - ``page``: page number; when absent the last page is shown.

    Returns the template context dict: the full page structure, the current
    page index, a list of per-log display helpers (``Logdetails``), plus
    request/action/id and the country of the last rendered log.
    """
    # --- route-parameter parsing; any failure falls back to a default ---
    try:
        action=request.matchdict['action']
    except:
        action='c'
    try:
        id=int(request.matchdict['id'])
    except:
        id=0
    try:
        page_number=int(request.matchdict['page'].replace('/',''))
    except:
        page_number=None
    # --- work out which page to show (3 logs per page) ---
    if id==0 and page_number==None:
        # No explicit page requested: default to the last page.
        q = DBSession.query(Log).order_by(Log.published)
        log_count = q.count()
        # Fraction trick: log_count/3 rendered as an exact float string, so
        # the decimal part tells whether the count divides evenly by three.
        page_fract=float(Fraction(str(log_count)+'/3'))
        #print '\n\n\n PAGE FRACT'
        #print page_fract
        if int(str(page_fract).split('.')[1])==0:
            # Evenly divisible: last page index is quotient - 1.
            page=int(str(page_fract).split('.')[0])-1
        else:
            # Not divisible: truncate. NOTE(review): this branch leaves
            # `page` as a str; it is normalized via int(page) below.
            page=str(page_fract).split('.')[0]
    elif page_number==None:
        page=0
    else:
        page=page_number
    #navstring=countryDetails(model,id)
    curr_page=int(page)
    #return { 'bla': log_count}
    # --- select the logs to display, depending on action/id ---
    if action=='c' and id==0:
        #TODO do we really want to query all logs here?
        ##trackpoints = DBSession.query(Trackpoint).filter(Trackpoint.infomarker==True).all()
        ##country_id=id
        ##logs=get_logs_by_trackpoints(trackpoints)
        logs = DBSession.query(Log).order_by(Log.published).all()
    elif action=='c':
        # Logs for one country: resolve its locations -> trackpoints -> logs.
        locations = DBSession.query(Location).filter(Location.country_id==id).all()
        trackpoints = list()
        for location in locations:
            trackpoint = DBSession.query(Trackpoint).filter(Trackpoint.location_ref.contains(location)).all()
            trackpoints.append(trackpoint)
        logs = list()
        for trackpoint in trackpoints:
            #print trackpoint
            # NOTE(review): `trackpoint` is a list here (result of .all()),
            # so this equality filter looks suspect — verify it matches.
            log = DBSession.query(Log).filter(Log.trackpoint_log_ref==trackpoint).all()
            logs.append(log)
    elif action=='id': 
        logs = DBSession.query(Log).filter(Log.id==id).order_by(Log.published).all()
    # --- chunk logs into pages of three, reversed within each page ---
    page_list=list()
    pages_list=list()
    i=0
    for log in logs:
        page_list.append(log)
        i=i+1
        if i==3:
            page_list.reverse()
            pages_list.append(page_list)
            page_list=list()
            i=0
    if i<3 and i>0:
        # Trailing partial page.
        page_list.reverse()
        pages_list.append(page_list)
    # --- build display details for every log on the current page ---
    # NOTE(review): pages_list[curr_page] raises IndexError when there are
    # no logs at all — confirm callers never hit that case.
    logdetaillist=list()
    for log in pages_list[curr_page]:
        twitter = False
        guid = None
        #print log.trackpoint_log_ref
        # ###query for last trackpoint
        ##q = DBSession.query(Trackpoint).filter(and_(Trackpoint.track_id==infomarker.track_id,Trackpoint.id==infomarker.id)).order_by(asc(Trackpoint.timestamp))
        ##lasttrkpt=q.first()
        # ###query if images exist for the log
        #print log.images
        if len(log.images) > 0:
            #creates the infomarker-image_icon-and-ajax-link(fancy escaping for js needed):
            gallerylink="""<span class="image_icon"><a title="Show large images related to this entry" href="/view/log/%s/0"></a></span>""" % (log.id)
        else:
            gallerylink=''
        #print log.tracks
        if len(log.tracks) > 0:
            # ###calculate duration from track-info
            total_seconds = 0
            total_distance = Decimal(0)
            for track in log.tracks:
                total_seconds = total_seconds + track.timespan.seconds
                total_distance = total_distance + Decimal(track.distance)
            total_minutes = total_seconds / 60
            mins = total_minutes % 60 #full minutes left after division by 60
            hours = total_minutes / 60
            timespan = str(hours)+'h '+str(mins)+'min'
            rounded_distance=str(total_distance.quantize(Decimal("0.01"), ROUND_HALF_UP))+'km'
            #print timespan, rounded_distance
        else:
            rounded_distance=None
            timespan=None
        # ###query for timezone and calculate localtime
        try:#TODO: fix timezone querying
            ##q = DBSession.query(Timezone).filter(Timezone.id==infomarker.timezone_id)
            q = DBSession.query(Timezone).filter(Timezone.id==8) #TODO: EEST for testing only
            timezone = q.one()
            localtime=log.created+timezone.utcoffset
        except:
            localtime=log.created
        ## ###query for country and continent #TODO
        #q = DBSession.query(Country).filter(Country.iso_numcode==infomarker.country_id)
        q = DBSession.query(Country).filter(Country.iso_numcode==792) #TODO Turkey for testing only
        country=q.one()
        q = DBSession.query(Continent).filter(Continent.id==country.continent_id)
        continent=q.one()
        # Logs whose topic is a twitter status link are flagged; the status
        # id is extracted as the guid.
        p=re.compile("http://twitter.com/derreisende/statuses/(?P<guid>\d{1,})")
        if p.search(log.topic):
            guid=p.search(log.topic).group("guid")
            twitter=True
        log_content_display=log.content
        # Replace every [imgid=N] placeholder in the log body with the
        # inline-image HTML for image N.
        imgidtags=re.findall('\[imgid=[0-9]*\]',log_content_display)
        #print '\n\n'
        #print imgidtags
        for imgidtag in imgidtags:
                #print imgidtag
                image_id=re.search("^\[imgid=(\d{1,})\]$",imgidtag).group(1)
                print '###############################'
                print image_id
                print '###############################'
                #imageinfo_id=imgidtag[6:-1]
                q = DBSession.query(Image).filter(Image.id==image_id)
                image = q.one()
                #print image
                #print '\n\n'
                #print '\n\n'
                #print image.location
                #print '\n\n'
                #print '\n\n'
                flickr_name = 'peletiah'
                #TODO: flickr-nickname to author-name missing
                if image.author_img_ref.name == 'christian':
                    flickr_name = 'peletiah'
                elif image.author_img_ref.name == 'daniela':
                    flickr_name = 'liveones'
                #print image.id
                # Two HTML variants: with or without a caption span, depending
                # on whether the image has a comment.
                if image.comment:
                    inlineimage='''<div class="log_inlineimage"><div class="imagecontainer"><a href="%s%s%s" title="%s" rel="image_colorbox"><img class="inlineimage" src="%s%s%s%s" alt="%s" /></a><div class="caption">
        <span>&#8594;</span>
            <a href="http://www.flickr.com/%s/%s" target="_blank">www.flickr.com</a>
    </div></div><span class="imagedescription">%s</span></div>''' % ('/static', image.location.replace('/srv',''), image.name, image.title, '/static', image.location.replace('/srv',''), '500/', image.name, image.alt, flickr_name, image.image_flickr_ref[0].photoid, image.comment)
                else:
                    inlineimage='''<div class="log_inlineimage"><div class="imagecontainer"><a href="%s%s%s" title="%s" rel="image_colorbox" ><img class="inlineimage" src="%s%s%s%s" alt="%s" /></a><div class="caption">
        <span>&#8594;</span>
            <a href="http://www.flickr.com/%s/%s" target="_blank">www.flickr.com</a>
    </div></div></div>''' % ('/static', image.location.replace('/srv',''), image.name, image.title, '/static', image.location.replace('/srv',''), '500/', image.name, image.alt, flickr_name, image.image_flickr_ref[0].photoid) #TODO breaks when no flickr-info in db

                #flickrlink_large = 'http://farm%s.static.flickr.com/%s/%s_%s_b.jpg' % (imageinfo.flickrfarm,imageinfo.flickrserver,imageinfo.flickrphotoid,imageinfo.flickrsecret)
                ##image_large = '/static%s' % (imageinfo.imgname) #TODO from flickr or local?
                ##if imageinfo.flickrdescription==None:
                    ##inlineimage='''<div class="log_inlineimage"> <div class="imagecontainer"><a href="%s" title="%s" rel="image_colorbox"><img class="inlineimage" src="http://farm%s.static.flickr.com/%s/%s_%s.jpg" alt="%s" /></a><div class="caption">
        ##<span>&#8594;</span>
        ##    <a href="http://www.flickr.com/peletiah/%s" target="_blank">www.flickr.com</a>
   ## </div></div></div>''' % (image_large,imageinfo.flickrtitle,imageinfo.flickrfarm,imageinfo.flickrserver,imageinfo.flickrphotoid,imageinfo.flickrsecret,imageinfo.flickrtitle,imageinfo.flickrphotoid)
                ##else:
                    ##inlineimage='''<div class="log_inlineimage"><div class="imagecontainer"><a href="%s" title="%s" rel="image_colorbox" ><img class="inlineimage" src="http://farm%s.static.flickr.com/%s/%s_%s.jpg" alt="%s" /></a><div class="caption">
        ##<span>&#8594;</span>
        ##    <a href="http://www.flickr.com/peletiah/%s" target="_blank">www.flickr.com</a>
    ##</div></div><span class="imagedescription">%s</span></div>''' % (image_large,imageinfo.flickrtitle,imageinfo.flickrfarm,imageinfo.flickrserver,imageinfo.flickrphotoid,imageinfo.flickrsecret,imageinfo.flickrtitle,imageinfo.flickrphotoid,imageinfo.flickrdescription)

                log_content_display=log_content_display.replace(imgidtag,inlineimage)
        # Wrap bare http:// URLs in <...> so markdown turns them into links.
        urlfinder = re.compile('^(http:\/\/\S+)')
        urlfinder2 = re.compile('\s(http:\/\/\S+)')
        def urlify_markdown(value):
            value = urlfinder.sub(r'<\1>', value)
            return urlfinder2.sub(r' <\1>', value)
        log_content_display=markdown.markdown(urlify_markdown(log_content_display))
        # Per-log view-model consumed by the template.
        class Logdetails(object):
                def __init__(self, topic, twitter, guid, localtime, published, content, rounded_distance, timezone, timespan, country, continent, lasttrkpt, infomarker, log, gallerylink, author):
                    self.topic=topic
                    self.twitter=twitter
                    self.guid=guid
                    self.created=localtime.strftime('%B %d, %Y')
                    if log.published:
                        self.published=log.published.strftime('%B %d, %Y')
                    else:
                        self.published=None
                    self.content=content
                    # NOTE(review): rounded_distance is an __init__ parameter,
                    # so NameError cannot occur here — the except is dead code.
                    try:
                        self.distance=rounded_distance
                    except NameError:
                        self.distance='-'
                    self.timezoneabbriv=timezone.abbreviation
                    if timespan:
                        self.timespan=timespan
                    else:
                        self.timespan=None
                    self.country=country.iso_countryname
                    self.continent=continent.name
                    self.location=log.trackpoint_log_ref.location_ref[0].name
                    self.infomarkerid=log.trackpoint_log_ref.id
                    self.id=log.id
                    self.gallerylink=gallerylink
                    self.author = log.author_log_ref
        logdetails = Logdetails(log.topic, twitter, guid, localtime, log.published, log_content_display, rounded_distance, timezone, timespan, country, continent, log.trackpoint_log_ref, log.trackpoint_log_ref, log, gallerylink, log.author) #TODO: "log.trackpoint_log_ref, log.trackpoint_log_ref" was originally "infomarker, lasttrkpt"
        logdetaillist.append(logdetails)

    return {
        'pages_list': pages_list,
        'curr_page': int(curr_page),
        'logdetaillist': logdetaillist,
        'request': request,
        'action': action,
        'id': id,
        'country': country
    }
コード例 #17
0
ファイル: feed.py プロジェクト: peletiah/poab_pyramid
def rss_view(request):
    """Build the template context for the Atom feed.

    Fetches the 20 most recently created logs (excluding the log with
    id 508), renders each body through the same [imgid=N] inline-image
    expansion and markdown pipeline as the HTML views, and wraps each
    in a ``Logdetails`` view-model. Sets the response content type to
    ``application/atom+xml``.
    """
    logs = DBSession.query(Log).filter(Log.id != 508).order_by(desc(Log.created)).limit(20)
    logdetaillist=list()
    for log in logs:
        twitter = False
        guid = None
        print log.infomarker
        #q = DBSession.query(Trackpoint).filter(Trackpoint.id==log.infomarker)
        #infomarker=q.one()
        ## ###query for last trackpoint
        #q = DBSession.query(Trackpoint).filter(and_(Trackpoint.track_id==infomarker.track_id,Trackpoint.id==infomarker.id)).order_by(asc(Trackpoint.timestamp))
        #lasttrkpt=q.first()
        #q = DBSession.query(Track).filter(Track.id==infomarker.track_id)
        #if q.count() == 1:
        #    track=q.one()
        #    # ###calculate duration from track-info
        #    total_mins = track.timespan.seconds / 60
        #    mins = total_mins % 60
        #    hours = total_mins / 60
        #    timespan = str(hours)+'h '+str(mins)+'min'
        #    rounded_distance=str(track.distance.quantize(Decimal("0.01"), ROUND_HALF_UP))+'km'
        # Aggregate duration and distance over all tracks of the log.
        if len(log.tracks) > 0:
            # ###calculate duration from track-info
            total_seconds = 0
            total_distance = Decimal(0)
            for track in log.tracks:
                total_seconds = total_seconds + track.timespan.seconds
                total_distance = total_distance + Decimal(track.distance)
            total_minutes = total_seconds / 60
            mins = total_minutes % 60 #full minutes left after division by 60
            hours = total_minutes / 60
            timespan = str(hours)+'h '+str(mins)+'min'
            rounded_distance=str(total_distance.quantize(Decimal("0.01"), ROUND_HALF_UP))+'km'
            print timespan, rounded_distance
        else:
            rounded_distance=None
            timespan=None
        # ###query for timezone and calculate localtime
        # NOTE(review): timezone id 8 is hard-coded (like the HTML views);
        # on query failure the naive created timestamp is used as-is.
        try:
            q = DBSession.query(Timezone).filter(Timezone.id==8)
            timezone = q.one()
            localtime=log.created+timezone.utcoffset
        except:
            localtime=log.created
        # ###query for country and continent
        # NOTE(review): country 792 (Turkey) hard-coded — same TODO as log_view.
        q = DBSession.query(Country).filter(Country.iso_numcode==792)
        country=q.one()
        q = DBSession.query(Continent).filter(Continent.id==country.continent_id)
        continent=q.one()
        # Flag twitter-status topics and extract the status id as guid.
        p=re.compile("http://twitter.com/derreisende/statuses/(?P<guid>\d{1,})")
        if p.search(log.topic):
            guid=p.search(log.topic).group("guid")
            twitter=True
        log_content_display=log.content
        # Expand each [imgid=N] placeholder into inline-image HTML.
        imgidtags=re.findall('\[imgid=[0-9]*\]',log_content_display)
        print '\n\n'
        print '\n\n'
        print imgidtags
        print log.id
        for imgidtag in imgidtags:
                #imageinfo_id=imgidtag[6:-1]
               # print imageinfo_id
               # q = DBSession.query(Imageinfo).filter(Imageinfo.id==imageinfo_id)
               # imageinfo = q.one()
                image_id=re.search("^\[imgid=(\d{1,})\]$",imgidtag).group(1)
                print image_id
                #imageinfo_id=imgidtag[6:-1]
                q = DBSession.query(Image).filter(Image.id==image_id)
                image = q.one()
                #flickrlink_large = 'http://farm%s.static.flickr.com/%s/%s_%s_b.jpg' % (imageinfo.flickrfarm,imageinfo.flickrserver,imageinfo.flickrphotoid,imageinfo.flickrsecret)
                # Variant with caption if the image has a comment.
                if image.comment:
                    inlineimage='''<div class="log_inlineimage"><div class="imagecontainer"><a href="%s%s%s" title="%s" rel="image_colorbox"><img class="inlineimage" src="%s%s%s%s" alt="%s" /></a><div class="caption">
        <span>&#8594;</span>
            <a href="http://www.flickr.com/peletiah/%s" target="_blank">www.flickr.com</a>
    </div></div><span class="imagedescription">%s</span></div>''' % ('/static', image.location.replace('/srv',''), image.name, image.title, '/static', image.location.replace('/srv',''), '500/', image.name, image.alt, image.image_flickr_ref[0].photoid, image.comment)
                else:
                    inlineimage='''<div class="log_inlineimage"><div class="imagecontainer"><a href="%s%s%s" title="%s" rel="image_colorbox" ><img class="inlineimage" src="%s%s%s%s" alt="%s" /></a><div class="caption">
        <span>&#8594;</span>
            <a href="http://www.flickr.com/peletiah/%s" target="_blank">www.flickr.com</a>
    </div></div></div>''' % ('/static', image.location.replace('/srv',''), image.name, image.title, '/static', image.location.replace('/srv',''), '500/', image.name, image.alt, image.image_flickr_ref[0].photoid) #TODO breaks when no flickr-info in db
                log_content_display=log_content_display.replace(imgidtag,inlineimage)
        # Wrap bare http:// URLs in <...> so markdown links them.
        urlfinder = re.compile('^(http:\/\/\S+)')
        urlfinder2 = re.compile('\s(http:\/\/\S+)')
        def urlify_markdown(value):
            value = urlfinder.sub(r'<\1>', value)
            return urlfinder2.sub(r' <\1>', value)
        log_content_display=markdown.markdown(urlify_markdown(log_content_display))
        # Per-entry view-model consumed by the feed template.
        class Logdetails(object):
                def __init__(self, topic, twitter, guid, localtime, content, rounded_distance, timezone, timespan, country, continent, lasttrkpt, infomarker, log):
                    self.topic=topic
                    self.twitter=twitter
                    self.guid=guid
                    # Atom-style timestamp for the feed.
                    self.created=localtime.strftime('%Y-%m-%dT%H:%M:%SZ%Z')
                    self.content=content
                    # NOTE(review): rounded_distance is a parameter, so the
                    # NameError handler is dead code.
                    try:
                        self.distance=rounded_distance
                    except NameError:
                        self.distance='-'
                    self.timezoneabbriv=timezone.abbreviation
                    if timespan:
                        self.timespan=timespan
                    else:
                        self.timespan=None
                    self.country=country.iso_countryname
                    self.continent=continent.name
                    self.location=lasttrkpt.location_ref[0].name
                    self.infomarkerid=log.trackpoint_log_ref.id
                    self.id=log.id
        logdetails = Logdetails(log.topic, twitter, guid, localtime, log_content_display, rounded_distance, timezone, timespan, country, continent, log.trackpoint_log_ref, log.trackpoint_log_ref, log)
        logdetaillist.append(logdetails)
    
    request.response.content_type = "application/atom+xml"
    return {
        'logdetaillist': logdetaillist
    }
コード例 #18
0
    def init_completer(self):
        """Install readline tab-completion over the shell's local namespace."""
        import rlcompleter
        completer = rlcompleter.Completer(namespace=self.locals)
        self.completer = completer
        readline.set_completer(completer.complete)
        # libedit (the macOS readline shim) uses a different key-binding
        # syntax than GNU readline.
        binding = ("bind ^I rl_complete"
                   if 'libedit' in readline.__doc__
                   else "tab: complete")
        readline.parse_and_bind(binding)

    def init_history(self, histfile):
        """Load persisted readline history and arrange to save it on exit."""
        # Some readline implementations lack history-file support; do
        # nothing in that case.
        if not hasattr(readline, "read_history_file"):
            return
        try:
            readline.read_history_file(histfile)
        except IOError:
            # A missing history file is normal on first run.
            pass
        # Persist the (possibly updated) history when the process exits.
        atexit.register(self.save_history, histfile)

    def save_history(self, histfile):
        # Write the in-memory readline history out to histfile; registered
        # as an atexit hook by init_history.
        readline.write_history_file(histfile)


# Script entry: bootstrap the Pyramid environment, bind the DB session to
# the configured engine, and drop into an interactive SQLAlchemy shell with
# the project's models pre-imported.
env = bootstrap('../development.ini')
engine = engine_from_config(env['registry'].settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
ic = SQLAlchemyShell()
# Echo the pre-loaded command so the session transcript shows it.
cmd = "from poab.models import *"
print ">>>", cmd
ic.push(cmd)
ic.interact(banner="Use quit() or Ctrl-D (i.e. EOF) to exit")
コード例 #19
0
ファイル: view.py プロジェクト: peletiah/poab_pyramid
def view_view(request):
    """Render a paginated image-gallery view.

    Route parameters (read from ``request.matchdict``, each optional):
      - ``action``: 'c' (all images), 'log' (images of one log), or 'id'
        (a single image); defaults to 'c'.
      - ``id``: log id or image id depending on action; defaults to 0
        (= all images).
      - ``page``: page number; when absent the last page is shown.

    Returns the template context: the page structure, current page index,
    and a per-image ``Viewdetail`` list.
    """
    # --- route-parameter parsing; any failure falls back to a default ---
    try:
        action=request.matchdict['action']
    except:
        action='c'
    try:
        id=int(request.matchdict['id'])
    except:
        id=0
    try:
        page_number=int(request.matchdict['page'].replace('/',''))
    except:
        page_number=None
    # --- work out which page to show (10 images per page) ---
    if id==0 and page_number==None:
        # No explicit page: default to the last page (same fraction trick
        # as log_view, with 10 items per page).
        q = DBSession.query(Image).order_by(Image.timestamp_original)
        image_count=q.count()
        page_fract=float(Fraction(str(image_count)+'/10'))
        if int(str(page_fract).split('.')[1])==0:
            page=int(str(page_fract).split('.')[0])-1
        else:
            # NOTE(review): leaves `page` a str; normalized via int(page).
            page=str(page_fract).split('.')[0]
    elif page_number==None:
        page=0
    else:
        page=page_number
    #navstring=countryDetails(model,id)
    curr_page=int(page)
    #return { 'bla': log_count}
    # --- select the images to display, depending on action/id ---
    if id==0:
        ##TODO what was the idea behind "country_id!=None"?
        ##q = DBSession.query(Trackpoint).filter(Trackpoint.country_id!=None)
        #q = DBSession.query(Trackpoint)
        #images=fetch_images_for_trackpoints(q)
        images=Image.get_images()
        #print '\n\n\n\n\n'
        #print images
        #print '\n\n\n\n\n'
    elif action=='c':
        #q = DBSession.query(Trackpoint).filter(and_(Trackpoint.country_id==id))
        #images=fetch_images_for_trackpoints(q)
        images=Image.get_images()
    elif action=='log':
        #q = DBSession.query(Trackpoint).filter(and_(Trackpoint.id==id))
        #images=fetch_images_for_trackpoints(q)
        log = DBSession.query(Log).filter(Log.id==id).one()
        images = DBSession.query(Image).filter(Image.logs.contains(log)).order_by(Image.timestamp_original).all()
    elif action=='id':
        images = DBSession.query(Image).filter(Image.id==id).order_by(Image.timestamp_original).all()
    # --- chunk images into pages of ten, reversed within each page ---
    page_list=list()
    pages_list=list()
    i=0
    for image in images:
        page_list.append(image)
        i=i+1
        if i==10:
            page_list.reverse()
            pages_list.append(page_list)
            page_list=list()
            i=0
    if i<10 and i>0:
        # Trailing partial page.
        page_list.reverse()
        pages_list.append(page_list)
    # --- build display details for every image on the current page ---
    viewlist=list()
    #print page_list
    #print pages_list
    #print curr_page
    #print pages_list[curr_page]
    for image in pages_list[curr_page]:
        # Resolve the image's trackpoint; falls back to a hard-coded id.
        # NOTE(review): `prefix` is assigned but never used afterwards.
        if image.trackpoint:
            trackpoint_id=image.trackpoint
        else:
            trackpoint_id=3572 #TODO
            prefix='near '
        q = DBSession.query(Trackpoint).filter(Trackpoint.id==trackpoint_id)
        try:
            trackpointinfo=q.one()
            #print '\n\n\n\n'
            #print trackpointinfo.location_ref[0].name
        except:
            # Unknown trackpoint: substitute an empty placeholder object.
            trackpointinfo = Trackpoint(
                                    track_id = None,
                                    latitude = None,
                                    longitude = None,
                                    altitude = None,
                                    velocity = None,
                                    temperature = None,
                                    direction = None,
                                    pressure = None,
                                    timestamp = None,
                                    uuid = None
                                    )
        #print image.location.replace('/srv','')
        #print '\n\n\n\n'
        ##TODO: fix timezone
        ##q = DBSession.query(Timezone).filter(Timezone.id==trackpointinfo.timezone_id)
        # NOTE(review): timezone id 8 hard-coded, as in the other views.
        q = DBSession.query(Timezone).filter(Timezone.id==8)
        timezone = q.one()
        localtime = image.timestamp_original+timezone.utcoffset
        # UTC offset of the timezone expressed in whole seconds.
        deltaseconds=round(timezone.utcoffset.days*86400+timezone.utcoffset.seconds)
        #TODO THIS SUCKS!
        # Per-image view-model consumed by the gallery template.
        class Viewdetail(object):
            def __init__(self, image, photoid, name, location, title, comment, alt, aperture, shutter, focal_length, iso, trackpointinfo, localtime, timezone, utcoffset, log, author):
                self.image = image
                self.photoid=photoid
                self.name=name
                self.location=location
                self.title=title
                self.comment=comment
                self.alt=alt
                # NOTE(review): exposure attributes are taken from `image`
                # directly, ignoring the parameters of the same name.
                self.aperture= image.aperture
                self.shutter= image.shutter
                self.focal_length= image.focal_length
                self.iso= image.iso
                #logdate=c.loginfo.created.strftime('%Y-%m-%d') #needed for the imagepath
                self.trackpointinfo=trackpointinfo
                self.localtime=localtime
                self.timezone=timezone
                #calculate the offset in seconds
                self.utcoffset=utcoffset
                self.log = log
                self.author = image.author_img_ref
        viewdetail = Viewdetail(image, image.id, image.name, image.location.replace('/srv',''), image.title, image.comment, image.alt, image.aperture, image.shutter, image.focal_length, image.iso, trackpointinfo, localtime.strftime('%Y-%m-%d %H:%M:%S'), timezone, timediff(deltaseconds), image.log, image.author)
        viewlist.append(viewdetail)

    return {
        'pages_list': pages_list,
        'curr_page': int(curr_page),
        'viewlist': viewlist,
        'request': request,
        'action': action,
        'id': id,
    }