Esempio n. 1
0
def editor(request):
    """Render the log editor view.

    With a matchdict (edit mode) the existing log and its related images,
    tracks and etappe are serialized to JSON for the template; without one
    (create mode) empty JSON skeletons are sent so AngularJS has objects
    to bind to.
    """
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    if request.matchdict:
        # Edit mode: load the requested log and serialize its relations.
        log = Log.get_log_by_id(request.matchdict['logid'])
        images_json = json.dumps([img.reprJSON() for img in log.image], cls=ComplexEncoder) #TODO: Order by timestamp_original
        tracks_json = json.dumps([trk.reprJSON() for trk in log.track], cls=ComplexEncoder)
        log_json = json.dumps(log.reprJSON(), cls=ComplexEncoder)
        if log.etappe:
            etappe_json = json.dumps(log.etappe_ref.reprJSON(), cls=ComplexEncoder)
        else:
            # log.etappe might still be empty, so we create an empty object for AngularJS
            etappe_json = json.dumps(dict(id=None, start_date=None, end_date=None, name=None))
    else:
        # Create mode: no existing record, so we send empty objects to the template.
        images_json = json.dumps([dict(
            id=None, name=None, location=None, title=None,
            alt=None, comment=None, hash=None, author=None,
            last_change=None, published=None)])
        etappe_json = json.dumps(dict(id=None, start_date=None, end_date=None, name=None))
        tracks_json = json.dumps([dict(
            id=None, reduced_trackpoints=list(), distance=None,
            timespan=None, trackpoint_count=None, start_time=None, end_time=None,
            color=None, author=None, uuid=None, published=None)])
        log_json = json.dumps(dict(
            id=None, topic=None, content=None, author=None, created=None,
            last_change=None, published=None))
    etappe_datestr = Etappe.get_etappe_dropdown_list(5)
    etappe_datestr_json = json.dumps([item.reprJSON() for item in etappe_datestr])
    return {'images': images_json, 'etappe_datestr_json': etappe_datestr_json,
            'etappe': etappe_json, 'tracks': tracks_json, 'log': log_json,
            'author': author}
Esempio n. 2
0
def tracksync(request):
    """Sync one track to the remote server unless it is already there.

    Expects ``request.json_body`` with ``track.id`` and ``log.id``.  First
    asks the remote for the track's sync status; only when it answers
    ``not_synced`` is the full extended representation uploaded.  Returns the
    remote server's response text (or a synthetic ``was_synced`` answer when
    ``interlink_only`` is set).
    """
    track = Track.get_track_by_id(request.json_body['track']['id'])
    log_json = request.json_body['log']
    log = Log.get_log_by_id(log_json['id'])

    print(track.reprJSON_extended()['author'])

    # NOTE(review): ``interlink_only`` is not defined in this function;
    # presumably a module-level flag -- confirm.
    if interlink_only:
        return Response(json.dumps({'log_id': log.id, 'type': 'track',
                                    'item_uuid': track.uuid,
                                    'sync_status': 'was_synced'}))
    else:
        # Ask the remote which sync state it has for this track.  The payload
        # goes form-encoded via ``data=``; the original built a json
        # content-type header but never passed it, so it was dropped here.
        url = 'http://poab.org:6544/sync?type=status'
        payload = {'payloadtype': 'track',
                   'track_json': json.dumps(track.reprJSON()),
                   'log_json': json.dumps(log.reprJSON())}
        remote_sync_info = requests.post(url, data=payload)
        print(remote_sync_info.text)
        #if sync_status is 'was_synced', we already have this track on the server
        sync_status = json.loads(remote_sync_info.text)['sync_status']
        print('\n################ TRACK SYNC STATUS: '+sync_status+str(log.id) + '\n')
        #TODO: this prevents half uploaded trackpoints from beeing finished!!!!

        if sync_status == 'not_synced':
            # Upload the full track (incl. trackpoints) to the remote server.
            url = 'http://poab.org:6544/sync?type=track'
            payload = {'track': json.dumps(track.reprJSON_extended()),
                       'log_json': json.dumps(log.reprJSON())}
            remote_sync_info = requests.post(url, data=payload)

        return Response(remote_sync_info.text)
Esempio n. 3
0
def imagesync(request):
    """Sync one image to the remote server unless it is already there.

    Expects ``request.json_body`` with ``image.id`` and ``log.id``.  If the
    image has no trackpoint yet, one is looked up via gpxtools and persisted.
    The remote is then asked for the image's sync status; only on
    ``not_synced`` is the binary uploaded.  Returns the remote response text
    (or a synthetic ``was_synced`` answer when ``interlink_only`` is set).
    """
    image = Image.get_image_by_id(request.json_body['image']['id'])
    log_json = request.json_body['log']
    log = Log.get_log_by_id(log_json['id'])

    # NOTE(review): ``interlink_only`` is not defined in this function;
    # presumably a module-level flag -- confirm.
    if interlink_only:
        return Response(json.dumps({'log_id': log.id, 'type': 'image',
                                    'item_uuid': image.uuid,
                                    'sync_status': 'was_synced'}))
    else:
        if not image.trackpoint: #find trackpoint for image if there was none yet
            trackpoint = gpxtools.sync_image_trackpoint(image)
            if trackpoint:
                image.trackpoint = trackpoint.id
                DBSession.add(image)
                DBSession.flush()
        # Status query goes form-encoded via ``data=``; the original built a
        # json content-type header but never passed it, so it was dropped.
        url = 'http://poab.org:6544/sync?type=status'
        payload = {'payloadtype': 'image',
                   'image_json': json.dumps(image.reprJSON()),
                   'log_json': json.dumps(log.reprJSON())}
        remote_sync_info = requests.post(url, data=payload)
        print(remote_sync_info.text)
        #if sync_status is 'was_synced', we already have this image on the server
        sync_status = json.loads(remote_sync_info.text)['sync_status']
        print('\n################ IMAGE SYNC STATUS: '+sync_status+str(log.id) + '\n')

        if sync_status == 'not_synced':
            url = 'http://poab.org:6544/sync?type=image'
            # ``with`` closes the file handle after upload; the original
            # leaked it.  ``files=`` makes requests send multipart form-data.
            with open(image.location + image.name, 'rb') as image_bin:
                payload = {'image_json': json.dumps(image.reprJSON_extended()),
                           'image_bin': image_bin,
                           'log': json.dumps(log.reprJSON())}
                remote_sync_info = requests.post(url, files=payload)

        return Response(remote_sync_info.text)
Esempio n. 4
0
def delete_log(request):
    """Delete the log identified by ``id`` in the JSON body.

    Returns a Response whose body is the deleted log's topic.
    """
    log_json = request.json_body
    log_id = log_json['id']
    log = Log.get_log_by_id(log_id)
    print(log.id)
    # Capture the topic BEFORE deleting: after delete()+flush() the ORM may
    # expire the instance and attribute access could fail.
    topic = log.topic
    DBSession.delete(log)
    DBSession.flush()
    return Response(topic)
Esempio n. 5
0
def save_log(request):
    """Create or update a log entry (and its Etappe) from the JSON body.

    Expected body keys: ``id`` (None for a new post), ``topic``, ``content``,
    ``images``, ``tracks``, ``etappe`` (dict with ``id``, ``start_date``,
    ``end_date``, ``name``).  Existing images referenced by id are attached
    to the log.

    NOTE(review): the function returns None; the caller/renderer apparently
    tolerates that -- left unchanged to avoid altering the view contract.
    """
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    log_json = request.json_body
    log_id = log_json['id']
    topic = log_json['topic']
    content = log_json['content']
    images = log_json['images']
    tracks = log_json['tracks']  # currently unused, but validates the key is present
    etappe_json = log_json['etappe']  # renamed: the original rebound 'etappe' from dict to ORM object

    today = strftime("%Y-%m-%d")

    basedir = '/srv/trackdata/bydate'
    filedir = filetools.createdir(basedir, author.name, today) #TODO: 990-dir is created, img_large_w is ignored

    start_date = etappe_json['start_date']
    # unicode to datetime, then shift to 23:59:59 of that day (not 00:00:00)
    end_date = datetime.datetime.strptime(etappe_json['end_date'], '%Y-%m-%d')
    end_date = end_date + datetime.timedelta(days=1) - datetime.timedelta(seconds=1)
    name = etappe_json['name']

    if etappe_json['id']:
        print('etappe-id:' + str(etappe_json['id']))
        etappe = Etappe.get_etappe_by_id(etappe_json['id'])
        etappe.start_date = start_date
        etappe.end_date = end_date
        etappe.name = name
    else:
        etappe = Etappe(start_date=start_date, end_date=end_date, name=name)
    DBSession.add(etappe)
    DBSession.flush()

    if log_id:
        log = Log.get_log_by_id(log_id)
        log.topic = topic
        log.content = content
        log.last_change = timetools.now()
        log.etappe = etappe.id
    else:
        #log_id is None, so this is a new post
        log = Log(topic=topic, content=content, author=author.id,
                  etappe=etappe.id, created=timetools.now(),
                  uuid=str(uuid.uuid4()))
    DBSession.add(log)
    DBSession.flush()
    print('logid=' + str(log.id))
    for image_json in images:  # renamed: the original shadowed the loop var with the ORM object
        try:
            if image_json['id']:
                print('imageid:' + str(image_json['id']))
                image = Image.get_image_by_id(image_json['id'])
                log.image.append(image)
        except Exception as e:  # 'as' form is valid on both py2.6+ and py3
            print(e)
            print('ERROR while saving log')
Esempio n. 6
0
def logsync(request):
    """Upload a log to the remote sync server; mark it published on success.

    Expects ``request.json_body['log']['id']``.  Returns the remote server's
    response text (or a synthetic ``was_synced`` answer when
    ``interlink_only`` is set).
    """
    log = Log.get_log_by_id(request.json_body['log']['id'])
    print(log.id)
    print(log.reprJSON_extended())
    # NOTE(review): ``interlink_only`` is not defined in this function;
    # presumably a module-level flag -- confirm.
    if interlink_only:
        return Response(json.dumps({'log_id': log.id, 'type': 'log',
                                    'item_uuid': log.uuid,
                                    'sync_status': 'was_synced'}))
    else:
        url = 'http://poab.org:6544/sync?type=log'
        payload = {'log_json': json.dumps(log.reprJSON_extended())}
        # NOTE(review): ``files=`` sends multipart form-data even though no
        # file is attached -- presumably what the server expects; confirm.
        # The original also built a json content-type header but never passed
        # it, so it was dropped here.
        remote_sync_info = requests.post(url, files=payload)
        #if sync_status is True, we already have this image on the server
        sync_status = json.loads(remote_sync_info.text)['sync_status']
        if sync_status == 'is_synced':
            log.published = timetools.now()
            DBSession.add(log)
        return Response(remote_sync_info.text)
Esempio n. 7
0
def preview(request):
    """Render a preview of a log, expanding ``[imgid<N>]`` markers into HTML.

    Reads ``logid`` from the query string.  Each ``[imgid123]`` marker in the
    log content is replaced with an inline-image HTML snippet (with or
    without a caption depending on ``image.comment``).

    Fix: the original left ``log`` and ``preview`` unbound when the request
    had no query string, raising UnboundLocalError at the return; they now
    default to None.
    """
    log = None
    preview = None
    if request.query_string:
        log_id = request.GET.get('logid')
        log = Log.get_log_by_id(log_id)
        imgid_list = re.findall(r"(\[imgid\d{1,}\])", log.content)
        preview = log.content
        img_large_w = '500'
        for imgid in imgid_list:
            print(imgid)
            img_id = re.search(r"^\[imgid(\d{1,})\]$", imgid).group(1) #gets the id in [imgid123]
            image = Image.get_image_by_id(img_id)
            if image:
                if image.comment:
                    preview=preview.replace(imgid,'''<div class="log_inlineimage"><div class="imagecontainer"><img class="inlineimage" src="/static%s%s/%s" alt="%s"></div>
                                                     <span class="imagedescription">%s</span></div>''' % (image.location, img_large_w, image.name, image.alt, image.comment))
                else:
                    preview = preview.replace(imgid,'''<div class="log_inlineimage"> <div class="imagecontainer"><img class="inlineimage" src="/static%s%s/%s" alt="%s">
                                                      </div></div>''' % (image.location, img_large_w, image.name, image.alt))
    return {'log': log, 'preview': preview}