Example #1
def editor(request):
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    if request.matchdict:
        log_id = request.matchdict['logid']
        log = Log.get_log_by_id(log_id)
        images_json = json.dumps([i.reprJSON() for i in log.image],cls=ComplexEncoder) #TODO: Order by timestamp_original
        tracks_json = json.dumps([i.reprJSON() for i in log.track],cls=ComplexEncoder)
        log_json = json.dumps(log.reprJSON(),cls=ComplexEncoder)
        if not log.etappe: #log.etappe might still be empty, so we send an empty object for AngularJS
            etappe_json = json.dumps(dict(id=None, start_date = None, end_date = None, name = None))
        else:
            etappe_json = json.dumps(log.etappe_ref.reprJSON(),cls=ComplexEncoder)
    else:
        #no existing record, so we send empty objects to the template
        images_json = json.dumps([dict(id=None, name=None, location=None, title=None, \
                        alt=None, comment=None, hash=None, author=None, \
                        last_change=None, published=None)])
        etappe_json = json.dumps(dict(id=None, start_date = None, end_date = None, name = None))
        tracks_json = json.dumps([dict(id=None, reduced_trackpoints = list(), distance=None, \
                        timespan=None, trackpoint_count=None, start_time = None, end_time = None, \
                        color=None, author=None, uuid=None, published=None)])
        log_json = json.dumps(dict(id=None,topic=None, content=None, author=None, created=None, \
                        last_change=None, published=None))
    etappe_datestr = Etappe.get_etappe_dropdown_list(5)
    etappe_datestr_json = json.dumps([i.reprJSON() for i in etappe_datestr])
    return {'images': images_json, 'etappe_datestr_json': etappe_datestr_json,
            'etappe': etappe_json, 'tracks': tracks_json, 'log': log_json, 'author': author}
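
Every view in these examples serializes model objects with json.dumps(..., cls=ComplexEncoder), and the models expose a reprJSON() method. The encoder class itself is not part of the listings; a minimal sketch, assuming it simply delegates to reprJSON() and falls back to ISO strings for datetime values, could look like this:

import json
import datetime

class ComplexEncoder(json.JSONEncoder):
    #Hypothetical reconstruction: delegate to the model's reprJSON(), stringify datetimes
    def default(self, obj):
        if hasattr(obj, 'reprJSON'):
            return obj.reprJSON()
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)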
Example #2
def trackupload(request):
    filelist = request.POST.getall('uploadedFile')
    upload = request.POST.get('upload')
    print request.POST.get('upload')
    print request.POST.keys()
    print filelist
 
    owner = authenticated_userid(request)
    author = Author.get_author(owner)

    today=strftime("%Y-%m-%d")
    
    basedir = '/srv/trackdata/bydate'
    filedir = filetools.createdir(basedir, author.name, today)
    trackdir = filedir+'trackfile/'
    tracks_in_db = list()

    for file in filelist:
        if upload: #only save files when upload-checkbox has been ticked
            filehash = filetools.safe_file_local(trackdir, file)
            print '\n'
            print file.filename
            print '\n'

        parsed_tracks = gpxtools.parse_gpx(trackdir+file.filename)

        for track_details in parsed_tracks:
            track = add_track_to_db( track_details, author )
            if track:
                add_trackpoints_to_db( track_details['trackpoints'], track )
                tracks_in_db.append(track)

    return Response(json.dumps({'tracks':tracks_in_db},cls=ComplexEncoder))
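
filetools.safe_file_local is not included in these snippets; judging by how its return value is used in the image upload example below, a rough sketch (an assumption, not the project's actual helper) would write the upload to disk and return a SHA-256 digest of its contents:

import os
import hashlib

def safe_file_local(directory, field):
    #Hypothetical stand-in: persist a WebOb FieldStorage upload and return its content hash
    data = field.value #raw bytes of the uploaded file
    if not os.path.exists(directory):
        os.makedirs(directory)
    with open(os.path.join(directory, field.filename), 'wb') as f:
        f.write(data)
    return hashlib.sha256(data).hexdigest()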
Example #3
def authors_view(request):
    owner = authenticated_userid(request)
    author = Author.get_author(owner)

    authors = DBSession.query(Author).all()
    return {
        'authors': sorted([a.name for a in authors]), 
        'author': author
    }
Example #4
def imageupload(request):
    filelist = request.POST.getall('uploadedFile')
    upload = request.POST.get('upload')
    print request.POST.get('upload')
    print request.POST.keys()
    print filelist
    
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    images_in_db = Image.get_images()
    today=strftime("%Y-%m-%d")
    
    basedir = '/srv/trackdata/bydate'
    img_large_w='990' #width of large preview images in the editor
    img_medium_w='500' #width of medium-sized images
    img_thumb_w='150' #width of thumbnails
    filedir = filetools.createdir(basedir, author.name, today) #TODO: 990-dir is created, img_large_w is ignored
    imgdir = filedir+'images/sorted/'
    images=list()

    for file in filelist:
        print '\n'
        print file.filename
        print '\n'
        filehash = hashlib.sha256(file.value).hexdigest()

        if not filetools.file_exists(images_in_db, filehash): #TODO: Uhm, wouldn't a simple db-query for the hash work too???
            if upload: #only save files when upload-checkbox has been ticked
                filehash = filetools.safe_file_local(imgdir, file)
                imagetools.resize(imgdir, imgdir+img_large_w+'/', file.filename, img_large_w)
                imagetools.resize(imgdir, imgdir+img_medium_w+'/', file.filename, img_medium_w)
                imagetools.resize(imgdir, imgdir+img_thumb_w+'/', file.filename, img_thumb_w)
            image = Image(name=file.filename, location=imgdir, title=None, comment=None, alt=None, \
                        aperture=None, shutter=None, focal_length=None, iso=None, timestamp_original=None, \
                        hash=filehash, hash_large=None, author=author.id, trackpoint=None, last_change=timetools.now(), \
                        published=None, uuid=str(uuid.uuid4()))
            image.aperture, image.shutter, image.focal_length, image.iso, image.timestamp_original = imagetools.get_exif(image)
            image.timestamp_original = image.timestamp_original #-timedelta(seconds=7200) #TODO
            trackpoint=gpxtools.sync_image_trackpoint(image)
            if trackpoint:
                image.trackpoint = trackpoint.id
            DBSession.add(image)
            DBSession.flush()
            image_json = image.reprJSON()
            images.append(image_json)
            print images
        else:
            image = Image.get_image_by_hash(filehash)
            image_json = image.reprJSON()
            images.append(image_json)
    #url = request.route_url('editor')
    #return HTTPFound(location=url)
    return Response(json.dumps({'images':images},cls=ComplexEncoder))
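
imagetools.resize is likewise external to these examples; a rough Pillow-based equivalent, assuming it scales an image to the given width while preserving the aspect ratio and writes it into the target directory, might be:

import os
from PIL import Image as PILImage #Pillow; the project's actual imagetools module is not shown

def resize(src_dir, dest_dir, filename, width):
    #Hypothetical stand-in for imagetools.resize: scale to `width` pixels wide, keep aspect ratio
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    img = PILImage.open(os.path.join(src_dir, filename))
    w, h = img.size
    target_w = int(width)
    target_h = int(round(h * target_w / float(w)))
    img.resize((target_w, target_h), PILImage.LANCZOS).save(os.path.join(dest_dir, filename))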
Example #5
def save_log(request):
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    log_json = request.json_body
    log_id = log_json['id']
    topic=log_json['topic']
    content=log_json['content']
    images = log_json['images']
    tracks = log_json['tracks']
    etappe = log_json['etappe']

    today=strftime("%Y-%m-%d")
    
    basedir = '/srv/trackdata/bydate'
    filedir = filetools.createdir(basedir, author.name, today)
    
    start_date = etappe['start_date']
    end_date = datetime.datetime.strptime(etappe['end_date'],'%Y-%m-%d') #unicode to datetime
    end_date = end_date+datetime.timedelta(days=1)-datetime.timedelta(seconds=1) #we need 23:59:59 this day, not 00:00:00
    name=etappe['name']

    if etappe['id']:
        print 'etappe-id:'+ str(etappe['id'])
        etappe = Etappe.get_etappe_by_id(etappe['id'])
        etappe.start_date = start_date
        etappe.end_date = end_date
        etappe.name = name
    else:
        etappe = Etappe(start_date=start_date, end_date=end_date, name=name)
    DBSession.add(etappe)
    DBSession.flush()
 
    if log_id:
        log = Log.get_log_by_id(log_id)
        log.topic = topic
        log.content = content
        log.last_change = timetools.now()
        log.etappe = etappe.id
    else:
        #log_id is None, so this is a new post
        log = Log(topic=topic, content=content, author=author.id, etappe=etappe.id, created=timetools.now(), uuid=str(uuid.uuid4()))
    DBSession.add(log)
    DBSession.flush()
    print 'logid='+str(log.id)
    for image in images:
        try:
            if image['id']:
                print 'imageid:'+ str(image['id'])
                image = Image.get_image_by_id(image['id'])
                log.image.append(image)
        except Exception, e:
            print e
            print 'ERROR while saving log'
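
save_log reads its input from request.json_body; from the keys accessed above, the POSTed payload has roughly the following shape (the values here are illustrative only, not taken from the project):

payload = {
    'id': None, #None means a new Log is created
    'topic': 'Day 3',
    'content': '<p>...</p>',
    'images': [{'id': 12}],
    'tracks': [{'id': 7}],
    'etappe': {'id': None, 'start_date': '2013-06-01',
               'end_date': '2013-06-05', 'name': 'Etappe name'}
}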
Example #6
def overview(request):
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    if not author:
        loc = request.route_url('login')
        return HTTPFound(location=loc)
    if author.name == 'admin': #TODO
        logs = Log.get_logs()
    else:
        logs = Log.get_logs_by_author(author.id)
    if not logs:
        url = request.route_url('editor')
        return HTTPFound(location=url)
    logs_json = json.dumps([i.reprJSON() for i in logs],cls=ComplexEncoder)
    return {'logs': logs_json, 'author': author}
Example #7
def login_view(request):
    next = request.params.get('next') or request.route_url('overview')
    name = ''
    did_fail = False
    authors = DBSession.query(Author).all()
    if 'submit' in request.POST:
        name = request.POST.get('name', '')
        password = request.POST.get('password', '')
        author = Author.get_author(name)
        if author and author.validate_password(password):
            headers = remember(request, name)
            return HTTPFound(location=next, headers=headers)
        did_fail = True

    return {
        'name': name,
        'next': next,
        'failed_attempt': did_fail,
        'authors': authors,
        'request': request
    }
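
The views above omit their imports (authenticated_userid is presumably pyramid.security.authenticated_userid) and return either a plain dict, which Pyramid hands to a template renderer, or a Response sent as-is. The routing is not part of these examples either; a possible wiring, where the route names come from the request.route_url() calls above but the URL patterns, module path and renderer files are assumptions, would be:

from pyramid.config import Configurator

def main(global_config, **settings):
    config = Configurator(settings=settings)
    #route names taken from request.route_url() calls; URL patterns are assumed
    config.add_route('overview', '/')
    config.add_route('login', '/login')
    config.add_route('editor', '/editor/{logid}') #{logid} feeds request.matchdict['logid'] in editor()
    #'myapp.views' is a placeholder for wherever these view functions actually live
    config.add_view('myapp.views.overview', route_name='overview', renderer='templates/overview.pt')
    config.add_view('myapp.views.login_view', route_name='login', renderer='templates/login.pt')
    config.add_view('myapp.views.editor', route_name='editor', renderer='templates/editor.pt')
    return config.make_wsgi_app()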