def imagesync(request):
    """Sync one image (and its parent log) to the remote poab.org service.

    Expects ``request.json_body`` to carry ``image.id`` and ``log.id``.
    First posts a status query to the sync endpoint; if the remote reports
    ``not_synced`` the image binary itself is uploaded.  Returns the remote
    service's JSON text wrapped in a ``Response``.
    """
    image = Image.get_image_by_id(request.json_body['image']['id'])
    log_json = request.json_body['log']
    log = Log.get_log_by_id(log_json['id'])
    # NOTE(review): `interlink_only` is not defined in this function and is
    # presumably a module-level flag -- confirm it exists at runtime.
    if interlink_only:
        return Response(json.dumps({'log_id': log.id, 'type': 'image',
                                    'item_uuid': image.uuid,
                                    'sync_status': 'was_synced'}))
    else:
        if not image.trackpoint:
            # Find a trackpoint for the image if there was none yet.
            trackpoint = gpxtools.sync_image_trackpoint(image)
            if trackpoint:
                image.trackpoint = trackpoint.id
                DBSession.add(image)
                DBSession.flush()
        url = 'http://poab.org:6544/sync?type=status'
        payload = {'payloadtype': 'image',
                   'image_json': json.dumps(image.reprJSON()),
                   'log_json': json.dumps(log.reprJSON())}
        remote_sync_info = requests.post(url, data=payload)
        print(remote_sync_info.text)
        # If sync_status is 'was_synced', the server already has this image.
        sync_status = json.loads(remote_sync_info.text)['sync_status']
        print('\n################ IMAGE SYNC STATUS: ' + sync_status + str(log.id) + '\n')
        if sync_status == 'not_synced':
            url = 'http://poab.org:6544/sync?type=image'
            # Use a context manager so the file handle is always closed
            # (the original opened it and never closed it).
            with open(image.location + image.name, 'rb') as image_bin:
                payload = {'image_json': json.dumps(image.reprJSON_extended()),
                           'image_bin': image_bin,
                           'log': json.dumps(log.reprJSON())}
                remote_sync_info = requests.post(url, files=payload)
        return Response(remote_sync_info.text)
def imageupload(request):
    """Handle a multi-file image upload from the editor.

    For each uploaded file that is not already known (matched by SHA-256
    hash against the images in the DB) and when the 'upload' checkbox was
    ticked: save the file, generate three resized variants, extract EXIF
    data, try to match a trackpoint, and persist a new ``Image`` row.
    Files already known by hash are looked up instead.  Returns a JSON
    response listing each image's ``reprJSON()`` representation.
    """
    filelist = request.POST.getall('uploadedFile')
    upload = request.POST.get('upload')
    print(request.POST.get('upload'))
    print(request.POST.keys())
    print(filelist)
    owner = authenticated_userid(request)
    author = Author.get_author(owner)
    images_in_db = Image.get_images()
    today = strftime("%Y-%m-%d")
    basedir = '/srv/trackdata/bydate'
    img_large_w = '990'    # width of images in editor-preview
    img_medium_w = '500'   # width of medium variant
    img_thumb_w = '150'    # width of thumbnail variant
    filedir = filetools.createdir(basedir, author.name, today)
    # TODO: 990-dir is created, img_large_w is ignored
    imgdir = filedir + 'images/sorted/'
    images = list()
    # Loop variable renamed from `file` -- the original shadowed the builtin.
    for upload_file in filelist:
        print('\n')
        print(upload_file.filename)
        print('\n')
        filehash = hashlib.sha256(upload_file.value).hexdigest()
        if not filetools.file_exists(images_in_db, filehash):
            # TODO: Uhm, wouldn't a simple db-query for the hash work too???
            if upload:  # only save files when upload-checkbox has been ticked
                filehash = filetools.safe_file_local(imgdir, upload_file)
                imagetools.resize(imgdir, imgdir + img_large_w + '/', upload_file.filename, img_large_w)
                imagetools.resize(imgdir, imgdir + img_medium_w + '/', upload_file.filename, img_medium_w)
                imagetools.resize(imgdir, imgdir + img_thumb_w + '/', upload_file.filename, img_thumb_w)
                image = Image(name=upload_file.filename, location=imgdir, title=None, comment=None,
                              alt=None, aperture=None, shutter=None, focal_length=None, iso=None,
                              timestamp_original=None, hash=filehash, hash_large=None,
                              author=author.id, trackpoint=None, last_change=timetools.now(),
                              published=None, uuid=str(uuid.uuid4()))
                (image.aperture, image.shutter, image.focal_length,
                 image.iso, image.timestamp_original) = imagetools.get_exif(image)
                # TODO: a timezone offset correction was disabled here:
                # image.timestamp_original -= timedelta(seconds=7200)
                trackpoint = gpxtools.sync_image_trackpoint(image)
                if trackpoint:
                    image.trackpoint = trackpoint.id
                DBSession.add(image)
                DBSession.flush()
                image_json = image.reprJSON()
                images.append(image_json)
                print(images)
        else:
            # File already known: reuse the existing DB row.
            image = Image.get_image_by_hash(filehash)
            image_json = image.reprJSON()
            images.append(image_json)
    # url = request.route_url('editor')
    # return HTTPFound(location=url)
    return Response(json.dumps({'images': images}, cls=ComplexEncoder))
def image_location(request):
    """Backfill missing trackpoints for all stored images.

    Walks every ``Image`` row; for each one without a trackpoint, asks
    gpxtools for a matching trackpoint and, on success, stores the match
    in the DB.  Returns the list of updated image ids as plain text.
    """
    refreshed = list()
    for img in Image.get_images():
        if img.trackpoint:
            continue  # already located -- nothing to do
        print(img.id)
        match = gpxtools.sync_image_trackpoint(img)
        print(match)
        if match:
            img.trackpoint = match.id
            DBSession.add(img)
            DBSession.flush()
            refreshed.append(img.id)
    print(refreshed)
    return Response(str(refreshed))