def imagesync(request):
    sync_status = 'sync_error'
    print request.POST.keys()
    image_json = json.loads(request.POST.get('image_json').value)
    log_json = json.loads(request.POST.get('log').value)
    image_bin = request.POST.get('image_bin')
    author = Author.get_author(image_json['author']['name'])
    image = Image.get_image_by_uuid(image_json['uuid'])
    if not image:
        # no image with this uuid (from the JSON info) exists in our db yet
        basedir = '/srv/trackdata/bydate'
        img_prvw_w = '500'
        img_large_w = '990'
        # we use the timestamp from log_json['created'] for the image location
        created = datetime.strptime(log_json['created'], "%Y-%m-%d %H:%M:%S")
        datepath = created.strftime("%Y-%m-%d")
        filedir = filetools.createdir(basedir, author.name, datepath)
        imgdir = filedir + 'images/sorted/'
        filehash = filetools.safe_file(imgdir, image_json['name'], image_bin.value)
        imagetools.resize(imgdir, imgdir + img_prvw_w + '/', image_json['name'], img_prvw_w)
        imagetools.resize(imgdir, imgdir + img_large_w + '/', image_json['name'], img_large_w)
        #TODO: what happens when a 990px-wide image was uploaded?
        hash_large = hashlib.sha256(open(imgdir + img_large_w + '/' + image_json['name'], 'rb').read()).hexdigest()
        filehash = hashlib.sha256(open(imgdir + '/' + image_json['name'], 'rb').read()).hexdigest()
        image = Image(
            name = image_json['name'],
            location = imgdir,
            title = image_json['title'],
            comment = image_json['comment'],
            alt = image_json['alt'],
            aperture = image_json['aperture'],
            shutter = image_json['shutter'],
            focal_length = image_json['focal_length'],
            iso = image_json['iso'],
            timestamp_original = image_json['timestamp_original'],
            hash = filehash,
            hash_large = hash_large,  #TODO: we need the real file's hash if a 990px image was uploaded, not converted
            author = author.id,
            trackpoint = None,
            last_change = timetools.now(),
            published = timetools.now(),
            uuid = image_json['uuid']
        )
        DBSession.add(image)
        DBSession.flush()
        sync_status = 'is_synced'
    else:
        # something went very wrong:
        #TODO: the image is already in the db - why was this not caught earlier by /sync?type=status?
        print 'ERROR: Image found in DB, but this should have happened in /sync?type=status'
        sync_status = 'sync_error'
    return Response(json.dumps({'log_id': log_json['id'], 'type': 'image',
                                'item_uuid': image_json['uuid'], 'sync_status': sync_status}))
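# Hedged sketch, not part of the original code: the view above re-reads each
# resized file and hashes it in one gulp without closing the handle. A helper
# like the following (hash_file is a name introduced here) would do the same
# incrementally and close the file; shown only as a possible refactoring.
import hashlib

def hash_file(path):
    # read in 64 KiB chunks so large originals need not fit in memory
    sha = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            sha.update(chunk)
    return sha.hexdigest()

# usage (same result as the inline hashlib calls above):
# hash_large = hash_file(imgdir + img_large_w + '/' + image_json['name'])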
def gpxprocess(request):
    class trkpt:
        def __init__(self, latitude, longitude):
            self.latitude = latitude
            self.longitude = longitude

    trkptlist = list()
    gpx_ns = "http://www.topografix.com/GPX/1/1"
    filename = request.POST['gpx'].filename
    input_file = request.POST['gpx'].file
    root = etree.parse(input_file).getroot()
    trackSegments = root.iter("{%s}trkseg" % gpx_ns)
    for trackSegment in trackSegments:
        for trackPoint in trackSegment:
            lat = trackPoint.attrib['lat']
            lon = trackPoint.attrib['lon']
            new_trkpt = trkpt(lat, lon)
            trkptlist.append(new_trkpt)
    reduced_trkpts = reduce_trackpoints(trkptlist)
    json_string = create_json_for_db(reduced_trkpts)
    track = Track(timetools.now(), len(trkptlist), 0, '00:00:00',
                  None, None, None, None, None, None, None, json_string)
    DBSession.add(track)
    DBSession.flush()
    #raise HTTPFound(request.route_url('track', 'fromgpx', track.id))
    route = request.route_url('track', 'fromgpx', track.id)
    response = Response('<a href="%s">%s</a>' % (route, route))
    response.content_type = 'text/html'
    return response
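# A self-contained sketch (not part of the original code) of the trkseg/trkpt
# structure gpxprocess() expects; the GPX 1.1 namespace matches gpx_ns above,
# and lxml is assumed to be available as in the view itself:
from lxml import etree

gpx_sample = """<gpx xmlns="http://www.topografix.com/GPX/1/1" version="1.1" creator="sketch">
  <trk><trkseg>
    <trkpt lat="47.0667" lon="15.4333"/>
    <trkpt lat="47.0670" lon="15.4340"/>
  </trkseg></trk>
</gpx>"""

root = etree.fromstring(gpx_sample)
for seg in root.iter("{http://www.topografix.com/GPX/1/1}trkseg"):
    for pt in seg:
        print pt.attrib['lat'], pt.attrib['lon']  # prints both trackpoints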
class Log(Base):
    __tablename__ = 'log'
    id = Column("id", Integer, primary_key=True, autoincrement=True)
    infomarker = Column("infomarker", types.Integer, ForeignKey('trackpoint.id'))
    topic = Column("topic", types.UnicodeText)
    content = Column("content", types.UnicodeText)
    author = Column(Integer, ForeignKey('author.id', onupdate="CASCADE", ondelete="CASCADE"))
    etappe = Column(Integer, ForeignKey('etappe.id', onupdate="CASCADE", ondelete="CASCADE"))
    # pass the callable itself, not its result, so the default is evaluated
    # per row instead of once at import time
    created = Column("created", types.TIMESTAMP(timezone=False), default=timetools.now)
    published = Column("published", types.TIMESTAMP(timezone=False), default=timetools.now)
    uuid = Column("uuid", postgresql.UUID, unique=True)
    image = relationship('Image', secondary=log_image_table, backref='logs')
    track = relationship('Track', secondary=log_track_table, backref='logs')

    def __init__(self, infomarker, topic, content, author, etappe, created, published, uuid):
        self.infomarker = infomarker
        self.topic = topic
        self.content = content
        self.author = author
        self.etappe = etappe
        self.created = created
        self.published = published
        self.uuid = uuid

    @classmethod
    def get_log_by_uuid(cls, uuid):
        try:
            return DBSession.query(Log).filter(Log.uuid == uuid).one()
        except Exception, e:
            print "Error retrieving log: %s" % e
            return None
class Etappe(Base):
    __tablename__ = 'etappe'
    id = Column(Integer, primary_key=True)
    # pass the callable so the default is evaluated per row, not once at import
    start_date = Column(types.TIMESTAMP(timezone=False), default=timetools.now)
    end_date = Column(types.TIMESTAMP(timezone=False), default=timetools.now)
    uuid = Column("uuid", postgresql.UUID, unique=True)
    name = Column(Text)
    log = relationship('Log', backref='etappe_ref')
    track = relationship('Track', backref='etappe_ref')
    __table_args__ = (UniqueConstraint('start_date', 'end_date', name='etappe_start_end'), {})

    def __init__(self, start_date=None, end_date=None, name=None, uuid=None):
        # default argument expressions are evaluated once at definition time,
        # so fresh timestamps and uuids must be generated per instance here
        self.start_date = start_date if start_date is not None else timetools.now()
        self.end_date = end_date if end_date is not None else timetools.now()
        self.name = name
        self.uuid = uuid if uuid is not None else str(uuidlib.uuid4())

    @classmethod
    def get_etappen(cls):
        return DBSession.query(Etappe).all()

    @classmethod
    def get_etappe_by_id(cls, id):
        return DBSession.query(Etappe).filter(Etappe.id == id).one()

    @classmethod
    def get_etappe_by_uuid(cls, uuid):
        try:
            return DBSession.query(Etappe).filter(Etappe.uuid == uuid).one()
        except Exception, e:
            print "Error retrieving etappe: %s" % e
            return None
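# Standalone demonstration (not part of the original code) of why the original
# signature uuid=str(uuidlib.uuid4()) was a bug: Python evaluates default
# argument expressions once, when the def statement runs, so every Etappe
# created without an explicit uuid shared the same value.
import uuid as uuidlib

def make_id(u=str(uuidlib.uuid4())):  # default frozen at definition time
    return u

print make_id() == make_id()  # True: the "fresh" uuid is the same every call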
def logsync(request):
    sync_status = 'sync_error'
    log_json = json.loads(request.POST.get('log_json').value)
    print log_json
    etappe_json = log_json['etappe']
    #TODO: matching on dates might be better than matching on uuid
    etappe = Etappe.get_etappe_by_uuid(etappe_json['uuid'])
    if not etappe:
        etappe = Etappe(
            start_date = etappe_json['start_date'],
            end_date = etappe_json['end_date'],
            name = etappe_json['name'],
            uuid = etappe_json['uuid']
        )
        DBSession.add(etappe)
        DBSession.flush()
    log = Log.get_log_by_uuid(log_json['uuid'])
    if not log:
        print 'No log found, adding new log.'
        print 'Author: ' + log_json['author']
        author = Author.get_author(log_json['author'])
        print author
        log = Log(
            infomarker = None,
            topic = log_json['topic'],
            content = log_json['content'],
            author = author.id,
            etappe = etappe.id,
            created = log_json['created'],
            published = timetools.now(),
            uuid = log_json['uuid']
        )
        DBSession.add(log)
        DBSession.flush()
        sync_status = 'is_synced'  # item was not synced before we started
    else:
        #TODO: updating an existing log needs a last_change comparison and the like
        print 'Log already exists on server'
        sync_status = 'was_synced'  # item was already on the server earlier
    return Response(json.dumps({'log_id': log_json['id'], 'type': 'log',
                                'item_uuid': log_json['uuid'], 'sync_status': sync_status}))
class Image(Base):
    __tablename__ = 'image'
    id = Column(Integer, primary_key=True)
    name = Column("name", types.UnicodeText)
    location = Column("location", types.UnicodeText)
    title = Column("title", types.UnicodeText)
    comment = Column("comment", types.UnicodeText)
    alt = Column("alt", types.UnicodeText)
    aperture = Column(Text)
    shutter = Column(Text)
    focal_length = Column(Text)
    iso = Column(Text)
    timestamp_original = Column(types.TIMESTAMP(timezone=False))
    hash = Column("hash", types.UnicodeText)
    hash_large = Column("hash_large", types.UnicodeText)  # hash of the 990px-wide version of the image
    author = Column(Integer, ForeignKey('author.id', onupdate="CASCADE", ondelete="CASCADE"))
    trackpoint = Column(Integer, ForeignKey('trackpoint.id', onupdate="CASCADE", ondelete="CASCADE"))
    last_change = Column(types.TIMESTAMP(timezone=False), default=timetools.now)  # callable: evaluated per row
    published = Column(types.TIMESTAMP(timezone=False))
    uuid = Column("uuid", postgresql.UUID, unique=True)
    log = relationship('Log', secondary=log_image_table, backref='images')
    __table_args__ = (UniqueConstraint('location', 'name', name='image_location_name'), {})

    def __init__(self, name, location, title, comment, alt, aperture, shutter, focal_length, iso,
                 timestamp_original, hash, hash_large, author, trackpoint, uuid,
                 last_change=None, published=None):
        self.name = name
        self.location = location
        self.title = title
        self.comment = comment
        self.alt = alt
        self.aperture = aperture
        self.shutter = shutter
        self.focal_length = focal_length
        self.iso = iso
        self.timestamp_original = timestamp_original
        self.hash = hash
        self.hash_large = hash_large
        self.author = author
        self.trackpoint = trackpoint
        self.uuid = uuid
        # generated per instance; a default of timetools.now() in the signature
        # would be frozen at definition time
        self.last_change = last_change if last_change is not None else timetools.now()
        self.published = published

    def reprJSON(self):
        if self.published:
            published = self.published.strftime("%Y-%m-%d")
        else:
            published = self.published
        return dict(id=self.id, name=self.name, location=self.location, title=self.title,
                    alt=self.alt, comment=self.comment, hash=self.hash, hash_large=self.hash_large,
                    author=self.author, last_change=self.last_change.strftime("%Y-%m-%d"),
                    published=published, uuid=self.uuid)

    @classmethod
    def get_images(cls):
        return DBSession.query(Image).order_by(Image.timestamp_original).all()

    @classmethod
    def get_image_by_id(cls, id):
        try:
            return DBSession.query(Image).filter(Image.id == id).one()
        except Exception, e:
            print "Error retrieving image: %s" % e
            return None
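    # imagesync() above calls Image.get_image_by_uuid(), which is missing from
    # the model as shown. A minimal sketch following the pattern of
    # Log.get_log_by_uuid (an assumption, not part of the original code):
    @classmethod
    def get_image_by_uuid(cls, uuid):
        try:
            return DBSession.query(Image).filter(Image.uuid == uuid).one()
        except Exception, e:
            print "Error retrieving image: %s" % e
            return None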
def copy_db(request):
    #TRACK/TRACKPOINT
    #
    #tracks_old = DBSession.query(TrackOld).filter(TrackOld.id == 141).all()
    #tracks_old = DBSession.query(TrackOld).all()
    #print tracks_old
    #for track_old in tracks_old:
    #    track_query = DBSession.query(Track).filter(Track.id == track_old.id)
    #    if track_query.count() < 1:
    #        start_time = track_old.date
    #        end_time = track_old.date
    #        trackpoint_count = track_old.trkptnum
    #        distance = str(track_old.distance)
    #        timespan = track_old.timespan
    #        color = track_old.color
    #        track = Track(
    #            id = track_old.id,
    #            start_time = start_time,
    #            end_time = end_time,
    #            trackpoint_count = trackpoint_count,
    #            distance = distance,
    #            timespan = timespan,
    #            color = color,
    #            reduced_trackpoints = None,
    #            author = 1,
    #            etappe = 1,
    #            uuid = None
    #        )
    #        DBSession.add(track)
    #        print track_old.distance
    #        print track.distance
    #        trackpoints_old = DBSession.query(TrackpointOld).filter(TrackpointOld.track_id == track.id).all()
    #        trackpoint_list = list()
    #        for trackpoint_old in trackpoints_old:
    #            #print trackpoint_old.altitude, trackpoint_old.velocity, trackpoint_old.temperature, trackpoint_old.direction, trackpoint_old.pressure
    #            trackpoint = Trackpoint(
    #                id = trackpoint_old.id,
    #                track_id = track.id,
    #                latitude = trackpoint_old.latitude,
    #                longitude = trackpoint_old.longitude,
    #                altitude = trackpoint_old.altitude,
    #                velocity = trackpoint_old.velocity,
    #                temperature = trackpoint_old.temperature,
    #                direction = trackpoint_old.direction,
    #                pressure = trackpoint_old.pressure,
    #                timestamp = trackpoint_old.timestamp,
    #                uuid = None
    #            )
    #            DBSession.add(trackpoint)
    #            if trackpoint_old.location or trackpoint_old.country_id:
    #                location = Location(
    #                    trackpoint_id = trackpoint.id,
    #                    country_id = trackpoint_old.country_id,
    #                    name = trackpoint_old.location
    #                )
    #                DBSession.add(location)
    #            trackpoint_list.append(trackpoint)
    #
    #        trackpoint_list.sort(key=lambda trackpoint: trackpoint.timestamp)
    #        if track_old.id != 140 and track_old.id != 145:
    #            reduced_trkpts = reduce_trackpoints(trackpoint_list, 0.0002)
    #            track.reduced_trackpoints = reduced_trkpts
    #            print reduced_trkpts
    #        DBSession.add(track)
    #    else:
    #        track = track_query.one()
    #
    #TRACKPOINTS without a track (track_id is NULL in the old schema)
    #trackpoints_unlinked = DBSession.query(TrackpointOld).filter(TrackpointOld.track_id == None).all()
    #trackpoint_list = list()
    #for trackpoint_unlinked in trackpoints_unlinked:
    #    trackpoint = Trackpoint(
    #        id = trackpoint_unlinked.id,
    #        track_id = track.id,
    #        latitude = trackpoint_unlinked.latitude,
    #        longitude = trackpoint_unlinked.longitude,
    #        altitude = trackpoint_unlinked.altitude,
    #        velocity = trackpoint_unlinked.velocity,
    #        temperature = trackpoint_unlinked.temperature,
    #        direction = trackpoint_unlinked.direction,
    #        pressure = trackpoint_unlinked.pressure,
    #        timestamp = trackpoint_unlinked.timestamp,
    #        uuid = None
    #    )
    #    DBSession.add(trackpoint)
    #    if trackpoint_unlinked.location or trackpoint_unlinked.country_id:
    #        location = Location(
    #            trackpoint_id = trackpoint.id,
    #            country_id = trackpoint_unlinked.country_id,
    #            name = trackpoint_unlinked.location
    #        )
    #        DBSession.add(location)
    #
    ##LOG
    #
    #logs_old = DBSession.query(LogOld).filter(LogOld.id==446).all()
    #logs_old = DBSession.query(LogOld).all()
    #
    #for log_old in logs_old:
    #    log_query = DBSession.query(Log).filter(Log.id == log_old.id)
    #    if log_query.count() < 1:
    #        content = log_old.content
    #        imgid_tag_list = re.findall("(\[imgid[0-9A-Za-z-]{1,}\])", content)
    #        for tag in imgid_tag_list:
    #            print tag
    #            imgid = re.search("^\[imgid([0-9A-Za-z-]{1,})\]$", tag).group(1)
    #            content = content.replace(tag, '[imgid=%s]' % imgid)
    #        log = Log(
    #            id = log_old.id,
    #            infomarker = log_old.infomarker_id,
    #            topic = log_old.topic,
    #            content = content,
    #            created = log_old.createdate,
    #            etappe = 1,
    #            author = 1,
    #            uuid = None
    #        )
    #        DBSession.add(log)
    #        DBSession.flush()
    #    else:
    #        log = log_query.one()
    #    track = DBSession.query(Track).filter(Track.id == log.trackpoint_log_ref.track_id).one()
    #    log.track.append(track)
    #
    #IMAGE
    #images_old = DBSession.query(Imageinfo).filter(Imageinfo.log_id==446).all()
    images_old = DBSession.query(Imageinfo).all()
    for image_old in images_old:
        image_query = DBSession.query(Image).filter(Image.id == image_old.id)
        if image_query.count() < 1:
            name = image_old.imgname.split('/')[-1]
            location_old = image_old.imgname
            location_old_prefix = '/srv'
            #location_old_prefix = '/media/backup2/images/images_backup2/srv'
            basedir = '/srv/trackdata/bydate'
            img_large_w = '990'   # width of the large web version
            img_medium_w = '500'  # width of images in the editor preview
            img_thumb_w = '150'   # width of thumbnails
            print location_old.split('/')[-5]
            if re.findall("best", location_old):
                location_old_fullsize = location_old_prefix + location_old.replace(location_old.split('/')[-2] + '/', 'best/')
                location_new = filetools.createdir('/srv/trackdata/bydate', 'christian', location_old.split('/')[-4]) + 'images/sorted/'
                os.popen('/bin/cp %s %s' % (location_old_fullsize, location_new))
                imagetools.resize(location_new, location_new + img_large_w + '/', name, img_large_w)
            else:
                location_old_fullsize = location_old_prefix + location_old.replace(location_old.split('/')[-2] + '/', '')
                location_new = filetools.createdir('/srv/trackdata/bydate', 'christian', location_old.split('/')[-5]) + 'images/sorted/'
                os.popen('/bin/cp %s%s %s%s/' % (location_old_prefix, location_old, location_new, img_large_w))
                os.popen('/bin/cp %s %s' % (location_old_fullsize, location_new))
            print location_old_prefix + location_old
            print location_old_fullsize
            print location_new
            imagetools.resize(location_new, location_new + img_medium_w + '/', name, img_medium_w)
            imagetools.resize(location_new, location_new + img_thumb_w + '/', name, img_thumb_w)
            #note: Image.__init__ as defined above takes no id parameter, so
            #passing id here relies on an older/different model definition
            image = Image(
                id = image_old.id,
                name = name,
                location = location_new,
                title = image_old.flickrtitle,
                comment = image_old.flickrdescription,
                alt = None,
                aperture = image_old.aperture,
                shutter = image_old.shutter,
                focal_length = image_old.focal_length,
                iso = image_old.iso,
                timestamp_original = image_old.flickrdatetaken,
                hash = image_old.photohash,
                hash_large = image_old.photohash_990,  #TODO: we need the real file's hash if a 990px image was uploaded, not converted
                author = 1,
                trackpoint = image_old.trackpoint_id,
                last_change = timetools.now(),
                published = timetools.now(),
                uuid = None
            )
            DBSession.add(image)
            DBSession.flush()
            flickr = FlickrImage(
                image = image.id,
                farm = image_old.flickrfarm,
                server = image_old.flickrserver,
                photoid = image_old.flickrphotoid,
                secret = image_old.flickrsecret
            )
            DBSession.add(flickr)
            DBSession.flush()
        else:
            image = image_query.one()
        log = DBSession.query(Log).filter(Log.id == image_old.log_id).one()
        log.image.append(image)
    return Response('ok')