Example no. 1
	def post(self):
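		# Upload handler: rejects anything that is not an image, reads the uploaded
		# bytes from the file store, saves them as a UserImage named for the given
		# date, and attaches the new filename to that date's Post if one exists.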
		file_info = self.get_file_infos()[0]
		self.response.headers['Content-Type'] = "application/json"
		year = self.request.get('year')
		month = self.request.get('month')
		day = self.request.get('day')
		date = datetime.datetime(int(year), int(month), int(day))

		if file_info.content_type.lower() not in ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp'):
			return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + file_info.content_type}))

		bytes = filestore.read(file_info.gs_object_name)
		existing_images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]

		filename = UserImage.create_image_name(file_info.filename, date, existing_images)
		img = UserImage()
		img.import_image(filename, file_info.filename, bytes, date, None)
		img.put()
		filestore.delete(file_info.gs_object_name)
		#If there's a post here we should add the image...
		post = Post.query(Post.date == date).get()
		if post:
			post.has_images = True
			if post.images is None:
				post.images = []
			post.images.append(filename)
			post.put()

		self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
Example no. 2
	def post(self):
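		# Task handler for the Blobstore-to-GCS migration: walks every UserImage
		# that has not been migrated yet (filename != original_size_key), calls
		# migrate_to_gcs() on each one and records progress on the task entity.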

		task_key = ndb.Key(urlsafe=self.request.get('task'))
		task = task_key.get()

		task.update('Starting migration...', status='inprogress')

		logging.info('Starting migration ...')
		try:

			images = [i for i in UserImage.query() if i.filename != i.original_size_key]

			task.update('Migrating...', total_images=len(images))
			logging.info('Migrating %s images' % len(images))

			for img in images:
								
				img.migrate_to_gcs()
				task.migrated_images += 1
				if task.migrated_images % 3 == 0:
					task.update('Migrated %s/%s images' % (task.migrated_images, task.total_images))
					logging.info(task.message)
					task.put()

			task.update('Finished migrating images. Have a nice day :)', status='finished')
			logging.info(task.message)
		except Exception, ex:
			task.update('Failed to migrate: %s' % ex, status='failed')
			log_error('Failed migrate images', traceback.format_exc(6))
Example no. 3
	def post(self, filename):
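		# Deletes a single image: detaches it from the Post for that date (including
		# its $IMG placeholder in the text), removes both stored copies from the
		# file store and finally deletes the UserImage entity.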
		self.response.headers['Content-Type'] = "application/json"
		img = UserImage.query(UserImage.filename == filename).get()
		if not img:
			return self.response.write(json.dumps({"status" : "error", "message" : "Image does not exist"}))

		post = Post.query(Post.date == img.date).get()
		
		#Remove it from the post
		if post:
			try:
				post.images.remove(filename)
				post.text = post.text.replace('$IMG:' + filename, '').replace('\n\n\n\n', '\n\n')
			except:
				pass

			if len(post.images) == 0:
				post.has_images = False

			post.put()

		filestore.delete(img.serving_size_key)
		filestore.delete(img.original_size_key)
		img.key.delete()

		self.response.write(json.dumps({"status" : "ok"}))
Example no. 4
	def post(self, filename):
		self.response.headers['Content-Type'] = "application/json"
		img = UserImage.query(UserImage.filename == filename).get()
		if not img:
			return self.response.write(json.dumps({"status" : "error", "message" : "Image does not exist"}))

		post = Post.query(Post.date == img.date).get()
		
		#Remove it from the post
		if post:
			try:
				post.images.remove(filename)
			except:
				pass

			if len(post.images) == 0:
				post.has_images = False

			post.put()

		filestore.delete(img.serving_size_key)
		filestore.delete(img.original_size_key)
		img.key.delete()

		self.response.write(json.dumps({"status" : "ok"}))
Example no. 5
    def post(self):

        task_key = ndb.Key(urlsafe=self.request.get('task'))
        task = task_key.get()

        task.update('Starting migration...', status='inprogress')

        logging.info('Starting migration ...')
        try:

            images = [
                i for i in UserImage.query()
                if i.filename != i.original_size_key
            ]

            task.update('Migrating...', total_images=len(images))
            logging.info('Migrating %s images' % len(images))

            for img in images:

                img.migrate_to_gcs()
                task.migrated_images += 1
                if task.migrated_images % 3 == 0:
                    task.update('Migrated %s/%s images' %
                                (task.migrated_images, task.total_images))
                    logging.info(task.message)
                    task.put()

            task.update('Finished migrating images. Have a nice day :)',
                        status='finished')
            logging.info(task.message)
        except Exception, ex:
            task.update('Failed to migrate: %s' % ex, status='failed')
            log_error('Failed migrate images', traceback.format_exc(6))
Example no. 6
    def process_attachments(self, mail_message, post):
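        # Saves every supported image attachment of the incoming mail as a
        # UserImage for the post's date; if the image was referenced inline via a
        # $IMG:content_id placeholder, the placeholder is rewritten to the new filename.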
        attachments = []

        try:
            attachments = mail_message.attachments
        except exceptions.AttributeError:
            pass  #No attachments, then the attribute doesn't even exist :/

        if attachments:
            logging.info('Received %s attachment(s)' % len(attachments))

        for attachment in attachments:

            original_filename = attachment.filename
            encoded_payload = attachment.payload
            content_id = attachment.content_id
            if content_id:
                content_id = content_id.replace('<', '').replace(
                    '>', ''
                )  # Don't want these around the id, messes with our tag handling
            logging.info('Processing attachment: %s' % original_filename)

            if re.search('\\.(jpe?g|png|bmp|gif)$', original_filename.lower()):
                if post.images is None:
                    post.images = []

                bytes = encoded_payload.payload
                if encoded_payload.encoding:
                    bytes = bytes.decode(encoded_payload.encoding)

                post.has_images = True
                user_image = UserImage()
                img_name = UserImage.create_image_name(original_filename,
                                                       post.date, post.images)
                user_image.import_image(img_name, original_filename, bytes,
                                        post.date, content_id)
                post.images.append(img_name)

                user_image.is_inline = False
                if content_id:
                    placeholder = '$IMG:' + content_id
                    if placeholder in post.text:
                        user_image.is_inline = True
                        #Ok, lets put in a filename instead of the content_id
                        post.text = post.text.replace(placeholder,
                                                      '$IMG:' + img_name)

                user_image.put()

            else:
                logging.warning('Received unsupported attachment, %s' %
                                original_filename)
Example no. 7
	def post(self):
		file_info = self.get_file_infos()[0]
		self.response.headers['Content-Type'] = "application/json"
		year = self.request.get('year')
		month = self.request.get('month')
		day = self.request.get('day')
		date = datetime.datetime(int(year), int(month), int(day))

		if file_info.content_type.lower() not in ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp'):
			return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + file_info.content_type}))

		bytes = filestore.read(file_info.gs_object_name)
		existing_images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]

		filename = UserImage.create_image_name(file_info.filename, date, existing_images)
		img = UserImage()
		img.import_image(filename, file_info.filename, bytes, date)
		img.put()
		filestore.delete(file_info.gs_object_name)
		#If there's a post here we should add the image...
		post = Post.query(Post.date == date).get()
		if post:
			post.has_images = True
			if post.images is None:
				post.images = []
			post.images.append(filename)
			post.put()

		self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
Example no. 8
	def process_attachments(self, mail_message, post):
		attachments = []

		try:
			attachments = mail_message.attachments
		except exceptions.AttributeError:
			pass #No attachments, then the attribute doesn't even exist :/

		if attachments:
			logging.info('Received %s attachment(s)' % len(attachments))
		
		for original_filename, encoded_payload in attachments:
			logging.info('Processing attachment: %s' % original_filename)

			if re.search('\\.(jpe?g|png|bmp|gif)$', original_filename.lower()):
				if post.images is None:
					post.images = []

				bytes = encoded_payload.payload
				if encoded_payload.encoding:
					bytes = bytes.decode(encoded_payload.encoding)
				
				post.has_images = True
				user_image = UserImage()
				img_name = UserImage.create_image_name(original_filename, post.date, post.images)
				user_image.import_image(img_name, original_filename, bytes, post.date)
				post.images.append(img_name)
				user_image.put()

			else:
				logging.warning('Received unsupported attachment, %s' % original_filename)
Example no. 9
    def get(self, filename):
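        # Serves a stored image by filename: ?fullsize returns the original upload,
        # otherwise the resized serving copy is sent via send_blob().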

        image = UserImage.query(UserImage.filename == filename).get()

        if not image:
            return self.error(404)

        if self.request.get('fullsize'):
            key = image.original_size_key
        else:
            key = image.serving_size_key

        self.send_blob(filestore.get_blob_key(key))
Example no. 10
    def get(self, filename):

        image = UserImage.query(UserImage.filename == filename).get()

        if not image:
            return self.error(404)

        if self.request.get('fullsize'):
            key = image.original_size_key
        else:
            key = image.serving_size_key

        self.send_blob(filestore.get_blob_key(key))
Example no. 11
	def add_images_to_zip(self, export_task, archive):
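		# Writes the original bytes of every stored image into the export zip
		# archive, updating the export task's progress message every few files.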
		export_task.update('Fetching image information...')

		images = [i for i in UserImage.query().order(UserImage.filename).fetch()]

		export_task.update('Found %s images...' % len(images))

		for i, img in enumerate(images):
			img_data = filestore.read(img.original_size_key)
			archive.writestr('/img_%s' % img.filename.replace('.jpg', '.jpeg'), img_data)
			if i % 5 == 0:
				export_task.update('Added %s of %s images to zip... ' % (i+1,len(images)))

		export_task.update('Finished adding images...')
Example no. 12
	def process_attachments(self, mail_message, post):
		attachments = []

		try:
			attachments = mail_message.attachments
		except exceptions.AttributeError:
			pass #No attachments, then the attribute doesn't even exist :/

		if attachments:
			logging.info('Received %s attachment(s)' % len(attachments))
		
		for attachment in attachments:
			
			original_filename = attachment.filename
			encoded_payload = attachment.payload
			content_id = attachment.content_id
			if content_id:
				content_id = content_id.replace('<', '').replace('>', '') # Don't want these around the id, messes with our tag handling
			logging.info('Processing attachment: %s' % original_filename)

			if re.search('\\.(jpe?g|png|bmp|gif)$', original_filename.lower()):
				if post.images is None:
					post.images = []

				bytes = encoded_payload.payload
				if encoded_payload.encoding:
					bytes = bytes.decode(encoded_payload.encoding)
				
				post.has_images = True
				user_image = UserImage()
				img_name = UserImage.create_image_name(original_filename, post.date, post.images)
				user_image.import_image(img_name, original_filename, bytes, post.date, content_id)
				post.images.append(img_name)
				
				user_image.is_inline = False
				if content_id:
					placeholder = '$IMG:' + content_id
					if placeholder in post.text:
						user_image.is_inline = True
						#Ok, lets put in a filename instead of the content_id
						post.text = post.text.replace(placeholder, '$IMG:' + img_name)
				
				user_image.put()

			else:
				logging.warning('Received unsupported attachment, %s' % original_filename)
Example no. 13
	def delete_post(self, post):
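		# Deletes a post together with everything attached to it: the stored image
		# files and their UserImage entities, any raw mails for that date, the Post
		# itself, and the month's count in the PostCounter.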
		images = UserImage.query(UserImage.date == post.date).fetch()

		for img in images:
			filestore.delete(img.serving_size_key)
			filestore.delete(img.original_size_key)
			img.key.delete()

		emails = RawMail.query(RawMail.date == post.date).fetch()
		for email in emails:
			email.key.delete()

		post.key.delete()
		PostCounter.get().decrement(post.date.year, post.date.month)
		
		logging.info('Deleted %s images, %s emails and 1 post from %s' % (len(images), len(emails), post.date.strftime('%Y-%m-%d')))
Example no. 14
	def delete_post(self, post):
		images = UserImage.query(UserImage.date == post.date).fetch()

		for img in images:
			filestore.delete(img.serving_size_key)
			filestore.delete(img.original_size_key)
			img.key.delete()

		emails = RawMail.query(RawMail.date == post.date).fetch()
		for email in emails:
			email.key.delete()

		post.key.delete()
		PostCounter.get().decrement(post.date.year, post.date.month)
		
		logging.info('Deleted %s images, %s emails and 1 post from %s' % (len(images), len(emails), post.date.strftime('%Y-%m-%d')))
Example no. 15
	def get(self):

		#Check whether the migration is done so we can see whether to show the Blobstore Migration
		#or not...
		settings = Settings.get()

		if not settings.blobstore_migration_done:
			migration_task_finished = bool(MigrateTask.query(MigrateTask.status == 'finished').get())
			if migration_task_finished:
				settings.blobstore_migration_done = True
				settings.put()
			else:
				#Try to figure out whether this is a new user that has nothing in the blobstore...
				if not UserImage.query().get():
					settings.blobstore_migration_done = True
					settings.put()

		self._render(settings)
Example no. 16
    def add_images_to_zip(self, export_task, archive):
        export_task.update('Fetching image information...')

        images = [
            i for i in UserImage.query().order(UserImage.filename).fetch()
        ]

        export_task.update('Found %s images...' % len(images))

        for i, img in enumerate(images):
            img_data = filestore.read(img.original_size_key)
            archive.writestr('/img_%s' % img.filename.replace('.jpg', '.jpeg'),
                             img_data)
            if i % 5 == 0:
                export_task.update('Added %s of %s images to zip... ' %
                                   (i + 1, len(images)))

        export_task.update('Finished adding images...')
Example no. 17
	def post(self, kind, year, month, day):
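		# Saves or deletes the post for a given date. A newly created post picks up
		# any images already uploaded for that date; a delete redirects to the next
		# post in the same month, or to the month overview if there is none.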
		date = datetime.datetime(int(year),int(month),int(day)).date()		
		post = Post.query(Post.date == date).get()
		
		is_new = False
		if not post:
			post = Post(date=date, source='web',images=[])
			is_new = True
		
		post.text = self.request.get('text')

		save = self.request.get('action') == 'save'
		delete = self.request.get('action') == 'delete'

		if save and delete:
			raise Exception('Something weird happened...')

		if save:
			if is_new:
				post.images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
				post.images.sort()
				post.has_images = True

			post.put()
			if is_new:
				PostCounter.get().increment(post.date.year, post.date.month)

			self.redirect_to_date(post.date)
		elif delete:
			self.delete_post(post)

			next_post = Post.query(Post.date > date).order(Post.date).get()
			if next_post and next_post.date.month == date.month:
				return self.redirect_to_date(next_post.date)			

			#No way, we'll have to just redirect to the empty month
			self.redirect('/past/%s' % date.strftime('%Y-%m'))
		else:
			raise Exception('How the hell did we get here...?')
Example no. 18
	def post(self, kind, year, month, day):
		date = datetime.datetime(int(year),int(month),int(day)).date()		
		post = Post.query(Post.date == date).get()
		
		is_new = False
		if not post:
			post = Post(date=date, source='web',images=[])
			is_new = True
		
		post.text = self.request.get('text')

		save = self.request.get('action') == 'save'
		delete = self.request.get('action') == 'delete'

		if save and delete:
			raise Exception('Something weird happened...')

		if save:
			if is_new:
				post.images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
				post.images.sort()
				post.has_images = True

			post.put()
			if is_new:
				PostCounter.get().increment(post.date.year, post.date.month)

			self.redirect_to_date(post.date)
		elif delete:
			self.delete_post(post)

			next_post = Post.query(Post.date > date).order(Post.date).get()
			if next_post and next_post.date.month == date.month:
				return self.redirect_to_date(next_post.date)			

			#No way, we'll have to just redirect to the empty month
			self.redirect('/past/%s' % date.strftime('%Y-%m'))
		else:
			raise Exception('How the hell did we get here...?')
Example no. 19
	def get(self, kind, year, month, day):
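		# Renders the write/edit form for a date, redirecting between the two
		# variants depending on whether a post already exists, and fills the
		# template with the post text and the images available for that date.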
		date = datetime.datetime(int(year),int(month),int(day)).date()
		
		post = Post.query(Post.date == date).get()
		if kind == 'write' and post:
			return self.redirect('/edit/%s' % date.strftime('%Y-%m-%d'))
		if kind == 'edit' and not post:
			return self.redirect('/write/%s' % date.strftime('%Y-%m-%d'))
		
		data = { 
			"date" : date,
			"text" : "",
			"page" : "write",
			"kind" : kind
		}
		if post:
			data["page"] = "edit"
			data["text"] = post.text
			data["images"] = post.images
		else:
			data["images"] = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]

		self.response.write(get_template('edit.html').render(data))
Example no. 20
	def get(self, kind, year, month, day):
		date = datetime.datetime(int(year),int(month),int(day)).date()
		
		post = Post.query(Post.date == date).get()
		if kind == 'write' and post:
			return self.redirect('/edit/%s' % date.strftime('%Y-%m-%d'))
		if kind == 'edit' and not post:
			return self.redirect('/write/%s' % date.strftime('%Y-%m-%d'))
		
		data = { 
			"date" : date,
			"text" : "",
			"page" : "write",
			"kind" : kind
		}
		if post:
			data["page"] = "edit"
			data["text"] = post.text
			data["images"] = post.images
		else:
			data["images"] = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]

		self.response.write(get_template('edit.html').render(data))
Example no. 21
	def post(self):
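		# Task handler for importing an OhLife export zip: creates a Post per text
		# entry and a UserImage per photo, keeps the PostCounter and the task's
		# progress up to date, and deletes the uploaded zip when finished or on failure.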

		import_task_key = ndb.Key(urlsafe=self.request.get('task'))
		import_task = import_task_key.get()

		import_task.update('Unpacking zip file...', status='inprogress')

		logging.info('Starting import ...')
		counter = PostCounter.get()
		try:


			posts, images = self.read_zip_file(import_task.uploaded_file)

			import_task.update('Importing...', total_photos=len(images), total_posts=len(posts))
			logging.info('Importing %s posts, %s images' % (len(posts), len(images)))

			posts = self.filter_posts(posts)
			
			for date, text in posts:
				str_date = date.strftime('%Y-%m-%d')

				p = Post(
					date=date,
					source='ohlife',
					text=text.decode('utf-8')
				)

				p.images = []
				p.has_images = False

				post_images = [(k,images[k]) for k in images.keys() if str_date in k]

				if len(post_images):
					logging.info('Importing %s images for date %s' % (len(post_images), str_date))
					p.images = []
					p.has_images = True
					for name, bytes in post_images:
						user_image = UserImage()
						img_name = name.replace('img_', '').replace('.jpeg', '.jpg')
						user_image.import_image(img_name, name, bytes, date)
						p.images.append(img_name)
						import_task.imported_photos += 1
						user_image.put()

				p.put()
				counter.increment(p.date.year, p.date.month, False)

				import_task.imported_posts += 1
				if import_task.imported_posts % 10 == 0:

					import_task.update('Imported %s/%s post, %s/%s photos...' % (import_task.imported_posts, import_task.total_posts,import_task.imported_photos, import_task.total_photos))
					logging.info(import_task.message)
					counter.put()

			counter.put()

			skipped_posts = import_task.total_posts - import_task.imported_posts
			skipped_photos = import_task.total_photos - import_task.imported_photos
			msg = 'Imported %s posts and %s photos.' % (import_task.imported_posts, import_task.imported_photos)
			if skipped_posts or skipped_photos:
				msg += ' %s posts and %s photos already existed and were skipped.' % (skipped_posts, skipped_photos)
			
			import_task.update(msg, status='finished')
			logging.info(import_task.message)
			filestore.delete(import_task.uploaded_file)
		except Exception, ex:
			try:
				filestore.delete(import_task.uploaded_file)
			except:
				pass
				
			try:
				counter.put()
			except:
				pass
			import_task.update('Failed to import: %s' % ex, status='failed')
			log_error('Failed import', traceback.format_exc(6))
Example no. 22
    def get(self):
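        # Dropbox backup: writes all posts into a single MyLife.txt, then uploads
        # every image that is not yet marked as backed up or is missing from the
        # Dropbox file list, marking each one as it is uploaded.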
        images_total = 0
        images_backed_up = 0
        try:
            self.response.headers['Content-Type'] = 'text/plain'
            settings = Settings.get()

            if not settings.dropbox_access_token:
                self.log(
                    'No access token available, no backup will be performed.')
                return

            posts = [p for p in Post.query().order(Post.date).fetch()]

            self.log('Backing up %s posts to Dropbox' % len(posts))
            post_text = StringIO()
            for p in posts:
                post_text.write(p.date.strftime('%Y-%m-%d'))
                post_text.write('\r\n\r\n')
                post_text.write(
                    p.text.replace('\r\n', '\n').replace('\n',
                                                         '\r\n').rstrip())
                post_text.write('\r\n\r\n')

            result = self.put_file(settings.dropbox_access_token, 'MyLife.txt',
                                   post_text.getvalue().encode('utf-8'))
            post_text.close()
            self.log('Backed up posts. Revision: %s' % result['rev'])

            self.log('Fetching Dropbox file list')

            files_in_dropbox = self.get_dropbox_filelist(
                settings.dropbox_access_token)

            self.log('Got %s files from Dropbox' % len(files_in_dropbox))

            self.log('Fetching images...')
            images = [
                i for i in UserImage.query().order(UserImage.date).fetch()
            ]

            self.log('Total images in MyLife: %s' % len(images))

            not_backed_up = [i for i in images if not i.backed_up_in_dropbox]
            not_in_dropbox = [
                i for i in images if not i.filename in files_in_dropbox
            ]

            self.log('\nFiles not backed up: \n\n' +
                     '\n'.join([i.filename for i in not_backed_up]))
            self.log('\nFiles marked as backed up, but not in Dropbox: \n\n' +
                     '\n'.join([i.filename for i in not_in_dropbox]))

            images = not_backed_up + not_in_dropbox

            images_total = len(images)
            self.log('Found %s images that need to be backed up in Dropbox' %
                     images_total)
            for img in images:
                self.log('Backing up %s' % img.filename)
                bytes = filestore.read(img.original_size_key)
                result = self.put_file(settings.dropbox_access_token,
                                       img.filename, bytes)
                self.log('Backed up %s. Revision: %s' %
                         (img.filename, result['rev']))
                img.backed_up_in_dropbox = True
                img.put()
                images_backed_up += 1

            settings.dropbox_last_backup = datetime.datetime.now()
            settings.put()
            self.log('Finished backup successfully')
        except apiproxy_errors.OverQuotaError, ex:
            self.log(ex)
            log_error(
                'Error backing up to Dropbox, quota exceeded',
                'The backup operation did not complete because it ran out of quota. '
                +
                'The next time it runs it will continue backing up your posts and images.'
                + '%s images out of %s were backed up before failing' %
                (images_backed_up, images_total))
Example no. 23
    def post(self):

        import_task_key = ndb.Key(urlsafe=self.request.get('task'))
        import_task = import_task_key.get()

        import_task.update('Unpacking zip file...', status='inprogress')

        logging.info('Starting import ...')
        counter = PostCounter.get()
        try:

            posts, images = self.read_zip_file(import_task.uploaded_file)

            import_task.update('Importing...',
                               total_photos=len(images),
                               total_posts=len(posts))
            logging.info('Importing %s posts, %s images' %
                         (len(posts), len(images)))

            posts = self.filter_posts(posts)

            for date, text in posts:
                str_date = date.strftime('%Y-%m-%d')

                p = Post(date=date, source='ohlife', text=text.decode('utf-8'))

                p.images = []
                p.has_images = False

                post_images = [(k, images[k]) for k in images.keys()
                               if str_date in k]

                if len(post_images):
                    logging.info('Importing %s images for date %s' %
                                 (len(post_images), str_date))
                    p.images = []
                    p.has_images = True
                    for name, bytes in post_images:
                        user_image = UserImage()
                        img_name = name.replace('img_',
                                                '').replace('.jpeg', '.jpg')
                        user_image.import_image(img_name, name, bytes, date)
                        p.images.append(img_name)
                        import_task.imported_photos += 1
                        user_image.put()

                p.put()
                counter.increment(p.date.year, p.date.month, False)

                import_task.imported_posts += 1
                if import_task.imported_posts % 10 == 0:

                    import_task.update(
                        'Imported %s/%s post, %s/%s photos...' %
                        (import_task.imported_posts, import_task.total_posts,
                         import_task.imported_photos,
                         import_task.total_photos))
                    logging.info(import_task.message)
                    counter.put()

            counter.put()

            skipped_posts = import_task.total_posts - import_task.imported_posts
            skipped_photos = import_task.total_photos - import_task.imported_photos
            msg = 'Imported %s posts and %s photos.' % (
                import_task.imported_posts, import_task.imported_photos)
            if skipped_posts or skipped_photos:
                msg += ' %s posts and %s photos already existed and were skipped.' % (
                    skipped_posts, skipped_photos)

            import_task.update(msg, status='finished')
            logging.info(import_task.message)
            filestore.delete(import_task.uploaded_file)
        except Exception, ex:
            try:
                filestore.delete(import_task.uploaded_file)
            except:
                pass

            try:
                counter.put()
            except:
                pass
            import_task.update('Failed to import: %s' % ex, status='failed')
            log_error('Failed import', traceback.format_exc(6))
Example no. 24
	def get(self):
		images_total = 0
		images_backed_up = 0
		try:
			self.response.headers['Content-Type'] = 'text/plain'
			settings = Settings.get()

			if not settings.dropbox_access_token:
				self.log('No access token available, no backup will be performed.')
				return


			posts = [p for p in Post.query().order(Post.date).fetch()]

			self.log('Backing up %s posts to Dropbox' % len(posts))
			post_text = StringIO()
			for p in posts:
				post_text.write(p.date.strftime('%Y-%m-%d'))
				post_text.write('\r\n\r\n')
				post_text.write(p.text.replace('\r\n', '\n').replace('\n', '\r\n').rstrip())
				post_text.write('\r\n\r\n')

			result = self.put_file(settings.dropbox_access_token, 'MyLife.txt', post_text.getvalue().encode('utf-8'))
			post_text.close()
			self.log('Backed up posts. Revision: %s' % result['rev'])

			self.log('Fetching Dropbox file list')
			
			files_in_dropbox = self.get_dropbox_filelist(settings.dropbox_access_token)
			
			self.log('Got %s files from Dropbox' % len(files_in_dropbox))

			self.log('Fetching images...')
			images = [i for i in UserImage.query().order(UserImage.date).fetch()]

			self.log('Total images in MyLife: %s' % len(images))

			not_backed_up = [i for i in images if not i.backed_up_in_dropbox]
			not_in_dropbox = [i for i in images if not i.filename in files_in_dropbox]

			self.log('\nFiles not backed up: \n\n' + '\n'.join([i.filename for i in not_backed_up]))
			self.log('\nFiles marked as backed up, but not in Dropbox: \n\n' + '\n'.join([i.filename for i in not_in_dropbox]))

			images = not_backed_up + not_in_dropbox

			images_total = len(images)
			self.log('Found %s images that need to be backed up in Dropbox' % images_total)
			for img in images:
				self.log('Backing up %s' % img.filename)
				bytes = filestore.read(img.original_size_key)
				result = self.put_file(settings.dropbox_access_token, img.filename, bytes)
				self.log('Backed up %s. Revision: %s' % (img.filename, result['rev']))
				img.backed_up_in_dropbox = True
				img.put()
				images_backed_up += 1


			settings.dropbox_last_backup = datetime.datetime.now()
			settings.put()
			self.log('Finished backup successfully')
		except apiproxy_errors.OverQuotaError, ex:
			self.log(ex)
			log_error('Error backing up to Dropbox, quota exceeded', 'The backup operation did not complete because it ran out of quota. ' +
				'The next time it runs it will continue backing up your posts and images.' +
				'%s images out of %s were backed up before failing' % (images_backed_up, images_total))