def publish(self, pageStore=None):  # OBSOLETE
    vprint("PUBLISHING ", self.__dict__)
    alb = self.topic
    tpo = misc.topicOb(alb)
    imtopic = tpo.imageTopic()
    imdir = tpo.imageOwner + "/" + tpo.imageName
    vprint("PUBLISH ALBUM", self.topic, " with IMAGE ", imtopic, "imdir", imdir)
    js = self.compute_json(False)
    self.publishJson(js)
    tp = self.topic
    imageD = image.loadImageD(imtopic, getattr(self, "pageStore", None))
    pg = self.genPage(self.computeTitle(imageD))
    self.publishHtml(pg)
    # collect the crop images for any snaps not yet published
    snapDs = self.snaps()
    cropids = []
    newPubs = []
    fls = []
    for snap in snapDs:
        if snap["published"]:
            continue
        crid = snap["cropid"]
        cropids.append(crid)
        fls.append(imdir + "/snap/" + str(crid) + ".jpg")
        fls.append(imdir + "/snapthumb/" + str(crid) + ".jpg")
        newPubs.append(snap)
    for fl in fls:
        vprint("about to save " + fl)
        s3.s3SaveFile(fl, relativeTo="images", contentType="image/jpeg")
    dynamo.assertPublished(self)
    self.setSnapsPublished(newPubs, 1)

def publish(self, includeImages=True, pageStore=None):
    """The snap images appear under the image directory
    (/imageowner/imagename). includeImages=False skips uploading the crop
    JPEGs; the topic JSON and page are always generated."""
    #vprint("PUBLISHING ", self.__dict__)
    alb = self.album
    tpo = misc.topicOb(alb)
    imtopic = tpo.imageTopic()
    imdir = tpo.imageOwner + "/" + tpo.imageName
    vprint("PUBLISH SNAP", self.topic, " with IMAGE ", imtopic, "imdir", imdir)
    js = self.compute_json(False)
    tp = self.topic
    #topicdir = "/topicd/" if constants.publishToS3Dev else "/topic/"
    s3path = constants.topicDir + tp + "/main.json"  # the path where the page will finally end up
    s3.s3SetContents(s3path, contents=js, relativeTo="", contentType="application/json")
    self.genPage(True)
    if not includeImages:
        return
    imageD = image.loadImageD(imtopic, getattr(self, "pageStore", None))
    """
    public = getattr(imageD, "isPublic", None)
    if not public:
        imageD.isPublic = 1
        imageD.dynsave(False)
    """
    crid = self.cropid
    fls = [imdir + "/snap/" + str(crid) + ".jpg",
           imdir + "/snapthumb/" + str(crid) + ".jpg"]
    for fl in fls:
        vprint("about to save " + fl)
        s3.s3SaveFile(fl, relativeTo="images", contentType="image/jpeg")

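# Illustrative sketch (not part of the original module): the S3 key layout that
# publish() above writes for one snap, assuming misc.topicOb and
# constants.topicDir behave as used there. The function name and its return
# shape are hypothetical, added only to make the layout explicit.
def snapPublishPaths(snapTopic, albumTopic, cropid):
    """Return (jsonKey, jpegKeys) mirroring what publish() uploads for one snap."""
    tpo = misc.topicOb(albumTopic)
    imdir = tpo.imageOwner + "/" + tpo.imageName              # image directory: /imageowner/imagename
    jsonKey = constants.topicDir + snapTopic + "/main.json"   # topic page JSON
    jpegKeys = [imdir + "/snap/" + str(cropid) + ".jpg",      # full-size crop
                imdir + "/snapthumb/" + str(cropid) + ".jpg"] # thumbnail
    return jsonKey, jpegKeys
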
def rih(im):
    tp = "/image/cg/" + im
    models.reduceImageHeight(tp, 100)
    s3.s3SaveFile("/resizedh100/cg/" + im + ".jpg")
    print "TOPIC ", tp
    im = models.loadImageD(tp)
    im.atS3 = 1
    im.beenTiled = 1
    im.dynsave()

def toS3(job, backend=False):
    vprint("TO S3 **************************")
    frontend = not backend
    s3.s3Init()
    im = imageForJob(job)
    files = im.resizeFiles()
    files.extend(im.tileFiles())
    numfiles = len(files)
    job.total = numfiles
    print "Sending ", numfiles, " to S3 for " + im.topic
    job.status = "active"
    if frontend:
        saveJob(job)
    cnt = 0
    tsz = 0
    stm = time.time()
    for fl in files:
        # sz not used now; determined from the tiling dir size locally
        updateDb = frontend and (cnt % 20 == 0)
        vprint("about to save " + fl)
        sz = s3.s3SaveFile(fl, relativeTo="images", contentType="image/jpeg")
        if updateDb:
            if checkJobCanceled(job):
                return
        cnt = cnt + 1
        vprint("size " + str(sz))
        if cnt % 25 == 0:
            print "Sent ", cnt
        tsz += sz
        #cnt = cnt+1
        job.so_far = cnt
        if updateDb:
            saveJob(job)
    im = imageForJob(job)
    im.atS3 = 1
    im.dynsave()
    imdir = im.imDir()
    saveFileSizes(imdir + "resized")
    saveFileSizes(imdir + "tiling")
    #im.removeSubdir("resized")
    #im.removeSubdir("tiling")
    job.so_far = numfiles
    job.status = "done"
    job.resources_used = json.dumps({"time": time.time() - stm, "num_files": len(files)})
    saveJob(job)
    print "Done importing: " + im.topic
    return tsz

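# Illustrative sketch (not part of the original module): the batching pattern
# used by the upload loop in toS3() above, isolated for clarity. The helper
# names (uploadOne, saveProgress, isCanceled) are hypothetical stand-ins for
# s3.s3SaveFile, saveJob and checkJobCanceled; the real loop also tracks
# job.so_far and logs per-file sizes.
def uploadWithProgress(files, uploadOne, saveProgress, isCanceled, batch=20):
    """Upload files one by one, persisting progress every `batch` files.
    Returns total bytes uploaded, or None if the job was canceled."""
    totalBytes = 0
    cnt = 0
    for fl in files:
        if cnt % batch == 0:
            if isCanceled():
                return None        # stop early; caller handles cleanup
            saveProgress(cnt)      # infrequent DB write keeps the loop cheap
        totalBytes += uploadOne(fl)
        cnt += 1
    saveProgress(len(files))       # final progress write
    return totalBytes
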
def sendPageToS3(self, srcFile=None):
    s3p = self.s3path()
    if srcFile:
        s3.s3SetContents(s3p, srcFile, relativeTo="topic", contentType="text/html")
    else:
        s3.s3SaveFile(s3p, relativeTo="topic", contentType="text/html")

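# Hedged usage sketch (hypothetical caller): `page` is any object providing
# s3path() and sendPageToS3() as defined above.
#
#   page.sendPageToS3(srcFile=newPage)  # hand srcFile through to s3.s3SetContents
#   page.sendPageToS3()                 # upload the existing file at the topic-relative s3path()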