def toS3(job, backend=False): vprint("TO S3 **************************") frontend = not backend s3.s3Init() im = imageForJob(job) files = im.resizeFiles() files.extend(im.tileFiles()) numfiles = len(files) job.total = numfiles print "Sending ", numfiles, " to S3 for " + im.topic job.status = "active" if frontend: saveJob(job) cnt = 0 tsz = 0 stm = time.time() cnt = 0 for fl in files: """ sz not used now; determined from the tiling dir size locally """ updateDb = frontend and (cnt % 20 == 0) vprint("about to save " + fl) sz = s3.s3SaveFile(fl, relativeTo="images", contentType="image/jpeg") if updateDb: if checkJobCanceled(job): return cnt = cnt + 1 vprint("size " + str(sz)) if cnt % 25 == 0: print "Sent ", cnt tsz += sz #cnt = cnt+1 job.so_far = cnt if updateDb: saveJob(job) im = imageForJob(job) im.atS3 = 1 im.dynsave() imdir = im.imDir() saveFileSizes(imdir + "resized") saveFileSizes(imdir + "tiling") #im.removeSubdir("resized") #im.removeSubdir("tiling") job.so_far = numfiles job.status = "done" job.resources_used = json.dumps({ "time": time.time() - stm, "num_files": len(files) }) saveJob(job) print "Done importing: " + im.topic return tsz
# One-off maintenance script: read all logs and archive them.
from api.job import buildTiling,s3Init,s3SaveFile

# Earlier experiments, kept for reference:
#rr = logs.logKeys()
#rrr = [ky for ky in rr]
#x = logs.readLog(rrr[0],1)

x = logs.readLogs()
logs.storeLogs(x)

#tt = models.loadImageD("/image/cg/test4")

# Dead code previously disabled by wrapping in a triple-quoted string
# (fragile — the string was left unterminated in this chunk); converted
# to ordinary comments so the module parses on its own:
# s3.s3Init()
# def rih(im):
#     tp = "/image/cg/"+im
#     models.reduceImageHeight(tp,100)
#     s3.s3SaveFile("/resizedh100/cg/"+im+".jpg")
#     print "TOPIC ",tp
#     im = models.loadImageD(tp)
#     im.atS3 = 1
#     im.beenTiled = 1
#     im.dynsave()
# rih("The_Ambassadors")
# rih("The_Dutch_Proverbs")
# rih("earthly_delights_1")
def logKeys():
    """Return a lazy result set over every key in the imagediver_log bucket."""
    bucket = s3.s3Init("imagediver_log")
    return boto.s3.bucketlistresultset.BucketListResultSet(bucket)