def GetAllFilesOfStorage():
    """Return metadata for every object in the domain bucket.

    Uses the module-level ``domain_name`` as the bucket name.

    Returns:
        A list of dicts with keys ``name``, ``size``, ``url`` and
        ``last_modified`` -- one per stored object -- or ``None`` when
        listing the bucket fails.
    """
    try:
        bucket = Bucket(domain_name)
        files = []
        for f in bucket.list():
            files.append({
                "name": f.name,
                "size": f.bytes,
                "url": bucket.generate_url(f.name),
                "last_modified": f.last_modified,
            })
        Log("GetAllFilesOfStorage Count %s" % str(len(files)))
        return files
    except Exception as e:  # 'as' form works on Python 2.6+ and 3
        # Log-and-return-None keeps callers' existing None check working.
        Log("Error when GetAllFilesOfStorage: %s" % e)
        return None
def get_max_file_index(bucket_name, folder_path):
    """get the max file index in specific folder

    Args:
        bucket_name: name of bucket
        folder_path: folder path
    Returns:
        -1 if no files;
        a positive int (including zero) indicating the max file index
    """
    bucket = Bucket(bucket_name)
    folder_path = complete_folder(folder_path)
    # Materialize the listing so emptiness can be tested.
    entries = list(bucket.list(path=folder_path))
    if not entries:
        # the folder does not exist or holds no files
        return -1
    # File names look like "<folder_path><index>.<ext>"; extract the indices.
    indices = [int(e['name'].split(folder_path)[1].split('.')[0])
               for e in entries]
    # Floor of 0 mirrors the original accumulator's starting value.
    return max([0] + indices)
def get(self, request, *args, **kwargs):
    """Kick off a deferred MySQL export and prune backups older than 30 days."""
    from sae.deferredjob import MySQLExport, DeferredJob
    from sae.storage import Bucket as SBucket
    import time
    import datetime

    export_bucket = 'xkongbackup'
    bucket = SBucket(export_bucket)

    # Schedule the export job under a timestamped archive name.
    stamp = time.strftime('%Y_%m_%d_%H_%M_%S')
    filename = 'app_ninan_%s.zip' % stamp
    queue = DeferredJob()
    queue.add(MySQLExport(export_bucket, filename, 'note_note',
                          'backends/backupsuccess/'))
    resp = {'touch': filename}

    # Delete all files in this bucket created a month ago.
    cutoff = datetime.datetime.now() - datetime.timedelta(days=30)
    for entry in bucket.list():
        modified = entry['last_modified']
        if not modified:
            continue  # no timestamp -> leave the object alone
        mtime = datetime.datetime.strptime(modified, '%Y-%m-%dT%H:%M:%S.%f')
        if entry['content_type'] is not None and mtime < cutoff:
            bucket.delete_object(entry['name'])
    return self.render_to_response(resp)
def MakeBackup():
    """Scheduled backup task.

    Skips the backup when the KV change counter shows no modifications.
    Otherwise keeps the 3 newest daily ("d.zip.data") and 4 newest
    weekly backups, deletes the rest, then writes a new timestamped
    archive via WriteZipFile.

    Returns:
        A unicode status message.
    """
    dbchgcounter = kv.get("kvdbchg")
    if dbchgcounter is None:
        dbchgcounter = 0
    if dbchgcounter == 0:
        return u"数据未改变"
    kv.set("kvdbchg", 0)  # reset the change counter before backing up
    bucket = Bucket('backup')
    tm = datetime.now()
    # Collect existing backups with their age, split into daily/weekly.
    dellist = []
    fdlist = []
    fwlist = []
    for finf in bucket.list():
        # 2013-05-22T05:09:32.259140 -> 2013-05-22T05:09:32
        last_modified = str(finf[u'last_modified'])
        last_modified = last_modified[:last_modified.index(".")]
        filetime = datetime.strptime(last_modified, "%Y-%m-%dT%H:%M:%S")
        fname = str(finf[u"name"])
        if "d.zip.data" in fname:
            fdlist.append((fname, tm - filetime))
        else:
            fwlist.append((fname, tm - filetime))
    # BUGFIX: sorted() returns a new list; the original discarded that
    # result, so expired files were picked in arbitrary listing order.
    # It also appended fdlist[4:] where the weekly list was intended.
    if len(fdlist) > 3:
        fdlist = sorted(fdlist, key=lambda x: x[1])  # newest (smallest age) first
        dellist = fdlist[3:]
    if len(fwlist) > 4:
        fwlist = sorted(fwlist, key=lambda x: x[1])
        dellist += fwlist[4:]
    for fname in dellist:
        bucket.delete_object(fname[0])
    # Write the new backup archive.
    filename = tm.strftime(u"%Y-%m-%d_%H_%M_%S")
    if tm.weekday() == 5:  # Saturday -> weekly backup
        filename += "w.zip.data"
    else:
        filename += "d.zip.data"
    WriteZipFile(filename)
    return u"已备份"
def get_urls(bucket_name, folder_path):
    """get resource urls from storage

    Args:
        bucket_name: the name of bucket
        folder_path: resources folder path
    Returns:
        list of resource urls for the jpg files in the folder; empty
        list when the folder does not exist or holds no jpg files
    """
    bucket = Bucket(bucket_name)
    folder_path += '/'  # folder should end with '/'
    # Single listing pass -- the original listed the bucket twice
    # (once only to test emptiness).
    names = [i['name'] for i in bucket.list(path=folder_path)
             if i['name'][-3:] == "jpg"]
    return [bucket.generate_url(n) for n in names]
def listDirOfSAEStorage(path=''):
    """List objects under *path* in the 'media' bucket.

    Also (re)creates the bucket and refreshes its ACL/metadata on each
    call -- behavior kept from the original.

    Args:
        path: folder prefix; '' means the bucket root.
    Returns:
        list of object entries under the prefix.
    """
    monkey.patch_all()
    bucket = Bucket('media')
    bucket.put()  # idempotent bucket creation
    bucket.post(
        acl='.r:.sinaapp.com,.r:sae.sina.com.cn,.r:.vipsinaapp.com,.r:.qq.com,.r:.wx.qq.com',
        metadata={'expires': '7d'})
    # Normalize: non-empty prefixes must end with '/' (the original used
    # a convoluted if/pass/else for the same rule).
    if path and not path.endswith('/'):
        path += '/'
    return list(bucket.list(path))
def mapp(request):
    """Render map.html with images matching ?dtitle=, or all under stati/.

    With a GET query, filters the imagess table by the given title;
    otherwise lists every object under stati/ and collects the first
    matching DB record for each.
    """
    A = []
    from sae.storage import Bucket
    bucket = Bucket('abc')
    if request.GET:
        if 'dtitle' in request.GET:  # has_key() is Python-2-only
            name = request.GET['dtitle']
            A = imagess.objects.filter(title=name)
    else:
        for obj in bucket.list(path='stati/'):
            title = obj.name.split('/')[-1]
            matches = imagess.objects.filter(title=title)
            if matches:  # an empty QuerySet is falsy
                A.append(matches[0])
    return render_to_response('map.html', {'A': A},
                              context_instance=RequestContext(request))
def handler():
    """Render view.html listing a link + button for every spider object."""
    bucket = Bucket("spider")
    rows = []
    for obj in bucket.list():
        name = obj.name
        href = "http://spidertests-spider.stor.sinaapp.com/%s" % name
        # Names are UTF-8 encoded; drop the 5-char extension for display.
        label = name.decode("utf8")[:-5]
        anchor = '<a href="%s">%s</a>' % (href, label)
        button = '<button id="%s">click</button>' % name
        rows.append("%s%s<br>" % (anchor, button))
    content = Markup("\n".join(rows))
    return render_template("view.html", content=content)
def get(self):
    """Write an XML list of image URLs (on SAE) or local file paths (dev)."""
    self.set_header('Content-Type', 'text/xml; charset=utf-8')
    if 'sae' in modules:
        from sae.storage import Bucket
        bucket = Bucket('oerp')
        self.write('<images>')
        # Emit the public URL of every object in the bucket.
        for result in bucket.list(path=''):
            self.write('<image>http://oerp-oerp.stor.sinaapp.com/%s</image>'
                       % result.name)
        self.write('</images>')
    else:
        import os
        self.write('<images>')
        # normcase converts slashes to the platform convention.
        # BUGFIX: the original listed 'e:\upload' (unescaped backslash,
        # an error in Python 3) while building paths from 'e:/upload/';
        # use one normalized directory for both.
        path = os.path.normcase("e:/upload/")
        for img in os.listdir(path):
            self.write('<image>%s</image>' % (path + img))
        self.write('</images>')
class SaeStorageSaver:
    """Thin convenience wrapper around an SAE storage Bucket."""

    def __init__(self, key):
        # *key* is the bucket name.
        self.bucket = Bucket(key)

    def StoreTxtFile(self, path, content):
        """Store text content at *path* (identical to StoreBinFile; both
        kept for API compatibility)."""
        self.bucket.put_object(path, content)

    def StoreBinFile(self, path, content):
        """Store binary content at *path*."""
        self.bucket.put_object(path, content)

    def GetObjectByPath(self, path):
        """Return the raw contents of the object at *path*."""
        return self.bucket.get_object_contents(path)

    def GetItemUnder(self, path):
        """Return the object entries under the *path* prefix."""
        # list() replaces the redundant pass-through comprehension.
        return list(self.bucket.list(path))

    def GetBackupList(self):
        """Return the entries under the module-level backup path."""
        return self.GetItemUnder(g_backup_path)

    def DeleteObject(self, obj):
        """Delete the object named *obj* from the bucket."""
        self.bucket.delete_object(obj)
def POST(self):
    """Store uploaded pictures under <date>/<random>/ and redirect there.

    Picks an unused random tuple id for today, uploads each non-empty
    picture as <i>.jpg with its description, pickles the page metadata
    into config.txt, then redirects to the new page.
    """
    bucketId = 't'
    bucket = Bucket(bucketId)
    dateId = date.today().strftime("%Y-%m-%d")
    # Pick a tuple id that is not already used for today.
    tupleId = random_str()
    while True:
        try:
            if list(bucket.list(prefix=(dateId + "/" + tupleId))):
                tupleId = random_str()
            else:
                break
        except sae.storage.Error:
            # Listing failed; best-effort retry with the same id.
            pass
    x = web.input(pic=[])
    y = web.input(des=[])
    t = web.input(pageTitle={})
    data = {'title': t.pageTitle, 'des': []}
    count = 0
    for pic, des in zip(x['pic'], y['des']):
        if not pic:  # skip empty file fields
            continue
        bucket.put_object(dateId + "/" + tupleId + "/" + str(count) + '.jpg', pic)
        data['des'].append(des)
        count += 1
    if count == 0:
        # Nothing was uploaded -- show the index page again.
        return render.index()
    data['number'] = count
    bucket.put_object(dateId + "/" + tupleId + "/config.txt", pickle.dumps(data))
    raise web.seeother("/default/" + bucketId + "/" + dateId + "/" + tupleId)
def POST(self):
    """Handle backup-management requests.

    The JSON body must carry "passwd"; requests with "type" are then
    dispatched: "restore"/"getfile" require a hashcode derived from the
    stored timestamp nonce, while "getfilelist" returns the backup file
    list prefixed with a fresh nonce.
    """
    bucket = Bucket('backup')
    str_data = web.data()  # raw POST body
    js = json.loads(str_data, "utf-8")
    if "passwd" in js.keys():
        if js["passwd"] != config.passwdDatas:
            return u"密码错误"
    else:
        return u"请求格式不正确"
    if "type" in js.keys():
        if "filename" in js.keys():
            # Recompute the expected hashcode: sha1(nonce + shared key).
            # The nonce was handed out by a previous "getfilelist" call.
            stamp = kv.get("kvdatastamp")
            sha1 = hashlib.sha1()
            sha1.update(stamp)
            sha1.update(config.keyUserRestore)
            hashcode = sha1.hexdigest()
            if not "hashcode" in js.keys():
                return u"请求类型不正确"
            if hashcode != js["hashcode"]:
                return u"验证失败"
            if js["type"] == "restore":
                return ReadZipFile(js["filename"])
            elif js["type"] == "getfile":
                return bucket.generate_url(js["filename"])
            else:
                return u"请求类型不正确"
        elif js["type"] == "getfilelist":
            # First element of the reply is a fresh timestamp nonce the
            # client must hash into later restore/getfile requests.
            filelist = []
            stamp = str(int(time.time()))
            filelist.append(stamp)
            kv.set("kvdatastamp", stamp)
            for finf in bucket.list():
                filelist.append(str(finf[u'name']))
            return ",".join(filelist)
        else:
            return u"请求格式不正确"
    else:
        return u"请求格式不正确"
class SAEStorageKVDB(object):
    '''
    A bridge for pyutils kvdb wrapper to access sae storage like a kvdb.
    e.g.
        from pyutils import KVDBWrapper
        from sae_storage import SAEStorageKVDB
        kvclient = KVDBWrapper(SAEStorageKVDB, bucket='mybucket',
                               accesskey='xxx', secretkey='yyy',
                               account='myapp', prefix='PRE:', ...)
        kvclient.set(key, value)
        kvclient.get(key)
    '''

    def __init__(self, **kwargs):
        bucket = kwargs.get('bucket', '')
        accesskey = kwargs.get('accesskey', '')
        secretkey = kwargs.get('secretkey', '')
        account = kwargs.get('account', '')  # app name
        retries = long(kwargs['retries']) if 'retries' in kwargs else 3
        # All keys are stored with this prefix prepended.
        self.prefix = kwargs.get('prefix', '')
        if accesskey and secretkey and account:
            # Explicit credentials: connect from outside the SAE runtime.
            conn = Connection(accesskey, secretkey, account, retries)
            self.kv = conn.get_bucket(bucket)
        else:
            # Inside SAE the runtime supplies the credentials.
            self.kv = Bucket(bucket)

    def info(self):
        """Return the bucket's stat info."""
        return self.kv.stat()

    def get(self, key, **kwargs):
        """Return the value stored under *key* (prefix applied)."""
        return self.kv.get_object(self.prefix + key, **kwargs)

    def set(self, key, value, **kwargs):
        """Store *value* under *key* (prefix applied)."""
        return self.kv.put_object(self.prefix + key, value, **kwargs)

    def delete(self, key, **kwargs):
        """Delete the value stored under *key* (prefix applied)."""
        return self.kv.delete_object(self.prefix + key, **kwargs)

    def exist(self, key, **kwargs):
        """True when *key* exists (truthy stat result, as the original)."""
        return bool(self.kv.stat_object(self.prefix + key))

    def scan(self, cursor=None, count=100, **kwargs):
        '''
        Retrieve keys by given arguments
        :param kwargs: 'count' for retrieve count; 'cursor' is the key for next time retrieve
        :return:
        '''
        # BUGFIX: the original called self.kv.c(...), which does not
        # exist on an SAE Bucket; list() matches scanv() below.
        # NOTE(review): confirm list() is the intended keys-only call
        # against the pyutils KVDBWrapper contract.
        return self.kv.list(prefix=self.prefix, marker=cursor,
                            limit=count, **kwargs)

    def scanv(self, cursor=None, count=100, **kwargs):
        '''
        Retrieve key-values by given arguments
        :param kwargs: 'count' for retrieve count; 'cursor' is the key for next time retrieve
        :return:
        '''
        return self.kv.list(prefix=self.prefix, marker=cursor,
                            limit=count, **kwargs)

    def mget(self, keys, **kwargs):
        """Yield the value for each key in *keys*.

        BUGFIX: the original passed the already-prefixed key back into
        self.get(), which prefixes again -- every lookup targeted
        prefix+prefix+key and could never hit.
        """
        for key in keys:
            yield self.get(key, **kwargs)
class SaeStorage(Storage):
    """Whoosh-style Storage implementation backed by an SAE bucket."""

    def __init__(self, bucket_name, path):
        # *path* is the folder prefix inside the bucket.
        self.bucket_name = bucket_name
        self.folder = path
        self._bucket = Bucket(bucket_name)
        # In-process locks keyed by name (not shared across instances).
        self.locks = {}

    def __repr__(self):
        return "%s(%r)(%r)" % (self.__class__.__name__,
                               self.bucket_name, self.folder)

    def create(self):
        return self

    def destroy(self):
        # Remove all files, then drop the in-memory locks.
        self.clean()
        del self.locks

    def create_file(self, name, **kwargs):
        """Return a writable StructFile uploaded to the bucket on close."""
        def onclose_fn(sfile):
            self._bucket.put_object(self._fpath(name), sfile.file.getvalue())
        return StructFile(BytesIO(), name=name, onclose=onclose_fn)

    def open_file(self, name, **kwargs):
        """Open an existing object; raises NameError when absent."""
        if self._bucket.stat_object(self._fpath(name)) is None:
            raise NameError(name)
        content = self._bucket.get_object_contents(self._fpath(name))

        def onclose_fn(sfile):
            # Only upload when the buffer actually changed -- the
            # original re-uploaded unconditionally on every close.
            new_content = sfile.file.getvalue()
            if new_content != content:
                self._bucket.put_object(self._fpath(name), new_content)
        return StructFile(BytesIO(content), name=name, onclose=onclose_fn)

    def _fpath(self, fname):
        # Object key = folder prefix joined with the file name.
        return os.path.join(self.folder, fname)

    def clean(self):
        """Delete every object under this storage's folder."""
        for fname in self.list():
            self._bucket.delete_object(self._fpath(fname))

    def list(self):
        """Return file names with the '<folder>/' prefix stripped."""
        prefix_len = len(self.folder) + 1
        return [f['name'][prefix_len:]
                for f in self._bucket.list(path=self._fpath(""))]

    def file_exists(self, name):
        return name in self.list()

    def file_modified(self, name):
        # Raw last-modified string; '' when the field is absent.
        return self._bucket.stat_object(self._fpath(name))\
            .get('last_modified', '')

    def file_length(self, name):
        return int(self._bucket.stat_object(self._fpath(name))['bytes'])

    def delete_file(self, name):
        self._bucket.delete_object(self._fpath(name))

    def rename_file(self, name, newname, safe=False):
        """Rename via get + delete + put (SAE has no native rename)."""
        # List once instead of up to three times -- each call is a full
        # bucket listing.
        names = self.list()
        if name not in names:
            raise NameError(name)
        if safe and newname in names:
            raise NameError("File %r exists" % newname)
        content = self._bucket.get_object_contents(self._fpath(name))
        self._bucket.delete_object(self._fpath(name))
        self._bucket.put_object(self._fpath(newname), content)

    def lock(self, name):
        # One in-process Lock per name, created lazily.
        if name not in self.locks:
            self.locks[name] = Lock()
        return self.locks[name]

    def temp_storage(self, name=None):
        """Return a fresh SaeStorage rooted at a temp sub-folder."""
        name = name or "%s.tmp" % random_name()
        path = os.path.join(self.folder, name)
        return SaeStorage(self.bucket_name, path).create()
class SAEStorage(LocalStorage):
    """Article storage backed by an SAE bucket of markdown files."""

    def __init__(self, bucket):
        from sae.storage import Bucket
        self.bucket = Bucket(bucket)
        self.bucket.stat()  # fail fast if the bucket is unreachable
        # Change marker used by has_last(); starts at 0 so the first
        # call always reports a change.
        self.last_mark = 0

    def list(self):
        """Return metadata dicts for every article, newest first."""
        # filter()+lambda replaced with a comprehension.
        articles = [a for a in self.bucket.list() if self.is_article(a.name)]
        articles = self._sorted_files(articles)
        rst = []
        for article in articles:
            article_name = article.name
            content = self.bucket.get_object_contents(article_name)
            content = content.decode('utf-8')
            art_meta = self._get_metadatas(content)
            art_meta['filename'] = article_name
            # urlsafe_b64encode needs bytes, so re-encode unicode names;
            # isinstance() replaces the type(...) == unicode comparison.
            if isinstance(article_name, unicode):
                adjust_name = article_name.encode('utf-8')
            else:
                adjust_name = article_name
            art_meta['filename_url_encode'] = \
                base64.urlsafe_b64encode(adjust_name)
            if not art_meta['date']:
                art_meta['date'] = article.last_modified
            if not art_meta["slug"]:
                # Fall back to the file name (minus extension) as slug.
                art_meta['slug'] = article_name.rpartition(".")[0]
                art_meta['slug'] = art_meta['slug'].replace("_", " ")
            rst.append(art_meta)
        return rst

    def get(self, article, cut=-1):
        """Return rendered HTML for *article*; truncate source at *cut* chars."""
        content = self.bucket.get_object_contents(article)
        # decode() already yields unicode; the original's extra
        # unicode(content) call was a no-op and is dropped.
        content = content.decode('utf-8')
        mdparse = MDParse()
        if cut != -1:
            content = content[:cut] + "\n....."
        content = self._clean_metadatas(content)
        return mdparse.parse(content)

    def save(self, name, content):
        self.bucket.put_object(name, content)

    def delete(self, name):
        self.bucket.delete_object(name)

    def update_time(self, article):
        """Return the object's mtime formatted as 'YYYY-MM-DD HH:MM:SS'."""
        stat = self.bucket.stat_object(article)
        d = datetime.datetime.fromtimestamp(float(stat.timestamp))
        return d.strftime("%Y-%m-%d %H:%M:%S")

    def has_last(self):
        """True when the bucket changed since the previous call."""
        bucket_stat = self.bucket.stat()
        curr_mark = bucket_stat.objects + bucket_stat.bytes
        changed = self.last_mark != curr_mark
        self.last_mark = curr_mark
        return changed

    def _sorted_files(self, articles):
        # Newest first, by object timestamp (one stat per article).
        def key_func(x):
            return float(self.bucket.stat_object(x.name).timestamp)
        return sorted(articles, key=key_func, reverse=True)
def beauti(request):
    """Apply the image filter selected via POST and store the result.

    Each branch loads the source image either from an upload or from
    stati/<btitle>, applies one PIL operation (channel split, thumbnail,
    rotation, or grayscale band), and saves the JPEG under meihua/.
    """
    r = g = b = s = 0
    from sae.storage import Bucket
    bucket = Bucket("abc")
    # Clear previous filter results.
    for ii in bucket.list(path="meihua/"):
        bucket.delete_object(ii.name)
    if request.GET:
        if "btitle" in request.GET:  # has_key() is Python-2-only
            b = request.GET["btitle"]
        if "ptitle" in request.GET:
            g = request.GET["ptitle"]
    if request.POST:
        if "lvjing" in request.POST:
            if request.FILES or "btitle" in request.GET:
                if request.FILES:
                    f = request.FILES["file"]
                    bucket.put_object("lvjing/" + f.name, f)
                    img = bucket.get_object_contents("lvjing/" + f.name)
                    s = "lv" + str(f)
                elif "btitle" in request.GET:
                    name = request.GET["btitle"]
                    img = bucket.get_object_contents("stati/" + name)
                    s = "lv" + name
                im = Image.open(StringIO.StringIO(img))
                im.getdata()
                out = im.split()
                imgout = StringIO.StringIO()
                out[0].save(imgout, "jpeg")  # keep the first band only
                bucket.put_object("meihua/" + s, imgout.getvalue())
        if "suotu" in request.POST:
            if request.FILES or "btitle" in request.GET:
                if request.FILES:
                    f = request.FILES["file"]
                    bucket.put_object("suotu/" + f.name, f)
                    img = bucket.get_object_contents("suotu/" + f.name)
                    s = "su" + str(f)
                elif "btitle" in request.GET:
                    name = request.GET["btitle"]
                    img = bucket.get_object_contents("stati/" + name)
                    s = "su" + name
                im = Image.open(StringIO.StringIO(img))
                out = im.resize((128, 128))  # fixed-size thumbnail
                imgout = StringIO.StringIO()
                out.save(imgout, "jpeg")
                bucket.put_object("meihua/" + s, imgout.getvalue())
        if "xuanzhuan" in request.POST:
            if request.FILES or "btitle" in request.GET:
                # Rotation angle from the "dushu" field; 0 when empty.
                if request.POST["dushu"] != "":
                    i = string.atoi(request.POST["dushu"])
                else:
                    i = 0
                if request.FILES:
                    f = request.FILES["file"]
                    bucket.put_object("xuanzhuan/" + f.name, f)
                    img = bucket.get_object_contents("xuanzhuan/" + f.name)
                    s = "xu" + str(f)
                else:
                    name = request.GET["btitle"]
                    img = bucket.get_object_contents("stati/" + name)
                    s = "xu" + name
                im = Image.open(StringIO.StringIO(img))
                out = im.rotate(i)
                imgout = StringIO.StringIO()
                out.save(imgout, "jpeg")
                bucket.put_object("meihua/" + s, imgout.getvalue())
        if "huidu1" in request.POST:
            if request.FILES or "btitle" in request.GET:
                if request.FILES:
                    f = request.FILES["file"]
                    bucket.put_object("huidu1/" + f.name, f)
                    img = bucket.get_object_contents("huidu1/" + f.name)
                    s = "h1" + str(f)
                else:
                    name = request.GET["btitle"]
                    img = bucket.get_object_contents("stati/" + name)
                    s = "h1" + name
                im = Image.open(StringIO.StringIO(img))
                im.getdata()
                out = im.split()
                # BUGFIX: imgout was created only inside the if-branch,
                # so the single-band fallback raised NameError; create
                # it up front (matching the huidu2/huidu3 branches).
                imgout = StringIO.StringIO()
                if len(out) > 0:
                    out[0].save(imgout, "jpeg")
                else:
                    im.save(imgout, "jpeg")
                bucket.put_object("meihua/" + s, imgout.getvalue())
        if "huidu2" in request.POST:
            if request.FILES or "btitle" in request.GET:
                if request.FILES:
                    f = request.FILES["file"]
                    bucket.put_object("huidu2/" + f.name, f)
                    img = bucket.get_object_contents("huidu2/" + f.name)
                    s = "h2" + str(f)
                elif "btitle" in request.GET:
                    name = request.GET["btitle"]
                    img = bucket.get_object_contents("stati/" + name)
                    s = "h2" + name
                im = Image.open(StringIO.StringIO(img))
                im.getdata()
                out = im.split()
                imgout = StringIO.StringIO()
                if len(out) > 1:
                    out[1].save(imgout, "jpeg")
                else:
                    im.save(imgout, "jpeg")
                bucket.put_object("meihua/" + s, imgout.getvalue())
        if "huidu3" in request.POST:
            if request.FILES or "btitle" in request.GET:
                if request.FILES:
                    f = request.FILES["file"]
                    bucket.put_object("huidu3/" + f.name, f)
                    img = bucket.get_object_contents("huidu3/" + f.name)
                    s = "h3" + str(f)
                elif "btitle" in request.GET:
                    name = request.GET["btitle"]
                    img = bucket.get_object_contents("stati/" + name)
                    s = "h3" + name
                im = Image.open(StringIO.StringIO(img))
                im.getdata()
                out = im.split()
                imgout = StringIO.StringIO()
                if len(out) > 2:
                    out[2].save(imgout, "jpeg")
                else:
                    im.save(imgout, "jpeg")
                bucket.put_object("meihua/" + s, imgout.getvalue())
    return render_to_response("pilbeau.html", {"r": s, "s": b, "g": g},
                              context_instance=RequestContext(request))
def manage(request):
    """Management page: delete/save images, then list everything under manage/."""
    from sae.storage import Bucket
    bucket = Bucket('abc')
    # Clear the temporary table on every request (behavior kept).
    for x in t.objects.all():
        x.delete()
    if request.GET:
        # delete
        if 'ctitle' in request.GET:  # has_key() is Python-2-only
            name = request.GET["ctitle"]
            bucket.delete_object('manage/' + name)
            bucket.delete_object('stati/' + name)
            for rec in imagess.objects.filter(title=name):
                rec.delete()
        if 'stitle' in request.GET:
            # save beautify: copy meihua/<name> into stati/ and a
            # 128x128 thumbnail into manage/, plus a DB record.
            name = request.GET["stitle"]
            if name != '0':
                new_name = name[2:]  # strip the 2-char filter prefix
                new_comment = '..'
                new_mood = '..'
                # BUGFIX: new_lat/new_lon were only assigned when a
                # matching record existed, raising NameError otherwise;
                # give them defaults like comment/mood have.
                new_lat = new_lon = 0
                ta = imagess.objects.filter(title=new_name)
                if ta:
                    new_comment = ta[0].comment
                    new_mood = ta[0].mood
                    new_lat = ta[0].lat
                    new_lon = ta[0].lon
                new_photo = imagess(picture=0, comment=new_comment,
                                    mood=new_mood, title=name,
                                    lat=new_lat, lon=new_lon)
                new_photo.save()
                obj = bucket.get_object_contents('meihua/' + name)
                im = Image.open(StringIO.StringIO(obj))
                imgout = StringIO.StringIO()
                im.save(imgout, "jpeg")
                bucket.put_object('stati/' + name, imgout.getvalue())
                im = Image.open(StringIO.StringIO(obj))
                out = im.resize((128, 128))
                imgout = StringIO.StringIO()
                out.save(imgout, "jpeg")
                bucket.put_object('manage/' + name, imgout.getvalue())
    # Display section: one [title, mtime, mood, comment] row per thumbnail.
    A = []
    for entry in bucket.list(path='manage/'):
        row = []
        title = entry.name.split('/')[-1]
        row.append(title)
        row.append(entry.last_modified)
        ta = imagess.objects.filter(title=title)
        if ta:
            row.append(ta[0].mood)
            row.append(ta[0].comment)
        A.append(row)
    if request.GET:
        if 'search' in request.GET:  # search
            if request.GET['writesearch'] != '':
                A = []
                wcomment = request.GET['writesearch']
                # Direct iteration replaces the range(len(...)) loop.
                for rec in imagess.objects.filter(comment=wcomment):
                    stat = bucket.stat_object('manage/' + rec.title)
                    A.append([rec.title, stat.last_modified,
                              rec.mood, rec.comment])
    return render_to_response('manage.html', {'A': A},
                              context_instance=RequestContext(request))
def sae_bucket_list(self):
    """Return the raw object listing of this instance's bucket.

    Handy for debugging SAE storage contents.
    """
    return Bucket(self.bucket_name).list()
class SaeStorage(Storage):
    """Storage implementation backed by SAE bucket storage."""

    def __init__(self, bucket_name, path):
        # bucket_name: SAE bucket; path: folder prefix inside it.
        self.bucket_name = bucket_name
        self.folder = path
        self._bucket = Bucket(bucket_name)
        # In-process locks keyed by name (not shared across instances).
        self.locks = {}

    def __repr__(self):
        return "%s(%r)(%r)" % (self.__class__.__name__,
                               self.bucket_name, self.folder)

    def create(self):
        # Nothing to set up on the bucket side.
        return self

    def destroy(self):
        # Remove all files
        self.clean()
        # REMOVE locks
        del self.locks

    def create_file(self, name, **kwargs):
        # Returns a writable StructFile; the buffered bytes are uploaded
        # to the bucket when the file is closed.
        def onclose_fn(sfile):
            self._bucket.put_object(self._fpath(name), sfile.file.getvalue())
        f = StructFile(BytesIO(), name=name, onclose=onclose_fn)
        return f

    def open_file(self, name, **kwargs):
        # Raises NameError when the object does not exist.
        if self._bucket.stat_object(self._fpath(name)) is None:
            raise NameError(name)
        content = self._bucket.get_object_contents(self._fpath(name))

        def onclose_fn(sfile):
            # Upload on close only when the buffer actually changed.
            new_content = sfile.file.getvalue()
            if new_content != content:
                self._bucket.put_object(self._fpath(name), new_content)
        return StructFile(BytesIO(content), name=name, onclose=onclose_fn)

    def _fpath(self, fname):
        # Object key = folder prefix joined with the file name.
        return os.path.join(self.folder, fname)

    def clean(self):
        # Delete every object under this storage's folder.
        files = self.list()
        for fname in files:
            self._bucket.delete_object(self._fpath(fname))

    def list(self):
        # Strip "<folder>/" from each returned object name.
        file_generate = self._bucket.list(path=self._fpath(""))
        file_names = []
        for f in file_generate:
            file_names.append(f['name'][len(self.folder)+1:])
        return file_names

    def file_exists(self, name):
        return name in self.list()

    def file_modified(self, name):
        # Raw last-modified string; '' when the field is absent.
        return self._bucket.stat_object(self._fpath(name))\
            .get('last_modified', '')

    def file_length(self, name):
        return int(self._bucket.stat_object(self._fpath(name))['bytes'])

    def delete_file(self, name):
        self._bucket.delete_object(self._fpath(name))

    def rename_file(self, name, newname, safe=False):
        # Emulated as get + delete + put; list once to check both names.
        name_list = self.list()
        if name not in name_list:
            raise NameError(name)
        if safe and newname in name_list:
            raise NameError("File %r exists" % newname)
        content = self._bucket.get_object_contents(self._fpath(name))
        self._bucket.delete_object(self._fpath(name))
        self._bucket.put_object(self._fpath(newname), content)

    def lock(self, name):
        # One in-process Lock per name, created lazily.
        if name not in self.locks:
            self.locks[name] = Lock()
        return self.locks[name]

    def temp_storage(self, name=None):
        # Temporary indexes live in RAM, not in the bucket; *name* is unused.
        temp_store = RamStorage()
        return temp_store.create()