def test_rawtar(self):
    """Create a normal tar archive and restore it"""
    # Build an in-memory tar archive with a single member "simpletar".
    # NOTE(review): assumes Python 2 str semantics — BytesIO.write(testdata)
    # and md5("".join(stream)) both require bytes==str; breaks on Python 3.
    raw = BytesIO()
    tarfile = TarFile(mode='w', fileobj=raw)
    testdata = rand_str(20) * 5000
    inf = TarInfo("simpletar")
    fileraw = BytesIO()
    fileraw.write(testdata)
    # TarInfo.size must be set explicitly; addfile reads exactly that many bytes.
    inf.size = len(testdata)
    fileraw.seek(0)
    tarfile.addfile(inf, fileobj=fileraw)
    tarfile.close()
    raw.seek(0)
    data = raw.read()
    # Upload the raw tar bytes to the "restore" endpoint under a random container.
    cnt = rand_str(20)
    ret = requests.put(self.make_uri("restore", container=cnt), data=data)
    # 201: the service accepted and created the restored object.
    self.assertEqual(ret.status_code, 201)
    # Fetch the restored object and verify the payload round-tripped intact.
    meta, stream = self.conn.object_fetch(self.account, cnt, "simpletar")
    self.assertEqual(
        md5("".join(stream)).hexdigest(),
        md5(testdata).hexdigest())
def command_create(working_dir: str, project_name, *argv):
    """Create a new restfx project skeleton at working_dir/project_name.

    Extracts the bundled sample-project archive into the target directory
    and generates a fresh app id. Exits with status 1 when the target
    path already exists.

    :param working_dir: directory in which the project folder is created
    :param project_name: name of the new project folder
    :param argv: extra CLI arguments (ignored)
    """
    print('working-dir:' + working_dir)
    pkg_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    project_path = os.path.abspath(os.path.join(working_dir, project_name))
    print('Creating restfx project "%s"' % project_name)
    if os.path.exists(project_path):
        print('[ERROR] Project path "%s" exists.' % project_path)
        sys.exit(1)
    # Sample project archive shipped inside the package.
    sample_file = os.path.abspath(
        os.path.join(pkg_root, 'internal_assets', 'sample.tar.gz'))
    print('Creating project structure')
    import tarfile
    # Context manager guarantees the archive is closed even if extraction
    # fails; it also avoids rebinding the name `tarfile` over the module.
    with tarfile.open(sample_file) as archive:
        archive.extractall(project_path)
    command_genid(project_path)
    # The original called .format(project_name=...) on a template with no
    # {project_name} placeholder — a no-op, so it is dropped here.
    print("""Created !
It is time to have fun with restfx.
""")
def createArchive(self, cfgOutName, name='', mode='w:gz'):
    """Create the archive to upload.

    Adds *cfgOutName* to a (by default gzip-compressed) tarball under the
    archive name 'PSet.py' and computes a checksum over the member
    metadata (name, size, mtime, uname) — not over file contents.

    :param cfgOutName: path of the configuration file to archive
    :param name: output archive path; a uuid-based default in the current
        working directory is generated when empty
    :param mode: tarfile open mode
    :return: tuple (archive path, hex sha256 checksum of member listing)
    """
    if not name:
        import uuid
        # Unique default name so concurrent invocations do not collide.
        name = os.path.join(os.getcwd(), str(uuid.uuid4()) + 'default.tgz')
    import tarfile
    print('opening tar file')
    # Distinct local name: the original rebound `tarfile`, shadowing the
    # module object just imported.
    tar = tarfile.open(name=name, mode=mode, dereference=True)
    try:
        print('adding %s to the tarball' % cfgOutName)
        tar.add(cfgOutName, arcname='PSet.py')
        # Checksum over member metadata so re-packing identical content
        # with identical timestamps yields the same digest.
        print('calculating the checksum')
        lsl = [(x.name, int(x.size), int(x.mtime), x.uname)
               for x in tar.getmembers()]
        # .encode(): hashlib requires bytes on Python 3.
        hasher = hashlib.sha256(str(lsl).encode('utf-8'))
        checksum = hasher.hexdigest()
    finally:
        # Release the archive handle even when add()/getmembers() fails.
        tar.close()
    return name, checksum
def w2p_unpack(filename, path, delete_tar=True):
    """Unpack a .w2p / .tar.gz / .tar archive into *path*.

    Gzipped archives are first decompressed to a sibling .tar file which
    is removed afterwards when *delete_tar* is true.

    :param filename: archive to unpack ('welcome.w2p' is rebuilt on a
        fresh install)
    :param path: destination directory
    :param delete_tar: remove the intermediate/original tar when done
    """
    # Fresh-install bootstrap: (re)build welcome.w2p from the app sources.
    if filename == 'welcome.w2p' and (
            not os.path.exists('welcome.w2p') or os.path.exists('NEWINSTALL')):
        try:
            w2p_pack('welcome.w2p', 'applications/welcome')
            os.unlink('NEWINSTALL')
        except Exception:
            # Best-effort: warn and continue with whatever file exists.
            # (Narrowed from a bare `except:` that also swallowed
            # SystemExit/KeyboardInterrupt.)
            msg = "New installation: unable to create welcome.w2p file"
            sys.stderr.write(msg)
    filename = abspath(filename)
    path = abspath(path)
    if filename[-4:] == '.w2p' or filename[-3:] == '.gz':
        if filename[-4:] == '.w2p':
            tarname = filename[:-4] + '.tar'
        else:
            tarname = filename[:-3] + '.tar'
        # Decompress to a plain tar; close both handles even on error.
        fgzipped = gzopen(filename, 'rb')
        try:
            with open(tarname, 'wb') as tar_out:
                tar_out.write(fgzipped.read())
        finally:
            fgzipped.close()
    else:
        tarname = filename
    untar(tarname, path)
    if delete_tar:
        os.unlink(tarname)
def createArchive(self, cfgOutName, name='', mode='w:gz'):
    """Create the archive to upload.

    Adds *cfgOutName* to a (by default gzip-compressed) tarball under the
    archive name 'PSet.py' and computes a checksum over the member
    metadata (name, size, mtime, uname) — not over file contents.

    :param cfgOutName: path of the configuration file to archive
    :param name: output archive path; a uuid-based default in the current
        working directory is generated when empty
    :param mode: tarfile open mode
    :return: tuple (archive path, hex sha256 checksum of member listing)
    """
    if not name:
        import uuid
        # Unique default name so concurrent invocations do not collide.
        name = os.path.join(os.getcwd(), str(uuid.uuid4()) + 'default.tgz')
    import tarfile
    print('opening tar file')
    # Distinct local name: the original rebound `tarfile`, shadowing the
    # module object just imported.
    tar = tarfile.open(name=name, mode=mode, dereference=True)
    try:
        print('adding %s to the tarball' % cfgOutName)
        tar.add(cfgOutName, arcname='PSet.py')
        # Checksum over member metadata so re-packing identical content
        # with identical timestamps yields the same digest.
        print('calculating the checksum')
        lsl = [(x.name, int(x.size), int(x.mtime), x.uname)
               for x in tar.getmembers()]
        # .encode(): hashlib requires bytes on Python 3.
        hasher = hashlib.sha256(str(lsl).encode('utf-8'))
        checksum = hasher.hexdigest()
    finally:
        # Release the archive handle even when add()/getmembers() fails.
        tar.close()
    return name, checksum
def w2p_unpack(filename, path, delete_tar=True):
    """Unpack a .w2p / .tar.gz / .tar archive into *path*.

    Gzipped archives are first decompressed to a sibling .tar file which
    is removed afterwards when *delete_tar* is true.

    :param filename: path of the archive to unpack
    :param path: destination directory
    :param delete_tar: remove the intermediate/original tar when done
    """
    if filename[-4:] == '.w2p' or filename[-3:] == '.gz':
        if filename[-4:] == '.w2p':
            tarname = filename[:-4] + '.tar'
        else:
            tarname = filename[:-3] + '.tar'
        # Decompress to a plain tar; close both handles even on error
        # (the original leaked them when read()/write() raised).
        fgzipped = gzopen(filename, 'rb')
        try:
            with open(tarname, 'wb') as tar_out:
                tar_out.write(fgzipped.read())
        finally:
            fgzipped.close()
    else:
        tarname = filename
    untar(tarname, path)
    if delete_tar:
        os.unlink(tarname)
def bk_now(self, tables=None):
    """Logical backup of the configured Oracle user via `exp`.

    Exports the given tables, or the whole user schema when *tables* is
    None, then compresses the dump (zip on Windows, tar.gz on Linux).

    :param tables: table names passed to exp's `tables=` option;
        exports the whole owner schema when None
    :return: (True, archive path) on success, (False, None) on failure;
        exits the process on an unsupported OS
    """
    timestamp = str(
        time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))
    if utils.is_windows():
        bak_dir = self.bkdir + '\\' + timestamp
        dmp_file = bak_dir + '\\' + timestamp + '_exp.dmp'
        log_file = bak_dir + '\\' + timestamp + '_exp.log'
        options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
            + dmp_file + ' log=' + log_file
        if tables is not None:
            options += ' tables=' + tables
        else:
            options += ' owner=' + self.conf['seruser']
        os.mkdir(bak_dir)
        if os.system('exp ' + options) == 0:
            zip_file = bak_dir + '\\' + timestamp + '_exp.zip'
            myzipfile = zipfile.ZipFile(zip_file, 'w')
            myzipfile.write(dmp_file)
            myzipfile.close()
            return True, zip_file
        else:
            print('backup oprations failed!')
            return False, None
    elif utils.is_linux():
        bak_dir = self.bkdir + '/' + timestamp
        dmp_file = bak_dir + '/' + timestamp + '_exp.dmp'
        log_file = bak_dir + '/' + timestamp + '_exp.log'
        options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
            + dmp_file + ' log=' + log_file
        if tables is not None:
            options += ' tables=' + tables
        else:
            options += ' owner=' + self.conf['seruser']
        os.mkdir(bak_dir)
        if os.system('exp ' + options) == 0:
            tar_file = bak_dir + '/' + timestamp + '_exp.tar.gz'
            # Distinct local name: rebinding `tarfile` makes it a local
            # and raises UnboundLocalError before open() is reached.
            tar = tarfile.open(tar_file, 'w:gz')
            tar.add(dmp_file)
            tar.close()
            return True, tar_file
        else:
            # Mirror the Windows failure path instead of falling through.
            print('backup oprations failed!')
            return False, None
    else:
        print('Unkown System Type!')
        sys.exit(1)
def bk_now(self, tables=None):
    """Logical backup of the configured Oracle user via `exp`.

    Exports the given tables, or the whole user schema when *tables* is
    None, then compresses the dump (zip on Windows, tar.gz on Linux).

    :param tables: table names passed to exp's `tables=` option;
        exports the whole owner schema when None
    :return: (True, archive path) on success, (False, None) on failure;
        exits the process on an unsupported OS
    """
    timestamp = str(
        time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))
    if utils.is_windows():
        bak_dir = self.bkdir + '\\' + timestamp
        dmp_file = bak_dir + '\\' + timestamp + '_exp.dmp'
        log_file = bak_dir + '\\' + timestamp + '_exp.log'
        options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
            + dmp_file + ' log=' + log_file
        if tables is not None:
            options += ' tables=' + tables
        else:
            options += ' owner=' + self.conf['seruser']
        os.mkdir(bak_dir)
        if os.system('exp ' + options) == 0:
            zip_file = bak_dir + '\\' + timestamp + '_exp.zip'
            myzipfile = zipfile.ZipFile(zip_file, 'w')
            myzipfile.write(dmp_file)
            myzipfile.close()
            return True, zip_file
        else:
            print('backup oprations failed!')
            return False, None
    elif utils.is_linux():
        bak_dir = self.bkdir + '/' + timestamp
        dmp_file = bak_dir + '/' + timestamp + '_exp.dmp'
        log_file = bak_dir + '/' + timestamp + '_exp.log'
        options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
            + dmp_file + ' log=' + log_file
        if tables is not None:
            options += ' tables=' + tables
        else:
            options += ' owner=' + self.conf['seruser']
        os.mkdir(bak_dir)
        if os.system('exp ' + options) == 0:
            tar_file = bak_dir + '/' + timestamp + '_exp.tar.gz'
            # Distinct local name: rebinding `tarfile` makes it a local
            # and raises UnboundLocalError before open() is reached.
            tar = tarfile.open(tar_file, 'w:gz')
            tar.add(dmp_file)
            tar.close()
            return True, tar_file
        else:
            # Mirror the Windows failure path instead of falling through.
            print('backup oprations failed!')
            return False, None
    else:
        print('Unkown System Type!')
        sys.exit(1)
def glob_bak(self):
    """Full mysqldump backup of all databases.

    Dumps every database with binlog position recorded
    (--master-data=2), then compresses the dump (zip on Windows,
    tar.gz on Linux) and appends the outcome to a log file.

    :return: (True, archive path) on success, (False, None) on failure;
        exits the process on an unsupported OS
    """
    timestamp = str(
        time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))
    if utils.is_windows():
        dump_dir = self.bkdir + '\\' + timestamp
        dump_file = dump_dir + '\\' + timestamp + '_glob.sql'
        zip_file_path = dump_dir + '\\' + timestamp + '_glob.zip'
        log_file = dump_dir + '\\' + timestamp + '_glob.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'
        os.mkdir(dump_dir)
        # NOTE(review): 'purge master logs' is a MySQL statement, not a
        # shell command — os.system() likely fails here; confirm intent.
        os.system('purge master logs')
        if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
            myzipfile = zipfile.ZipFile(zip_file_path, 'w')
            myzipfile.write(dump_file)
            myzipfile.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, zip_file_path
        else:
            # Closing quote restored: the original echo had an unbalanced ".
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    elif utils.is_linux():
        dump_dir = self.bkdir + '/' + timestamp
        dump_file = dump_dir + '/' + timestamp + '_glob.sql'
        tar_file = dump_dir + '/' + timestamp + '_glob.tar.gz'
        log_file = dump_dir + '/' + timestamp + '_glob.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'
        os.mkdir(dump_dir)
        os.system('purge master logs')
        if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
            # Distinct local name: rebinding `tarfile` makes it a local
            # and raises UnboundLocalError before open() is reached.
            tar = tarfile.open(tar_file, 'w:gz')
            tar.add(dump_file)
            tar.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, tar_file
        else:
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    else:
        print('Unkown System Type!')
        sys.exit(1)
def incr_bak(self):
    """Incremental backup: flush MySQL logs and archive the latest binlog.

    Rotates the binary log via `mysqladmin flush-logs`, then compresses
    the newest binlog file (zip on Windows, tar.gz on Linux) and appends
    the outcome to a log file.

    :return: (True, archive path) on success, (False, None) on failure;
        exits the process on an unsupported OS
    """
    timestamp = str(
        time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))
    if utils.is_windows():
        dump_dir = self.bkdir + '\\' + timestamp
        zip_file_path = dump_dir + '\\' + timestamp + '_incr.zip'
        log_file = dump_dir + '\\' + timestamp + '_incr.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' flush-logs'
        os.mkdir(dump_dir)
        if os.system('mysqladmin ' + options) == 0:
            logbin_file = self._get_logbin_last()
            print(logbin_file)
            myzipfile = zipfile.ZipFile(zip_file_path, 'w')
            myzipfile.write(logbin_file)
            myzipfile.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, zip_file_path
        else:
            # Closing quote restored: the original echo had an unbalanced ".
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    elif utils.is_linux():
        dump_dir = self.bkdir + '/' + timestamp
        tar_file = dump_dir + '/' + timestamp + '_incr.tar.gz'
        log_file = dump_dir + '/' + timestamp + '_incr.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' flush-logs'
        os.mkdir(dump_dir)
        if os.system('mysqladmin ' + options) == 0:
            logbin_file = self._get_logbin_last()
            # Distinct local name: rebinding `tarfile` makes it a local
            # and raises UnboundLocalError before open() is reached.
            tar = tarfile.open(tar_file, 'w:gz')
            tar.add(logbin_file)
            tar.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, tar_file
        else:
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    else:
        print('Unkown System Type!')
        sys.exit(1)
def incr_bak(self):
    """Incremental backup: flush MySQL logs and archive the latest binlog.

    Rotates the binary log via `mysqladmin flush-logs`, then compresses
    the newest binlog file (zip on Windows, tar.gz on Linux) and appends
    the outcome to a log file.

    :return: (True, archive path) on success, (False, None) on failure;
        exits the process on an unsupported OS
    """
    timestamp = str(
        time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))
    if utils.is_windows():
        dump_dir = self.bkdir + '\\' + timestamp
        zip_file_path = dump_dir + '\\' + timestamp + '_incr.zip'
        log_file = dump_dir + '\\' + timestamp + '_incr.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' flush-logs'
        os.mkdir(dump_dir)
        if os.system('mysqladmin ' + options) == 0:
            logbin_file = self._get_logbin_last()
            print(logbin_file)
            myzipfile = zipfile.ZipFile(zip_file_path, 'w')
            myzipfile.write(logbin_file)
            myzipfile.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, zip_file_path
        else:
            # Closing quote restored: the original echo had an unbalanced ".
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    elif utils.is_linux():
        dump_dir = self.bkdir + '/' + timestamp
        tar_file = dump_dir + '/' + timestamp + '_incr.tar.gz'
        log_file = dump_dir + '/' + timestamp + '_incr.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' flush-logs'
        os.mkdir(dump_dir)
        if os.system('mysqladmin ' + options) == 0:
            logbin_file = self._get_logbin_last()
            # Distinct local name: rebinding `tarfile` makes it a local
            # and raises UnboundLocalError before open() is reached.
            tar = tarfile.open(tar_file, 'w:gz')
            tar.add(logbin_file)
            tar.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, tar_file
        else:
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    else:
        print('Unkown System Type!')
        sys.exit(1)
def glob_bak(self):
    """Full mysqldump backup of all databases.

    Dumps every database with binlog position recorded
    (--master-data=2), then compresses the dump (zip on Windows,
    tar.gz on Linux) and appends the outcome to a log file.

    :return: (True, archive path) on success, (False, None) on failure;
        exits the process on an unsupported OS
    """
    timestamp = str(
        time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))
    if utils.is_windows():
        dump_dir = self.bkdir + '\\' + timestamp
        dump_file = dump_dir + '\\' + timestamp + '_glob.sql'
        zip_file_path = dump_dir + '\\' + timestamp + '_glob.zip'
        log_file = dump_dir + '\\' + timestamp + '_glob.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'
        os.mkdir(dump_dir)
        # NOTE(review): 'purge master logs' is a MySQL statement, not a
        # shell command — os.system() likely fails here; confirm intent.
        os.system('purge master logs')
        if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
            myzipfile = zipfile.ZipFile(zip_file_path, 'w')
            myzipfile.write(dump_file)
            myzipfile.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, zip_file_path
        else:
            # Closing quote restored: the original echo had an unbalanced ".
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    elif utils.is_linux():
        dump_dir = self.bkdir + '/' + timestamp
        dump_file = dump_dir + '/' + timestamp + '_glob.sql'
        tar_file = dump_dir + '/' + timestamp + '_glob.tar.gz'
        log_file = dump_dir + '/' + timestamp + '_glob.log'
        options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
            + ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'
        os.mkdir(dump_dir)
        os.system('purge master logs')
        if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
            # Distinct local name: rebinding `tarfile` makes it a local
            # and raises UnboundLocalError before open() is reached.
            tar = tarfile.open(tar_file, 'w:gz')
            tar.add(dump_file)
            tar.close()
            os.system('echo "DataBase Backup Success!" >> ' + log_file)
            return True, tar_file
        else:
            os.system('echo "DataBase Backup Failed!" >> ' + log_file)
            return False, None
    else:
        print('Unkown System Type!')
        sys.exit(1)
def w2p_unpack(filename, path, delete_tar=True):
    """Unpack a .w2p / .tar.gz / .tar archive into *path*.

    Gzipped archives are first decompressed to a sibling .tar file which
    is removed afterwards when *delete_tar* is true.

    :param filename: archive to unpack ('welcome.w2p' is rebuilt first)
    :param path: destination directory
    :param delete_tar: remove the intermediate/original tar when done
    """
    if filename == "welcome.w2p":
        create_welcome_w2p()
    filename = abspath(filename)
    path = abspath(path)
    if filename[-4:] == ".w2p" or filename[-3:] == ".gz":
        if filename[-4:] == ".w2p":
            tarname = filename[:-4] + ".tar"
        else:
            tarname = filename[:-3] + ".tar"
        # Decompress to a plain tar; close both handles even on error
        # (the original leaked them when read()/write() raised), and use
        # a name that does not shadow the `tarfile` module.
        fgzipped = gzopen(filename, "rb")
        try:
            with open(tarname, "wb") as tar_out:
                tar_out.write(fgzipped.read())
        finally:
            fgzipped.close()
    else:
        tarname = filename
    untar(tarname, path)
    if delete_tar:
        os.unlink(tarname)
def check_for_gzip(tfile):
    """
    Was that tarball also gzipped? Let's find out!

    @param: file (string): the name of the object (so we can gunzip, if
        that's necessary)

    @output: a gunzipped file in the directory of choice, if that's
        necessary

    @return new_file (string): The name of the file after gunzipping or
        the original name of the file if that wasn't necessary
    """
    gzip_contains = 'gzip compressed data'
    dummy1, cmd_out, dummy2 = run_shell_command('file %s', (tfile,))
    if cmd_out.find(gzip_contains) > -1:
        # we have a gzip!
        # gzip refuses files that don't end with .gz, so copy first.
        run_shell_command('cp %s %s', (tfile, tfile + '.tar.gz'))
        new_dest = os.path.join(os.path.split(tfile)[0], 'tmp.tar')
        run_shell_command('touch %s', (new_dest,))
        dummy1, cmd_out, cmd_err = run_shell_command('gunzip -c %s',
                                                     (tfile + '.tar.gz',))
        if cmd_err != '':
            write_message('Error while gunzipping ' + tfile)
            # Bail out: hand back the original, still-gzipped file.
            return tfile
        # `with` closes the handle even if write() raises; also avoid the
        # original's local named `tarfile` (shadows the stdlib module name).
        with open(new_dest, 'w') as tar_out:
            tar_out.write(cmd_out)
        run_shell_command('rm %s', (tfile + '.tar.gz',))
        return new_dest
    return tfile
def create_metafile(self, datafile):
    """Build the package metadata file for *datafile*.

    Extracts the payload tarball into a temp dir to run dependency
    tracking, then renders PACKAGE_INFO with package, distro and build
    information into a fresh temp file.

    :param datafile: path to the (possibly xz-compressed) payload tarball
    :return: path of the written metadata file
    """
    # defaultdict so missing keys render as "" instead of raising.
    info = collections.defaultdict(lambda: "")

    # Extract datafile in temporary directory and scan for dependencies.
    tmpdir = self.mktemp(directory=True)
    if self.payload_compression == "xz":
        archive = tar.InnerTarFileXz.open(datafile)
    else:
        archive = tar.InnerTarFile.open(datafile)
    try:
        archive.extractall(path=tmpdir)
    finally:
        # Close even if extraction fails; the original also rebound the
        # name `tarfile`, which this rename avoids.
        archive.close()

    # Run the dependency tracker.
    self.pkg.track_dependencies(self.builder, tmpdir)

    # Generic package information including Pakfire information.
    info.update({
        "pakfire_version" : PAKFIRE_VERSION,
        "uuid"            : self.pkg.uuid,
        "type"            : "binary",
    })

    # Include distribution information.
    info.update(self.pakfire.distro.info)
    info.update(self.pkg.info)

    # Update package information for string formatting.
    info.update({
        "groups"      : " ".join(self.pkg.groups),
        "prerequires" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.prerequires]),
        "requires"    : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.requires]),
        "provides"    : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.provides]),
        "conflicts"   : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.conflicts]),
        "obsoletes"   : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.obsoletes]),
        "recommends"  : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.recommends]),
        "suggests"    : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
            for d in self.pkg.suggests]),
    })

    # Format description (wrapped to 80 columns).
    description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
        for l in util.text_wrap(self.pkg.description, length=80)]
    info["description"] = "\n".join(description)

    # Build information.
    info.update({
        # Package is built right now.
        "build_time" : int(time.time()),
        "build_id"   : uuid.uuid4(),
    })

    # Installed size (equals size of the uncompressed tarball).
    info.update({
        "inst_size" : self.getsize(datafile),
    })

    metafile = self.mktemp()
    # `with` guarantees the metafile is flushed/closed even on error.
    with open(metafile, "w") as f:
        f.write(PACKAGE_INFO % info)

    return metafile
from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart content = MIMEMultipart() #建立MIMEMultipart物件 content["subject"] = "Mail title" #郵件標題 content["from"] = "*****@*****.**" #寄件者 content["to"] = "*****@*****.**" #收件者 # 壓縮log檔案 tar = tar.open("file name", "w:gz") for root, dir, files in os.walk("/var/log"): for file in files: if "secure" in file: fullpath = os.path.join(root, file) tar.add(fullpath) tar.close() # 文字內容 content.attach(MIMEText("hdfs-overcomer.ddns.net secure log files.")) #郵件內容 # 附加檔案 att = MIMEText(open('compress file name', 'rb').read(), 'base64', 'utf-8') att["Content-Type"] = 'application/octet-stream' att["Content-Disposition"] = 'attachment; filename="compress file name"' content.attach(att) #郵件內容 import smtplib with smtplib.SMTP(host="smtp.gmail.com", port="587") as smtp: # 設定SMTP伺服器 try:
def uncompress_tgz(self, path):
    """Extract every member of the tarball at *path* into self.path.

    :param path: path of the archive to open (via the `tarfiles` module)
    """
    archive = tarfiles.open(path)
    try:
        # NOTE(review): extractall() on untrusted archives permits path
        # traversal ("../" members) — confirm inputs are trusted.
        archive.extractall(self.path)
    finally:
        # Close even if extraction fails (the original leaked the handle);
        # also avoids a local named `tarfile`.
        archive.close()
songname = basename if trackno.match(songname): songname = trackno.sub('', songname) albumdir = os.path.basename(dirpath) artistdir = os.path.basename(os.path.dirname(dirpath)) key = (artistdir, albumdir, songname, dirpath) value = os.path.join(dirpath, filename) found[key].append((index, value)) count += 1 artists = sorted(set([x[0] for x in found])) albums = sorted(set([x[:2] for x in found])) # re-do everything by artist/album by_artist_album = collections.defaultdict(dict) for artist, album in albums: for key in found: if key[0] == artist and key[1] == album: by_artist_album[(artist, album)][key[2]] = found[key] for (artist, album), songs in by_artist_album.items(): # figure the albumdir albumdir = os.path.dirname(songs[songs.keys()[0]][0][1]) archive_path = os.path.join(albumdir, '@'.join([re.sub('[^-A-Za-z0-9_]', '_', artist), re.sub('[^-A-Za-z0-9_]', '_', album)]) + ".tar.bz2") tarfile = tarfile.open(archive_path, mode='w:bz2') for songname, locations in songs.items(): for idx, location in locations: tarfile.add(location, arcname=songname+".mp3") rmfile.write("%s\n" % (location,)) tarfile.close() print archive_path