def hunt_and_explode(self):
    '''
    Used to find all type-5 package data in the repository and untar the
    files properly.
    '''
    base_path = make_path(get_spkg_path(), "repos")
    output = ""
    if sys.platform == "cli":
        # IronPython: no gso() available, so shell out and capture via a file
        tfn = "c:/spkg/tmp/tarballs.txt"
        cmd = 'bash -c "find %s -name \"*.tar.gz\"" > %s' % (base_path, tfn)
        os.system(cmd)
        output = open(tfn).read()
    else:
        _status, output = gso('find %s -name "*.tar.gz"' % base_path)
    start_dir = get_slash_cwd()
    for full_tar_file_name in output.split('\n'):
        # BUGFIX: 'find' output ends with a newline, so split('\n') yields a
        # trailing empty string; previously that produced a bogus
        # tar_file_dir and a spurious 'mkdir -p'. Skip blank entries.
        if not full_tar_file_name.strip():
            continue
        tmp_list = full_tar_file_name.split('/')
        tar_file_name = tmp_list[-1]
        base_name = tar_file_name.split('.tar.gz')[0]
        tar_file_dir = make_path(tmp_list[:-1] + [base_name])
        if not os.path.isdir(tar_file_dir):
            cmd = 'bash -c "mkdir -p %s"' % tar_file_dir
            status = os.system(cmd)
            if status == OK:
                # -m: do not restore mtimes from the archive
                cmd = 'bash -c "cd %s && tar -mxzf ../%s"'
                cmd = cmd % (tar_file_dir, tar_file_name)
                if os.system(cmd) != OK:
                    msg = "could not untar %s" % (tar_file_name)
                    raise Exceptions.BadPackage(full_tar_file_name, msg)
    os.chdir(start_dir)
def start():
    """Build a throw-away spkg work area under the current directory and
    point the client configuration file at it."""
    cwd = get_slash_cwd()
    instance_dir = make_path(cwd, 'spkg', "TEST_INSTANCE")
    status_file = make_path(instance_dir, "status.yml")
    work_dir = make_path(cwd, 'spkg')
    fallback_config = {"spkg_path": work_dir}
    # lay down the directory skeleton and copy in the repository
    os.system('bash -c "mkdir -p %s"' % instance_dir)
    os.system('bash -c "mkdir %s/tmp"' % instance_dir)
    os.system('bash -c "cp -ax repos/ %s"' % work_dir)
    if not os.path.isdir(PKG_DIR):
        os.system('bash -c "mkdir -p %s"' % PKG_DIR)
    # build the test .spkg archive and stage it in the package directory
    os.system('bash -c "cd repos/type4 && tar -czmf TestPackage-7.spkg TestPackage-7"')
    os.system('bash -c "cp repos/type4/*.spkg %s"' % PKG_DIR)
    open(status_file, 'w').write("{}")
    # rewrite the client config, remembering any previous spkg_path
    current_config = get_current_config()
    if not current_config:
        current_config = fallback_config
    else:
        if "spkg_path" in current_config:
            current_config["old_spkg_path"] = current_config["spkg_path"]
        current_config["spkg_path"] = work_dir
    config_fp = open(CONFIG_FILE, 'w')
    config_fp.write(yaml_dump(current_config))
    config_fp.flush()
    config_fp.close()
def make_symlinks(self, pkg_path, info_dict, full_name):
    '''
    Create symlinks from the repository into where the package expects to run
    pkg_path -- directory where the versioned package will reside
    info_dict -- information regarding injectors and libraries
    full_name -- name of the package, including version info
    '''
    base_path = make_path(get_spkg_path(), "repos")
    for component_type in info_dict:
        component_dict = info_dict[component_type]
        for component_name in component_dict:
            full_path = component_dict[component_name]["path"]
            # BUGFIX: this used to clobber the 'full_name' parameter, so
            # BadPackage was raised with the component archive name instead
            # of the package being processed. Use a distinct local instead.
            archive_name = full_path.split('/')[-1]
            archive_name = archive_name.split('.tar.gz')[0]
            src = make_path(base_path, component_type, archive_name)
            dst = make_path(pkg_path, component_type, component_name)
            if os.path.islink(dst):
                continue  # already linked on a previous run
            if not os.path.isdir(src):
                msg = "Package component (%s) does not exist." % src
                raise Exceptions.BadPackage(full_name, msg)
            if sys.platform == "cli":
                # IronPython/Windows: no symlinks, copy the tree instead
                cmd = 'bash -c "cp -ax %s %s"' % (src, dst)
            else:
                cmd = 'bash -c "ln -s %s %s"' % (src, dst)
            if os.system(cmd) != OK:
                msg = "Could not create symlink (%s)" % cmd
                raise Exceptions.BadPackage(full_name, msg)
def instance_setup(instance_name):
    "When Bombardier is first getting set up, this method is called."
    progress_path = get_progress_path(instance_name)
    status_dict = None
    if os.path.isfile(progress_path):
        try:
            status_dict = json.loads(open(progress_path).read())
        # BUGFIX: was a bare 'except:' which also swallowed KeyboardInterrupt
        # and SystemExit; catch only read/parse failures.
        except (IOError, ValueError):
            msg = "Unable to load existing json from %s" % progress_path
            Logger.warning(msg)
    # a missing or corrupt file leaves status_dict as None (or junk) --
    # start over with an empty progress record
    if not isinstance(status_dict, dict):
        status_dict = {"install-progress": {}}
    # record versions under both naming conventions for compatibility
    status_dict["client_version"] = CLIENT_VERSION
    status_dict["core_version"] = CORE_VERSION
    status_dict["clientVersion"] = CLIENT_VERSION
    status_dict["coreVersion"] = CORE_VERSION
    pkg_dir = make_path(get_spkg_path(), instance_name, "packages")
    repos_dir = make_path(get_spkg_path(), "repos")
    tmp_dir = make_path(get_spkg_path(), "tmp")
    for required_dir in (pkg_dir, repos_dir, tmp_dir):
        if not os.path.isdir(required_dir):
            os.makedirs(required_dir)
    open(progress_path, 'w').write(json.dumps(status_dict))
def _get_lib_path(self):
    '''
    Need to modify our path and then clean it out again. This gets the
    data that both operations will need.
    '''
    pkg_root = make_path(get_spkg_path(), self.instance_name,
                         "packages", self.full_name)
    return make_path(pkg_root, "libs")
def setUp(self):
    """Create the scratch tree and the FileManifest under test."""
    self.test_dir = '_manifest_test'
    self.manifest_file = make_path(self.test_dir, 'manifest/manifest.yml')
    self.create_directory_structure()
    # only .exe and .aspx files get real md5 fingerprints
    self.file_manifest = FileManifest(self.test_dir, ['foo', 'bar'],
                                      self.manifest_file, ['exe', 'aspx'])
    self.expected_manifest_dict = {
        'foo': {'file.exe': 'b66b5b56809078def934c04cda9e791f'},
        'bar': {'file.txt': '',
                'bar2/file.aspx': 'dbf1f2836e1b325fcfdfa6fca6aee3c1'},
    }
    self.mapping_dictionary = {
        'bar': make_path(self.test_dir, 'bar'),
        'foo': make_path(self.test_dir, 'foo'),
    }
def test_uninstall_error_script(self):
    """An uninstall script that returns an error must leave status FAIL."""
    self.repository.pkg_data = {"pkg1": {"install": {"fullName": "TestBadUninstall"},
                                         "package-version": 4}}
    # NOTE: removed unused local 'base' (a make_path result that was never read)
    package = PackageV4("pkg1", self.repository, self.config, INSTANCE)
    package.initialize()
    package.uninstall()
    assert package.status == FAIL, \
        "Uninstallation of a package that returns an error succeeded"
def test_uninstall_ok_package(self):
    """A well-formed package must uninstall cleanly with status OK."""
    self.repository.pkg_data = {"pkg1": {"install": {"fullName": "TestPackage-7"},
                                         "package-version": 4}}
    # NOTE: removed unused local 'base' (a make_path result that was never read)
    package = PackageV4("pkg1", self.repository, self.config, INSTANCE)
    package.initialize()
    package.uninstall()
    assert package.status == OK, "Legitimate package uninstallation failed"
def test_verify_bad_package(self):
    """A verify script that returns an error must leave status FAIL."""
    self.repository.pkg_data = {"pkg1": {"install": {"fullName": "TestBadVerify"},
                                         "package-version": 4}}
    # NOTE: removed unused local 'base' (a make_path result that was never read)
    package = PackageV4("pkg1", self.repository, self.config, INSTANCE)
    package.initialize()
    package.action = VERIFY
    package.verify()
    assert package.status == FAIL, \
        "Verification of a package that returns an error succeeded"
def create_manifest(self):
    """Compute md5 sums for the package contents, write them to the
    manifest file, and return the resulting dictionary."""
    # merging previously-recorded sums is currently disabled (empty dict)
    old_md5_data = {}
    md5_dict = self.dump_md5_sums()
    md5_dict.update(old_md5_data)
    manifest_path = make_path(get_base_path(self.source_file), MANIFEST_FILE)
    open(manifest_path, 'w').write(yaml_dump(md5_dict))
    self.logger.info("Writing to %s..." % (manifest_path))
    return md5_dict
def use_pkg(self, pkn, action, script_name, arguments): """ Main entry point to the class. Performs an action using a package pkn -- name of the package to use action -- STATUS, INSTALL, UNINSTALL, CONFIGURE, VERIFY, or EXEC script_name -- the name of a method to run within the package arguments -- arguments to the executable, typically a restore target """ try: pkg = self._get_new_pkg(pkn) if pkg.status == FAIL: self.operation_status = FAIL return self._cleanup() if action == INSTALL: pdat = self.progress.get_progress_data(False) installed_pkns, broken_pkns = Progress.get_installed(pdat) if pkn in [installed_pkns + broken_pkns]: Logger.error("Package %s cannot be installed." % pkn) self.operation_status = FAIL return FAIL add_pkd = {pkn: pkg} status = self._install_pkgs(add_pkd) if action == UNINSTALL: pdat = self.progress.get_progress_data(False) installed_pkns, broken_pkns = Progress.get_installed(pdat) bom_pkns = installed_pkns if pkn in bom_pkns: bom_pkns.remove(pkn) self.config.set_bom_pkgs(bom_pkns) add_pkd, del_pkd, uninstall_order = self._ck_inst_stat([]) status = self._uninstall_pkgs(del_pkd, uninstall_order) if action == VERIFY: status = pkg.verify() if action == CONFIGURE: self._check_configuration(pkg) status = pkg.configure() hash_path = make_path(pkg.get_path(), HASH_FILE) msg = "Writing configuration fingerprint to %s" % hash_path Logger.info(msg) self.config.save_hash(hash_path) if action in [EXECUTE, BACKUP, RESTORE]: status = pkg.execute_maint_script(script_name, arguments) if status == FAIL: self.operation_status = FAIL msg = "Finished %s for %s." msg = msg % (ACTION_REVERSE_LOOKUP[action], pkn) self.operation_output.append(msg) status = self._cleanup() return self._cleanup() except Exceptions.BadPackage, err: errmsg = "Cannot perform action %s on package %s: %s" errmsg = errmsg % (ACTION_REVERSE_LOOKUP[action], err.pkn, err.errmsg) Logger.warning(errmsg) Logger.info("RETURN FAIL 1") return FAIL
def _download(self):
    '''
    Run right before a package action takes place. We see if we have a
    package directory and if not, tell the repository to unpack it from
    the filesystem. Then verify all the pieces and parts are there for a
    good type-4 package
    '''
    if self.downloaded:
        return
    self.repository.get_type_5(self.full_name, self.injectors_info,
                               self.libs_info)
    pkg_dir = make_path(get_package_path(self.instance_name), self.full_name)
    injector_dir = make_path(pkg_dir, "injectors")
    if not os.path.isdir(injector_dir):
        self.status = FAIL
        errmsg = "The injector directory does not exist for [%s]"
        raise BadPackage(self.name, errmsg % self.full_name)
    self.working_dir = injector_dir
    self.downloaded = True
def test_verify_manifest(self):
    """Round-trip the expected manifest through its file, then check that
    verify_manifest flags missing and modified fingerprinted files."""
    handle = open(self.manifest_file, 'w')
    json.dump(self.expected_manifest_dict, handle)
    handle.flush()
    handle.close()
    self.file_manifest.load_manifest()
    assert self.file_manifest.manifest_dictionary == self.expected_manifest_dict
    assert self.file_manifest.verify_manifest(self.mapping_dictionary) == []
    # deleting a tracked file must produce errors
    force_remove_file(make_path(self.test_dir, 'bar', 'file.txt'))
    errors = self.file_manifest.verify_manifest(self.mapping_dictionary)
    assert errors != [], errors
    # recreating the .txt with new content clears the errors -- presumably
    # .txt is not in the fingerprinted extension list
    open(make_path(self.test_dir, 'bar', 'file.txt'), 'w').write("New text.")
    errors = self.file_manifest.verify_manifest(self.mapping_dictionary)
    assert errors == [], errors
    # whereas rewriting a fingerprinted .aspx must be flagged
    open(make_path(self.test_dir, 'bar', 'bar2', 'file.aspx'), 'w').write(u"New aspx.")
    errors = self.file_manifest.verify_manifest(self.mapping_dictionary)
    assert errors != [], errors
def _check_configuration_hash(self, pkn):
    """
    We want to see if the configuration that a given package has used
    has changed since it was installed.
    pkn -- the name of the package to check
    """
    pkg = self._get_new_pkg(pkn)
    saved_hash = make_path(pkg.get_path(), HASH_FILE)
    config_diff = self.config.check_hash(saved_hash)
    return find_differences(pkg.get_configuration(), config_diff, [])
def execute_maint_script(self, script_name):
    '''
    execute a user-defined function
    script_name -- name of the function to run
    '''
    self._download()
    # drop any output left over from a previous run of this script
    output_path = make_path(get_spkg_path(), self.instance_name,
                            "output", "%s-output.yml" % script_name)
    if os.path.isfile(output_path):
        os.unlink(output_path)
    message = "Executing (%s) inside package (%s)"
    Logger.info(message % (script_name, self.full_name))
def get_type_4(self, full_name):
    '''
    Get a type-4 package from the filesystem, and process it
    full_name -- name of package (with version)
    '''
    pkg_dir = get_package_path(self.instance_name)
    os.system('bash -c "mkdir -p %s"' % pkg_dir)
    pkg_path = make_path(pkg_dir, full_name)
    # the .spkg archive must already be present in the package directory
    if not os.path.isfile(pkg_path + ".spkg"):
        erstr = "No package file in %s." % (pkg_path + ".spkg")
        Logger.error(erstr)
        raise Exceptions.BadPackage(full_name, erstr)
    if sys.platform != 'win32':
        # non-Windows: unpack in place with tar (-m skips archive mtimes)
        cmd = 'bash -c "cd %s && tar -mxzf %s.spkg"' % (pkg_dir, full_name)
        Logger.info("Untarring with command: %s" %cmd)
        if not os.system(cmd) == OK:
            raise Exceptions.BadPackage(full_name, "Could not unpack")
        return OK
    # Windows path: unzip the archive; the tarfile-based extraction that
    # used to follow is disabled (kept below for reference).
    if self.unzip_type_5(pkg_path, full_name) == FAIL:
        raise Exceptions.BadPackage(full_name, "could not unzip")
    # tar = tarfile.open(pkg_path + ".tar", "r")
    # tar.errorlevel = 2
    cwd = get_slash_cwd()
    os.chdir(pkg_dir)
    # for tarinfo in tar:
    #     try:
    #         tar.extract(tarinfo)
    #     except tarfile.ExtractError, err:
    #         Logger.warning("Error with package %s,%s: "\
    #                        "%s" % (full_name, tarinfo.name, err))
    # tar.close()
    # sanity check: the unpacked tree must contain <pkg_path>/<full_name>
    if not os.path.isdir(make_path(pkg_path, full_name)):
        erstr = "Package %s is malformed." % (full_name)
        os.chdir(cwd)  # restore the working directory before bailing out
        raise Exceptions.BadPackage(full_name, erstr)
    os.chdir(cwd)
    # os.unlink(pkg_path + ".tar")
    return OK
def _prepare_restore(self, obj, restore_path):
    """ Command to deal with setting up the environment prior to a
    restore action
    obj -- package object which owns methods for restoring, etc.
    restore_path -- place where restore files have been dropped
    """
    if hasattr(obj, "pre_restore_cmd"):
        # BUGFIX: previously this read obj.pre_backup_cmd, which raises
        # AttributeError when a package defines only pre_restore_cmd, and
        # mis-reported the failing command in the error message.
        pre_restore_cmd = obj.pre_restore_cmd
        status = self._find_cmd(pre_restore_cmd)
        if status != OK:
            erstr = "%s: restore FAILED because %s failed."
            Logger.error(erstr % (self.full_name, pre_restore_cmd))
            return FAIL
    backup_data = yaml_load(open(make_path(restore_path, "backup_info.yml")).read())
    for backup_target in backup_data:
        # keys with a '__' prefix are bookkeeping entries, not targets
        if backup_target.startswith("__"):
            continue
        md5_data = backup_data[backup_target].get("md5", {})
        backup_file_name = backup_data[backup_target].get("backup_file", '')
        if not md5_data or not backup_file_name:
            Logger.error("Corrupted backup data for %s, aborting." % (backup_target))
            return FAIL
        source_file = make_path(restore_path,
                                rpartition(backup_target, os.path.sep)[0][1:],
                                backup_file_name)
        # NOTE(review): destination is built under restore_path, not under
        # the filesystem root -- confirm this is the intended staging area.
        destination_file = make_path(restore_path, backup_target[1:])
        Logger.debug("=================================")
        Logger.debug("backup_target: %s..." % (backup_target))
        Logger.debug("current directory: %s" % (restore_path))
        Logger.debug("backup_file_name: %s..." % (backup_file_name))
        Logger.debug("destination_file: %s..." % (destination_file))
        Logger.debug("source_file: %s" % (source_file))
        Logger.debug("=================================")
        if not os.path.isfile(source_file):
            Logger.error("Restore file %s does not exist, aborting" % (source_file))
            return FAIL
        rfp = ReversePluggableFileProcessor(source_file, destination_file,
                                            md5_data, Logger)
        rfp.process_all()
def _download(self):
    '''
    Run right before a package action takes place. We see if we have a
    package directory and if not, tell the repository to unpack it from
    the filesystem. Then verify all the pieces and parts are there for a
    good type-4 package
    '''
    if self.downloaded:
        return
    pkg_dir = make_path(get_package_path(self.instance_name), self.full_name)
    if not os.path.isdir(pkg_dir):
        self.repository.get_type_4(self.full_name)
    self.scripts_dir = make_path(pkg_dir, "scripts")
    self.maint_dir = make_path(pkg_dir, "maint")
    injector_dir = make_path(pkg_dir, "injector")
    # a usable package must provide both scripts and injector directories
    for required_dir in (self.scripts_dir, injector_dir):
        if not os.path.isdir(required_dir):
            self.status = FAIL
            errmsg = "Required directory %s does not exist" % required_dir
            raise BadPackage(self.name, errmsg)
    self.working_dir = injector_dir
    self.downloaded = True
def get_type_5(self, full_name, injectors_info, libs_info):
    '''
    Get type-5 package components from the filesystem, and process them
    full_name -- name of the package, including version info
    injectors_info -- dictionary describing injector libraries
    libs_info -- dictionary describing python code libraries
    '''
    # unpack every repository tarball that hasn't been exploded yet
    self.hunt_and_explode()
    pkg_path = make_path(get_spkg_path(), self.instance_name,
                         "packages", full_name)
    injector_path = make_path(pkg_path, "injectors")
    lib_path = make_path(pkg_path, "libs")
    if not os.path.isdir(pkg_path):
        Logger.info("Making directory %s" % pkg_path)
        # NOTE(review): directory creation is skipped entirely when pkg_path
        # exists -- confirm injectors/libs cannot go missing independently.
        for path in [pkg_path, injector_path, lib_path]:
            cmd = 'bash -c "mkdir -p %s"' % path
            if os.system(cmd) != OK:
                msg = "Could not create directory structure (%s)" % path
                raise Exceptions.BadPackage(full_name, msg)
    # symlink the repository components into the package's directory tree
    info_dict = {"injectors": injectors_info, "libs": libs_info}
    self.make_symlinks(pkg_path, info_dict, full_name)
    return OK
def get_manifest_data(file_name, logger):
    """Read the manifest stored next to file_name and return its md5
    dictionary; an absent or malformed manifest yields an empty dict."""
    manifest_path = make_path(get_base_path(file_name), MANIFEST_FILE)
    logger.info("manifest_path: %s" % (manifest_path))
    md5_dict = {}
    if not os.path.isfile(manifest_path):
        logger.info("FILE DOESN'T EXIST")
        return md5_dict
    md5_dict = yaml_load(open(manifest_path, 'r').read())
    if type(md5_dict) != type({}):
        # the file parsed, but not into a dictionary -- treat as empty
        logger.info("NOT A DICTIONARY")
        md5_dict = {}
    return md5_dict
def create_directory_structure(self):
    """Rebuild the scratch tree (test_dir/{manifest,bar/bar2,foo}) with
    one sample file of each flavor."""
    self.start_dir = get_slash_cwd()
    force_remove_dir(self.test_dir)
    log_command('bash -c "mkdir -p %s"' % self.test_dir)
    for subdir in ['manifest', make_path('bar', 'bar2'), 'foo']:
        log_command('bash -c "mkdir -p %s"' % make_path(self.test_dir, subdir))
    samples = [
        (('bar', 'file.txt'), "This is text."),
        (('bar', 'bar2', 'file.aspx'), u"Aspx file."),
        (('foo', 'file.exe'), "\x45\x90\x08\x21\x05"),
    ]
    for path_parts, payload in samples:
        handle = open(make_path(self.test_dir, *path_parts), 'w')
        handle.write(payload)
        handle.close()
def _find_cmd(self, action, arguments=[], future_pkns=[], dry_run=False):
    '''
    Perform the action on the system, importing modules from the package
    and running the appropriate method on the class within.
    action -- INSTALL, UNINSTALL, CONFIGURE, VERIFY
    future_pkns -- future package names. Some packages want to know
                   about the packages that will come after them
    dry_run -- boolean flag to see if we're really going to do this
    '''
    # NOTE(review): mutable default arguments ([]) are shared across calls;
    # appears benign here since they are only read, but worth confirming.
    ret_val = None
    if type(action) == type(1):
        # numeric action constants are translated to their method names
        action = ACTION_REVERSE_LOOKUP[action]
    cwd = get_slash_cwd()
    # rand_string presumably pins temporary state created by _get_object;
    # it is explicitly deleted on both exit paths below
    obj, rand_string = self._get_object(future_pkns)
    try:
        if not hasattr(obj, action):
            msg = "Class %s does not have a %s method."
            raise BadPackage(self.name, msg % (self.class_name, action))
        if not dry_run:
            if arguments:
                if ACTION_LOOKUP.get(action) == RESTORE:
                    if len(arguments) != 1:
                        Logger.error("Incorrect number of arguments passed to restore")
                        return FAIL
                    restore_path = make_path(get_spkg_path(), "archive",
                                             self.name, str(arguments[0]))
                    if not os.path.isdir(restore_path):
                        msg = "Cannot execute restore: archive data does not "\
                              "exist in %s" % (restore_path)
                        Logger.error(msg)
                        return FAIL
                    self._prepare_restore(obj, restore_path)
                # HACK: dynamic dispatch via exec on untrusted-looking
                # strings; 'action' comes from ACTION_REVERSE_LOOKUP, so the
                # method name itself is constrained.
                # NOTE(review): restore_path is only bound on the RESTORE
                # branch above -- a non-restore action with arguments would
                # raise NameError here; confirm intended call paths.
                exec("ret_val = obj.%s('%s')" % (action, restore_path))
            else:
                exec("ret_val = obj.%s()" % (action))
        else:
            # dry runs report success without touching the system
            ret_val = OK
        self._cleanup(obj)
        del rand_string
    except SystemExit, err:
        # package scripts may sys.exit(); translate the exit code into a
        # return value (0 / None -> OK)
        if err.code:
            ret_val = err.code
        else:
            ret_val = OK
        del rand_string
def _install_pkgs(self, add_pkd, dry_run=False):
    """
    Provided a dictionary of packages, we will install those that
    need to be installed.
    add_pkd -- a dictionary of packages that need to be added
    dry_run -- whether or not to just 'fake it'
    """
    status = OK
    making_progress = True
    # seeded non-empty so the while loop runs at least once
    pkns_left = ["initialize"]
    while making_progress and pkns_left:
        making_progress = False
        # a failed install may change what order is viable, so the order
        # is recomputed on every pass
        install_order = self._find_install_order(add_pkd)
        pkns_left = list(install_order)
        for pkn in install_order:
            msg = "Packages remaining to install (in order):"
            Logger.info(msg)
            for tpkn in pkns_left:
                Logger.info(" + %s" % tpkn)
            pkns_left.remove(pkn)
            pkg = add_pkd[pkn]
            erstr = "Currently installing package priority %s [%s]"
            Logger.info(erstr % (pkg.get_priority(), pkn))
            # the package is told which packages still follow it
            pkg_status = pkg.install_and_verify(pkns_left)
            if not dry_run:
                # fingerprint the configuration used for this install
                hash_path = make_path(pkg.get_path(), HASH_FILE)
                self.config.save_hash(hash_path)
            if pkg_status == FAIL:
                status = FAIL
                erstr = "Package installation failure -- re-calculating"\
                        " package installation order"
                Logger.error(erstr)
                break
            else:
                making_progress = True
    if status != OK:
        msg = "There are packages that are broken, and we have done all"\
              " we can do. ; ;"
        Logger.error(msg)
    return status
def __init__(self, config, logger):
    """Initialize the test package, caching its configured value and
    working directory."""
    SpkgV4.__init__(self, config, logger)
    # configured knobs, with defaults suitable for the test environment
    self.thing = config.get("test", "value", "foo")
    test_dir = config.get("test", "directory", "c:/test")
    self.directory = test_dir
    self.test_path = make_path(test_dir, "TEST.TXT")
def data_request(self):
    "Obtain configuration data from the server"
    # Read base64-encoded chunks from stdin, acknowledging each with '>'.
    # A chunk that is empty, starts with a space, or ends with '-' marks
    # the end of the stream (the trailing marker character is stripped).
    b64_data = []
    while True:
        chunk = sys.stdin.read(STREAM_BLOCK_SIZE).strip()
        if not chunk or chunk[0] == ' ' or chunk.endswith("-"):
            chunk = chunk[:-1]
            b64_data.append(chunk)
            break
        b64_data.append(chunk)
        sys.stdout.write(">\n")
        sys.stdout.flush()
    json_data = ''
    # decompression of the stream is currently disabled:
    #json_data = zlib.decompress(base64.decodestring(''.join(b64_data)))
    json_data = base64.decodestring(''.join(b64_data))
    Logger.debug("Received %s lines of json" % len(json_data.split('\n')))
    try:
        input_data = json.loads(json_data)
    except:
        # parse failure: dump the raw payload to a temp file for diagnosis
        ermsg = "Configuration data not YAML-parseable: %s" % (repr(json_data))
        file_number, filename = tempfile.mkstemp(suffix=".yml")
        fh = open(filename, 'w')
        Logger.error("Writing bad data to %s" % filename)
        for line in json_data.split('\n'):
            fh.write("[%s]" % line)
        fh.flush()
        fh.close()
        raise ConfigurationException(ermsg)
    if type(input_data) == type("string"):
        # a bare string parsed, but it isn't usable configuration
        ermsg = "Configuration data not YAML-parseable: %s" % (repr(json_data))
        raise ConfigurationException(ermsg)
    if type(input_data) != type({}) and type(input_data) != type([]):
        # presumably a generator-like object -- take its first item
        input_data = input_data.next()
    config_key = input_data.get("config_key", None)
    if config_key:
        # an encrypted configuration: decrypt the local client.yml.enc
        # using the key supplied by the server
        try:
            from bombardier_core.Cipher import Cipher
        except ImportError:
            msg = "This machine cannot accept an encrypted configuration"
            raise ConfigurationException(msg)
        enc_json_file = make_path(get_spkg_path(), self.instance_name,
                                  'client.yml.enc')
        if not os.path.isfile(enc_json_file):
            msg = "%s file doesn't exist" % enc_json_file
            raise ConfigurationException(msg)
        enc_data = open(enc_json_file).read()
        cipher = Cipher(config_key)
        plain_json_str = cipher.decrypt_string(enc_data)
        try:
            input_data = json.loads(plain_json_str)
        except:
            ermsg = "Received bad YAML file: %s" % enc_json_file
            raise ConfigurationException(ermsg)
    config_data = input_data.get("config_data")
    if not config_data:
        raise ConfigurationException("No configuration data received")
    package_data = input_data.get("package_data", {})
    self.config = Config(self.instance_name, config_data)
    self.repository = Repository(self.instance_name, package_data)
def get_path(self):
    'find place on the disk where this package can be accessed'
    return make_path(get_package_path(self.instance_name), self.full_name)
import os

from bombardier_core.static_data import CLIENT_CONFIG_FILE
from bombardier_core.mini_utility import ensure_bombardier_config_dir
from bombardier_core.mini_utility import make_path, get_slash_cwd
from bombardier_core.mini_utility import yaml_load, yaml_dump

cwd = get_slash_cwd()
# normalize to forward slashes so the path survives the bash -c commands below
CONFIG_FILE = CLIENT_CONFIG_FILE.replace('\\', '/')
INSTANCE_WORK_DIR = make_path(cwd, 'spkg', "TEST_INSTANCE")
PKG_DIR = make_path(INSTANCE_WORK_DIR, "packages")
STATUS_FILE = make_path(INSTANCE_WORK_DIR, "status.yml")
SPKG_WORK_DIR = make_path(cwd, 'spkg')

def get_current_config():
    # Return the client config as a dict, or None when the file is missing
    # or does not parse to a dictionary.
    ensure_bombardier_config_dir()
    if os.path.isfile(CONFIG_FILE):
        current_config = yaml_load(open(CONFIG_FILE).read())
        if type(current_config) == type({}):
            return current_config
        else:
            return None

def start():
    # NOTE(review): this copy of start() appears cut off at the chunk
    # boundary -- the visible body stops after the 'mkdir tmp' step and
    # duplicates a fuller start() elsewhere in this file; confirm against
    # the complete source.
    cwd = get_slash_cwd()
    INSTANCE_WORK_DIR = make_path(cwd, 'spkg', "TEST_INSTANCE")
    status_file = make_path(INSTANCE_WORK_DIR, "status.yml")
    spkg_work_dir = make_path(cwd, 'spkg')
    config_data = {"spkg_path": spkg_work_dir}
    os.system('bash -c "mkdir -p %s"' % INSTANCE_WORK_DIR)
    os.system('bash -c "mkdir %s/tmp"' % INSTANCE_WORK_DIR)
    #print "Copying repository files..."
def test_unzip(self):
    """unzip_type_4 on the bundled TestPackage-7 archive must succeed."""
    archive = make_path("repos", "type4", "TestPackage-7")
    result = self.repository.unzip_type_4(archive, "TestPackage-7-1")
    assert result == OK
def _backup(self, obj, backup_data, future_pkns, dry_run):
    "Perform basic backup functions for a package"
    # packages may define shell hooks that run around the backup proper
    pre_backup_cmd = obj.pre_backup
    post_backup_cmd = obj.post_backup
    if pre_backup_cmd:
        status = self._find_cmd(pre_backup_cmd, future_pkns=future_pkns,
                                dry_run=dry_run)
        if status != OK:
            erstr = "%s: backup FAILED because pre-backup command failed."
            Logger.error(erstr % self.full_name)
            return FAIL
    # validate the package-supplied backup specification
    file_names = backup_data.get("file_names")
    if type(file_names) != type([]):
        errmsg = "Package %s did not define its backup data correctly."\
                 " '%s' should be a list."
        Logger.error(errmsg % (self.full_name, file_names))
        return FAIL
    options = backup_data.get("options", [COMPRESS])
    if type(options) != type([]):
        errmsg = "Package %s did not define its backup data correctly."\
                 " '%s' should be a list."
        Logger.error(errmsg % (self.full_name, options))
        return FAIL
    backup_dir = tempfile.mkdtemp()
    Logger.info("Temporary backup dir: %s" % backup_dir)
    start_time = time.time()
    # NOTE: from here on 'backup_data' is re-bound to the result report,
    # shadowing the argument dict that was read above.
    backup_data = {"__START_TIME__": start_time}
    for file_name in file_names:
        backup_data[file_name] = {}
        current_start = time.time()
        # NOTE(review): indentation reconstructed -- only absolute paths
        # (leading os.path.sep) appear to be processed; confirm relative
        # entries are meant to be skipped silently.
        if file_name.startswith(os.path.sep):
            backup_destination = make_path(backup_dir, file_name[1:])
            fpf = ForwardPluggableFileProcessor(file_name, backup_destination,
                                                options, Logger)
            backup_file_name, md5_dict = fpf.process_all()
            if not os.path.isfile(backup_file_name):
                Logger.error("Backup file not created.")
                return FAIL
            # record per-file metadata for the report dumped at the end
            backup_data[file_name]["md5"] = md5_dict
            backup_data[file_name]["backup_file"] = rpartition(backup_file_name,
                                                               os.path.sep)[-1]
            elapsed_time = time.time() - current_start
            backup_data[file_name]["elapsed_time"] = elapsed_time
            size = os.stat(file_name)[stat.ST_SIZE]
            backup_data[file_name]["size"] = size
            backup_data[file_name]["status"] = OK
    if post_backup_cmd:
        status = self._find_cmd(post_backup_cmd, future_pkns=future_pkns,
                                dry_run=dry_run)
        if status != OK:
            erstr = "%s: backup FAILED because post-backup command failed."
            Logger.error(erstr % self.full_name)
            return FAIL
    backup_data["__BACKUP_DIR__"] = backup_dir
    # emit the report line-by-line so it is greppable in the log
    dump_string = yaml_dump(backup_data)
    for line in dump_string.split('\n'):
        Logger.info("==REPORT==:%s" % line)
    return OK