def run_pip():
    """Install a recent pip, then pywin32 and yapf, into the project tree.

    The pip bundled with Python is too old to support the later install
    steps, so a recent pip is first installed into a throwaway
    PYTHONUSERBASE directory, which is removed afterwards.
    """
    # Install a recent version of pip into a temporary directory. The version
    # that is bundled with python is too old to support the next step.
    temp_python_home = mkdtemp()
    pip_env = {"PYTHONUSERBASE": temp_python_home}
    try:
        run([sys.executable, "-m", "pip", "install", "--upgrade", "--user",
             "pip"],
            cwd=third_party_path,
            merge_env=pip_env)
        # Install pywin32 (win_amd64 binary wheels only).
        run([
            sys.executable, "-m", "pip", "install", "--upgrade", "--target",
            python_packages_path, "--platform=win_amd64",
            "--only-binary=:all:", "pypiwin32"
        ],
            cwd=third_party_path,
            merge_env=pip_env)
        # Get yapf.
        run([
            sys.executable, "-m", "pip", "install", "--upgrade", "--target",
            python_packages_path, "yapf"
        ],
            cwd=third_party_path,
            merge_env=pip_env)
    finally:
        # Remove the temporary pip installation even if an install step
        # failed (previously the directory leaked on error).
        rmtree(temp_python_home)
def sync_delete(a, b, ignore=None):
    """Delete `a` (file or directory tree) wherever `b` lacks a counterpart.

    Recurses through directory `a`; any entry whose mirrored path under `b`
    does not exist is removed.  `ignore` may be a callable ``(a, b) -> bool``
    that exempts a path (and its subtree) from deletion.

    Returns True on success or nothing-to-do, False on the first failure.
    """
    if callable(ignore) and ignore(a, b):
        return True
    # Directory: remove wholesale when the mirror is missing, else recurse.
    if os.path.isdir(a):
        if not os.path.isdir(b):
            try:
                logger.info('remove dir %s' % a)
                util.rmtree(a)
                return True
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            except Exception:
                logger.error('failed to remove dir "%s"' % a)
                return False
        else:
            for f in os.listdir(a):
                if not sync_delete(
                        os.path.join(a, f), os.path.join(b, f), ignore):
                    return False
            return True
    # File: remove when the mirror is missing.
    elif os.path.isfile(a):
        if not os.path.isfile(b):
            try:
                logger.info('remove file %s' % a)
                os.remove(a)
                return True
            except Exception:
                logger.error('failed to remove file "%s"' % a)
                return False
        else:
            return True
    else:
        # Sockets, symlink loops, vanished paths, etc. are left alone.
        logger.info('%s is not dir or file, and will not be removed' % a)
        return True
def main():
    """Entry point: run the filtered integration test suite for Deno."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--filter", help="Run specific tests")
    parser.add_argument(
        "--release", help="Use release build of Deno", action="store_true")
    parser.add_argument("--executable", help="Use external executable of Deno")
    args = parser.parse_args()

    target = "release" if args.release else "debug"
    build_dir = os.environ.get("DENO_BUILD_PATH",
                               os.path.join(root_path, "target", target))

    # Begin every run with a pristine test DENO_DIR.
    deno_dir = os.path.join(build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir

    loader = unittest.TestLoader()
    test_names = [
        name for name in loader.getTestCaseNames(TestIntegrations)
        if not args.filter or args.filter in name
    ]
    suite = loader.loadTestsFromNames(test_names, module=TestIntegrations)

    with spawn():
        outcome = ColorTextTestRunner(verbosity=2).run(suite)
        if not outcome.wasSuccessful():
            sys.exit(1)
def main():
    """Entry point: run integration tests against a Deno executable.

    The build directory comes from DENO_BUILD_PATH when set, otherwise from
    the cargo target directory for the chosen profile; --executable
    overrides the binary found there.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--filter", help="Run specific tests")
    parser.add_argument(
        "--release", help="Use release build of Deno", action="store_true")
    parser.add_argument("--executable", help="Use external executable of Deno")
    args = parser.parse_args()

    target = "release" if args.release else "debug"
    # Idiom fix: replaced the None-init/if-else dance with os.environ.get
    # (same behavior, and consistent with the sibling main() in this file).
    build_dir = os.environ.get("DENO_BUILD_PATH",
                               os.path.join(root_path, "target", target))

    # Begin every run with a pristine test DENO_DIR.
    deno_dir = os.path.join(build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir

    deno_exe = os.path.join(build_dir, "deno" + executable_suffix)
    if args.executable:
        deno_exe = args.executable

    http_server.spawn()
    integration_tests(deno_exe, args.filter)
def save(config, lsfConfig, path='/tmp'):
    """Collect LSF log archives (via log_collector.py) into `path`.

    Does nothing unless lsf.conf defines LSF_LOGDIR and that directory
    actually exists on disk.
    """
    logger = util.getLogger(__name__)

    # Guard clauses: bail out early when there is nothing to collect.
    if 'LSF_LOGDIR' not in lsfConfig:
        logger.warn(
            'LSF_LOGDIR is not set in lsf.conf. No logs will be collected.')
        return
    logDir = lsfConfig.get('LSF_LOGDIR', '')
    if not os.path.isdir(logDir):
        logger.error("LSF_LOGDIR directory '%s' does not exist.", logDir)
        return

    lsfDC = Data_Collect(path, __name__)
    # Stage collected archives in a scratch directory first.
    tempFolder = tempfile.mkdtemp(prefix='lc_')
    collector_args = [
        os.path.join(sys.path[0], 'log_collector.py'), 'deploy', tempFolder,
        socket.gethostname(), 'lsf'
    ]
    log_collector = imp.load_source(
        __name__ + '.log_collector',
        os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'log_collector.py'))
    log_collector.main(collector_args)

    # Hand every produced archive over to Data_Collect, then clean up.
    for archive in iglob(os.path.join(tempFolder, '*.lsf.logs.*.tar.gz')):
        lsfDC.moveit(archive)
    util.rmtree(tempFolder)
def main():
    """Entry point: run the whole test-case list behind a local HTTP server."""
    args = parse_test_args()

    # Ensure the test DENO_DIR starts out empty.
    deno_dir = os.path.join(args.build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir

    enable_ansi_colors()

    test_cases = [
        TestSetup,
        TestUtil,
        TestTarget,
        JsUnitTests,
        TestFetch,
        TestRepl,
        TestDenoDir,
        TestBenchmark,
        TestIsTty,
    ]
    test_cases.extend(permission_prompt_tests())
    test_cases.extend(complex_permissions_tests())
    # It is very slow, so do TestFmt at the end.
    test_cases.append(TestFmt)

    with http_server.spawn():
        run_tests(test_cases)
def test_deno_dir(self):
    """Deno must not create DENO_DIR when the env flag is absent."""
    scratch = mkdtemp()
    if os.path.isdir(scratch):
        rmtree(scratch)
    # Run deno without the DENO_DIR env flag; the directory must not appear.
    self.run_deno()
    assert not os.path.isdir(scratch)
def main(argv):
    """Run the full Deno test suite.

    argv -- complete argument vector; argv[1] optionally names the build dir.
    """
    if len(argv) == 2:
        # Bug fix: previously read sys.argv[1], silently ignoring the argv
        # parameter this function was given.
        build_dir = argv[1]
    elif len(argv) == 1:
        build_dir = build_path()
    else:
        # print() with a single argument behaves identically on Py2 and Py3.
        print("Usage: tools/test.py [build_dir]")
        sys.exit(1)

    # Start with a clean DENO_DIR so cached state can't leak into the tests.
    deno_dir = os.path.join(build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir
    enable_ansi_colors()
    http_server.spawn()

    deno_exe = os.path.join(build_dir, "deno" + executable_suffix)
    check_exists(deno_exe)

    # Internal tools testing
    setup_test()
    util_test()
    benchmark_test(build_dir, deno_exe)

    test_cc = os.path.join(build_dir, "test_cc" + executable_suffix)
    check_exists(test_cc)
    run([test_cc])

    test_rs = os.path.join(build_dir, "test_rs" + executable_suffix)
    check_exists(test_rs)
    run([test_rs])

    unit_tests(deno_exe)
    prefetch_test(deno_exe)
    fmt_test(deno_exe)
    integration_tests(deno_exe)

    # TODO We currently skip testing the prompt and IsTTY in Windows
    # completely. Windows does not support the pty module used for testing
    # the permission prompt.
    if os.name != 'nt':
        from permission_prompt_test import permission_prompt_test
        from is_tty_test import is_tty_test
        permission_prompt_test(deno_exe)
        is_tty_test(deno_exe)

    repl_tests(deno_exe)

    rmtree(deno_dir)
    deno_dir_test(deno_exe, deno_dir)
    test_no_color(deno_exe)
def check_dir(path, force):
    """Ensure `path` exists as a directory.

    With force=True an existing directory is wiped and recreated (after a
    warning).  Without force, an existing directory is kept as-is (with a
    warning); a missing one is simply created.
    """
    if not exists(path):
        makedirs(path)
        return
    if force:
        warnings.warn(f'{path} exists and will be overwritten')
        rmtree(path)
        makedirs(path)
    else:
        warnings.warn(
            f'{path} exists, --force not specified, continuing with existing directory'
        )
def test_deno_dir(self):
    """DENO_DIR is created (with deps/ and gen/) only when the flag is set."""
    scratch = mkdtemp()
    if os.path.isdir(scratch):
        rmtree(scratch)

    # Without the DENO_DIR env flag nothing should be created.
    self.run_deno()
    assert not os.path.isdir(scratch)

    # With the DENO_DIR env flag the directory and its sub-dirs must exist.
    self.run_deno(scratch)
    assert os.path.isdir(scratch)
    assert os.path.isdir(os.path.join(scratch, "deps"))
    assert os.path.isdir(os.path.join(scratch, "gen"))
    rmtree(scratch)
def swap(previous, current, next_):
    """Rotate installation slots: current -> previous, next_ -> current.

    Moves /app, /lib and /VERSION between the three slot directories.
    `next_` is always removed afterwards, even if the rotation fails.
    """
    try:
        # Empty the `previous` slot before rotating into it.
        if util.exists(previous):
            util.rmtree(previous)
        os.mkdir(previous)
        for entry in ('/app', '/lib', '/VERSION'):
            if util.exists(current + entry):
                os.rename(current + entry, previous + entry)
            if util.exists(next_ + entry):
                os.rename(next_ + entry, current + entry)
                print('updated: `%s`' % (current + entry))
            gc.collect()
    finally:
        if util.exists(next_):
            util.rmtree(next_)
def run_gclient_sync():
    """Run `gclient sync` with depot_tools parked out of the way.

    depot_tools normally tries to self-update, which fails because our copy
    is not checked out from its own git repository; gclient then attempts a
    repair and leaves a mess.  Workaround: rename depot_tools to
    {root_path}/depot_tools_temp and set DEPOT_TOOLS_UPDATE=0 so no
    self-update is attempted.  Since depot_tools is listed in
    .gclient_entries, gclient installs a fresh copy into
    third_party/depot_tools; the temp directory is removed afterwards.
    """
    depot_tools_temp_path = os.path.join(root_path, "depot_tools_temp")

    # Park the current depot_tools checkout.
    try:
        os.rename(depot_tools_path, depot_tools_temp_path)
    except OSError:
        # A leftover temp dir from a half-finished earlier run is usable
        # as-is; anything else is a real error.  (Users can recover from a
        # bad leftover by deleting the temp directory manually.)
        if not os.path.isdir(depot_tools_temp_path):
            raise

    sync_cmd = [
        "gclient", "sync", "--reset", "--shallow", "--no-history", "--nohooks"
    ]
    extra_env = {
        "DEPOT_TOOLS_UPDATE": "0",
        "GCLIENT_FILE": os.path.join(root_path, "gclient_config.py")
    }
    env = google_env(
        depot_tools_path_=depot_tools_temp_path, merge_env=extra_env)
    run(sync_cmd, cwd=third_party_path, env=env)

    # Only delete depot_tools_temp after verifying that gclient really did
    # install a fresh copy, and that the temp path itself holds gclient.py —
    # so a typo in this script can't blow out someone's home dir.
    if (os.path.isdir(os.path.join(depot_tools_path, ".git"))
            and os.path.isfile(os.path.join(depot_tools_path, "gclient.py"))
            and os.path.isfile(
                os.path.join(depot_tools_temp_path, "gclient.py"))):
        rmtree(depot_tools_temp_path)
def save(path='/tmp'):
    """Collect EGO/Symphony log archives from the management hosts into `path`.

    Hosts are discovered from EGO_MASTER_LIST in ego.conf plus the
    ManagementHosts resource group reported by `egosh`.
    """
    logger = util.getLogger(__name__)
    hosts = set()

    egoConfDir = os.getenv('EGO_CONFDIR', '')
    if not egoConfDir:
        logger.error(
            'Could not determine the EGO conf directory. Failed to collect Symphony log files.'
        )
        return
    egoConfFile = os.path.join(egoConfDir, 'ego.conf')
    if not os.path.isfile(egoConfFile):
        logger.error(
            "Could not find ego.conf. Failed to collect Symphony log files.")
        return

    # Master hosts from ego.conf (best effort; the key may be absent or
    # malformed).  Was a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit; narrowed to Exception.
    config = configfile.getConfigFromFile(egoConfFile)
    try:
        for candidate in config['EGO_MASTER_LIST'].strip('"').split():
            hosts.add(socket.getfqdn(candidate))
    except Exception:
        pass

    # Management hosts reported by egosh, if the command produced output.
    egoDC = Data_Collect(path, __name__)
    out = egoDC.runit('egosh rg ManagementHosts')
    if out:
        for line in out.splitlines():
            if line.startswith('Resource List:'):
                for host in line.split(':', 1)[-1].split():
                    hosts.add(socket.getfqdn(host))
                break

    # Deploy the log collector against every host, staging into a temp dir.
    tempFolder = tempfile.mkdtemp(prefix='lc_')
    hostList = ' '.join(hosts)
    args = [
        os.path.join(sys.path[0], 'log_collector.py'), 'deploy', tempFolder,
        hostList, 'ego'
    ]
    log_collector = imp.load_source(
        __name__ + '.log_collector',
        os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'log_collector.py'))
    log_collector.main(args)

    for filePattern in iglob(os.path.join(tempFolder, '*.ego.logs.*.tar.gz')):
        egoDC.moveit(filePattern)
    util.rmtree(tempFolder)
def run_gclient_sync():
    """Run `gclient sync` with depot_tools parked out of the way.

    depot_tools normally self-updates, which fails because our copy is not
    checked out from its own git repo; gclient then tries to repair it and
    leaves a mess.  Workaround: rename depot_tools to
    {root_path}/depot_tools_temp and set DEPOT_TOOLS_UPDATE=0.  gclient
    (which lists depot_tools in .gclient_entries) installs a fresh copy into
    third_party/depot_tools; the temp directory is removed afterwards.
    """
    depot_tools_temp_path = root("depot_tools_temp")

    # Park the current checkout.
    try:
        os.rename(depot_tools_path, depot_tools_temp_path)
    except OSError:
        # A leftover temp dir from a prior half-finished run is usable
        # as-is; anything else is a genuine failure.  (A bad leftover can be
        # removed manually to recover.)
        if not path.isdir(depot_tools_temp_path):
            raise

    sync_cmd = [
        "gclient", "sync", "--reset", "--shallow", "--no-history", "--nohooks"
    ]
    extra_env = {
        'DEPOT_TOOLS_UPDATE': "0",
        'GCLIENT_FILE': root("gclient_config.py")
    }
    env = google_env(
        depot_tools_path_=depot_tools_temp_path, merge_env=extra_env)
    run(sync_cmd, cwd=third_party_path, env=env)

    # Delete depot_tools_temp only after confirming a fresh checkout exists
    # and the temp path really holds gclient.py, so a typo in this script
    # can't blow out someone's home dir.
    if (path.isdir(path.join(depot_tools_path, ".git"))
            and path.isfile(path.join(depot_tools_path, "gclient.py"))
            and path.isfile(path.join(depot_tools_temp_path, "gclient.py"))):
        rmtree(depot_tools_temp_path)
def main(argv):
    """Run the Deno test suite (including the no-snapshot deno_ns binary).

    argv -- complete argument vector; argv[1] optionally names the build dir.
    """
    if len(argv) == 2:
        # Bug fix: previously read sys.argv[1], silently ignoring the argv
        # parameter this function was given.
        build_dir = argv[1]
    elif len(argv) == 1:
        build_dir = build_path()
    else:
        # print() with a single argument behaves identically on Py2 and Py3.
        print("Usage: tools/test.py [build_dir]")
        sys.exit(1)

    # Start with a clean DENO_DIR so cached state can't leak into the tests.
    deno_dir = os.path.join(build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir
    enable_ansi_colors()
    http_server.spawn()

    deno_exe = os.path.join(build_dir, "deno" + executable_suffix)
    check_exists(deno_exe)
    deno_ns_exe = os.path.join(build_dir, "deno_ns" + executable_suffix)
    check_exists(deno_ns_exe)

    # Internal tools testing
    setup_test()
    util_test()
    benchmark_test(build_dir, deno_exe)

    test_cc = os.path.join(build_dir, "test_cc" + executable_suffix)
    check_exists(test_cc)
    run([test_cc])

    test_rs = os.path.join(build_dir, "test_rs" + executable_suffix)
    check_exists(test_rs)
    run([test_rs])

    unit_tests(deno_exe)
    check_output_test(deno_exe)
    check_output_test(deno_ns_exe)

    rmtree(deno_dir)
    deno_dir_test(deno_exe, deno_dir)
def recipe(package):
    """Build tinyxml2 (static target) from a fresh git clone using NMake."""
    path = package[attributes.a_location]
    # Always start from a clean checkout.
    if exists(path):
        # Modernized py2-only `print '...'` statements to print() calls,
        # which emit the same output on both Python 2 and 3 (single arg).
        print('Removing old files...')
        util.rmtree(path)
    util.run('git clone %s "%s"' % (git_repo, path))

    build = join(path, 'build')
    if exists(build):
        print('Removing build dir...')
        util.rmtree(build)
    os.makedirs(build)

    # No cmake -D options are currently defined; the expression is kept as a
    # template for adding (key, value) pairs later.
    options = ' '.join('-D %s=%s' % (k, v) for k, v in [])
    util.run('cmake -G"NMake Makefiles" %s ..' % options, cwd=build)
    util.run('nmake tinyxml2static', cwd=build)
def download_and_extract(url, package, prefix=None):
    """Download `url` into the project cache and extract it to the package
    location, replacing any previous extraction.

    Returns the destination directory.
    """
    cache_path = join(paths.cache, package.project.id)
    if not exists(cache_path):
        makedirs(cache_path)

    # Only hit the network when the archive is not cached yet.
    path = download.url_filename(url)
    path = join(cache_path, path)
    if not exists(path):
        download.download(url, path)

    dest = package[standard.a_location]
    # Bug fix: a leftover `print 'DEBUG'; return dest` short-circuit made the
    # removal and re-extraction below unreachable whenever dest already
    # existed, so stale files were never refreshed.
    if exists(dest):
        print('Removing old files...')
        util.rmtree(dest)
    print('Extracting...')
    download.extract(path, dest, prefix)
    return dest
def recipe(package):
    # Build CEGUI from a fresh Mercurial clone using NMake.
    path = package[standard.a_location]
    # Always start from a clean checkout.
    if exists(path):
        print 'Removing old files...'
        util.rmtree(path)
    util.run('hg clone %s "%s"' % (hg_repo, path))
    build = join(path, 'build')
    # NOTE(review): both cleanup passes below are disabled with `if 0:`.
    # Since the clone above is always fresh, the build dirs should never
    # pre-exist; confirm before re-enabling.
    if 0:
        if exists(build):
            print 'Removing build dir...'
            util.rmtree(build)
    os.makedirs(build)
    # The actual cmake/nmake run happens in build/release.
    build = join(build, 'release')
    if 0:
        if exists(build):
            print 'Removing build dir...'
            util.rmtree(build)
    os.makedirs(build)
    # CEGUI build configuration: build type from the package attribute
    # (title-cased, e.g. 'Release'), tinyxml as XML parser, and search
    # paths taken verbatim from the %INCLUDE%/%LIB% environment variables.
    config = package[standard.a_configuration].title()
    options = [('CEGUI_BUILD_TYPE', config),
               ('CEGUI_BUILD_XMLPARSER_TINYXML', '1'),
               ('CMAKE_INCLUDE_PATH', '%INCLUDE%'),
               ('CMAKE_LIBRARY_PATH', '%LIB%')]
    options = ' '.join('-D %s=%s' % (k, v) for k, v in options)
    util.run('cmake -G"NMake Makefiles" %s ..' % options, cwd=build)
    util.run('nmake', cwd=build)
def main(argv):
    """Entry point: assemble and run the unittest suite for Deno."""
    args = test_args(argv)

    # Wipe any stale test DENO_DIR before running.
    deno_dir = os.path.join(args.build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir

    enable_ansi_colors()
    with spawn():
        test_cases = [
            TestSetup,
            TestUtil,
            TestTarget,
            JsUnitTests,
            FetchTest,
            FmtTest,
            TestIntegrations,
            TestRepl,
            TestDenoDir,
            TestBenchmark,
        ]
        # These tests are skipped, but to make the test output less noisy
        # we'll avoid triggering them.
        if os.name != 'nt':
            test_cases.append(TestIsTty)
        test_cases.extend(permission_prompt_tests())
        test_cases.extend(complex_permissions_tests())

        loader = unittest.TestLoader()
        suite = unittest.TestSuite(
            [loader.loadTestsFromTestCase(tc) for tc in test_cases])
        outcome = ColorTextTestRunner(
            verbosity=args.verbosity + 1, failfast=args.failfast).run(suite)
        if not outcome.wasSuccessful():
            sys.exit(1)
def deno_dir_test(deno_exe, deno_dir):
    """Check that deno creates DENO_DIR only when the env var is present."""
    assert os.path.isfile(deno_exe)

    # Temporarily drop DENO_DIR from the environment, remembering its value.
    saved_deno_dir = os.environ.pop("DENO_DIR", None)

    if os.path.isdir(deno_dir):
        rmtree(deno_dir)

    # Without the env flag no directory may be created.
    run_deno(deno_exe)
    assert not os.path.isdir(deno_dir)

    # With the env flag the directory and its sub-dirs must exist.
    run_deno(deno_exe, deno_dir)
    assert os.path.isdir(deno_dir)
    assert os.path.isdir(os.path.join(deno_dir, "deps"))
    assert os.path.isdir(os.path.join(deno_dir, "gen"))
    rmtree(deno_dir)

    # Restore the caller's environment.
    if saved_deno_dir is not None:
        os.environ["DENO_DIR"] = saved_deno_dir
def run_backup(config):
    """
    The primary entry function that starts and runs the backup process.
    This algorithm will go through each input-output pair in the configuration
    individually and run on each. It begins on each pair by marking which files
    will be new, modified, or deleted, then it uses those to check space
    requirements on the disk, then it does the file operations on each file in
    each list.
    :param config: A configuration containing paths to folders to backup.
    """
    print("Initializing...", end="\r", flush=True)
    set_status("Initializing...")
    reset_backup_number()
    log.log("\n" +
            configuration.config_display_string(config, show_exclusions=True))

    # Loop through every entry in the configuration
    for input_number in range(1, config.num_entries() + 1):
        input_path = config.get_entry(input_number).input
        outputs = config.get_entry(input_number).outputs
        for output_path in outputs:
            # True if backing up only one file, false if backing up a directory
            file_mode = os.path.isfile(input_path)

            # Get the name of the folder to make the backup in
            folder_name = os.path.split(input_path)[1]
            if file_mode:
                backup_folder = output_path
            else:
                backup_folder = os.path.join(
                    output_path, folder_name + " " + BACKUP_FOLDER_SUFFIX)

            # Start the log messages
            log.log("\n" + '/' * 60 + "\n///// INPUT: " + input_path +
                    "\n///// OUTPUT: " + backup_folder + "\n" + '/' * 60 +
                    "\n")

            # Mark all the files needed for the backup process
            print(' ' * 40 +
                  "\nPreparing files for backup from {} to {}...".format(
                      input_path, backup_folder))
            # reset_globals() clears the module-level NUM_FILES_* counters
            # that mark_files/backup_files update below.
            reset_globals()
            start_time = time.time()
            if file_mode:
                # If backing up one file, create the backed-up filename here
                filename_no_ext, filename_ext = os.path.splitext(folder_name)
                output_filename = os.path.join(
                    output_path, filename_no_ext + " " +
                    BACKUP_FOLDER_SUFFIX + filename_ext)
                new_files, changed_files, remove_files = mark_files(
                    input_path, output_filename, config, input_number)
            else:
                new_files, changed_files, remove_files = mark_files(
                    input_path, backup_folder, config, input_number)
            set_num_marked(
                len(new_files) + len(changed_files) + len(remove_files))
            if not file_mode:
                print()
            print("File preparation complete.")
            # NUM_FILES_ERROR is a module-level counter updated during
            # preparation (set elsewhere in this module).
            if NUM_FILES_ERROR > 0:
                log.log_print(
                    "There were {} error(s) reported during file preparation.".
                    format(NUM_FILES_ERROR))
                print(
                    "Please check the log file for more info on the individual errors."
                )

            # Check that doing this backup won't over-fill the disk, if it
            # will then return
            has_space, remaining_space, space_difference =\
                check_space_requirements(new_files, changed_files,
                                         remove_files, backup_folder)
            if not has_space:
                drive_letter, tail = os.path.splitdrive(backup_folder)
                error_str = "Copying {} to {} may not fit on the {} drive.".format(
                    input_path, backup_folder, drive_letter)
                error_str += "\nPlease clear up space on the drive you want to copy to and try again."
                error_str += "\nTry clearing at least {} on the {} drive and trying again.".format(
                    util.bytes_to_string(-1 * remaining_space, 3),
                    drive_letter)
                log.log_print("\n" + error_str)
                set_error(error_str)
                set_status(
                    "ERROR: The backup will not fit. Backup process has stopped."
                )
                # Don't leave behind an empty backup folder that this run
                # created; NOTE(review) this aborts ALL remaining entries,
                # not just this pair — confirm that is intended.
                if not file_mode:
                    if util.dir_empty(backup_folder):
                        util.rmtree(backup_folder)
                return

            # Make changes to the files found in file preparation
            print("Backing up files from {} to {}...".format(
                input_path, backup_folder))
            num_errors = backup_files(new_files, changed_files, remove_files)
            end_time = time.time()

            # Backup is complete, report the time taken, space difference,
            # and if any errors occurred
            complete_str = "Backup complete in {}. ".format(
                util.time_string(end_time - start_time))
            if space_difference == 0:
                # Zero net space change can still mean files were touched;
                # only report "(No changes)" when nothing was marked at all.
                if len(remove_files) + len(changed_files) + len(new_files) > 0:
                    complete_str += "({}{})".format(
                        util.sign_string(space_difference),
                        util.bytes_to_string(abs(space_difference),
                                             precision=2))
                else:
                    complete_str += "(No changes)"
            else:
                complete_str += "({}{})".format(
                    util.sign_string(space_difference),
                    util.bytes_to_string(abs(space_difference), precision=2))
            print("\n" + complete_str)
            set_status(complete_str)
            if num_errors > 0:
                log.log_print(
                    "There were {} error(s) reported during the backup.".
                    format(num_errors))
                print(
                    "Please check the log file for more info on the individual errors."
                )

            # Report on any errors and finalize the backup
            final_report_str = "Backup complete: {} files processed, {} new files, {} existing files modified, " + \
                "{} files removed ({}, {}{})"
            log.log(
                final_report_str.format(
                    NUM_FILES_PROCESSED, NUM_FILES_NEW, NUM_FILES_MODIFIED,
                    NUM_FILES_DELETED,
                    util.bytes_to_string(TOTAL_SIZE_PROCESSED, 2),
                    util.sign_string(space_difference),
                    util.bytes_to_string(abs(space_difference), precision=2)))
            if NUM_FILES_ERROR > 0:
                log.log_print(
                    "There were {} error(s) reported during this backup.".
                    format(NUM_FILES_ERROR))
                print(
                    "Please check the log file for more info on the individual errors."
                )
            # Directory backups get a marker text file in the backup folder.
            if not os.path.isfile(input_path):
                create_backup_text_file(backup_folder)
            increment_backup_number()
def main(argv):
    """Run the full Deno test suite (core, tools, units, integration).

    argv -- complete argument vector; argv[1] optionally names the build dir.
    """
    if len(argv) == 2:
        # Bug fix: previously read sys.argv[1], silently ignoring the argv
        # parameter this function was given.
        build_dir = argv[1]
    elif len(argv) == 1:
        build_dir = build_path()
    else:
        # print() with a single argument behaves identically on Py2 and Py3.
        print("Usage: tools/test.py [build_dir]")
        sys.exit(1)

    # Start with a clean DENO_DIR so cached state can't leak into the tests.
    deno_dir = os.path.join(build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir
    enable_ansi_colors()
    http_server.spawn()

    deno_exe = os.path.join(build_dir, "deno" + executable_suffix)
    check_exists(deno_exe)
    exec_path_test(deno_exe)

    # Internal tools testing
    run([
        "node", "./node_modules/.bin/ts-node", "--project",
        "tools/ts_library_builder/tsconfig.json",
        "tools/ts_library_builder/test.ts"
    ])
    setup_test()
    util_test()
    benchmark_test(build_dir, deno_exe)

    test_cc = os.path.join(build_dir, "test_cc" + executable_suffix)
    check_exists(test_cc)
    run([test_cc])

    test_rs = os.path.join(build_dir, "test_rs" + executable_suffix)
    check_exists(test_rs)
    run([test_rs])

    deno_core_test = os.path.join(build_dir,
                                  "deno_core_test" + executable_suffix)
    check_exists(deno_core_test)
    run([deno_core_test])

    unit_tests(deno_exe)
    prefetch_test(deno_exe)
    fmt_test(deno_exe)
    integration_tests(deno_exe)

    # TODO We currently skip testing the prompt and IsTTY in Windows
    # completely. Windows does not support the pty module used for testing
    # the permission prompt.
    if os.name != 'nt':
        from permission_prompt_test import permission_prompt_test
        from is_tty_test import is_tty_test
        permission_prompt_test(deno_exe)
        is_tty_test(deno_exe)

    repl_tests(deno_exe)

    rmtree(deno_dir)
    deno_dir_test(deno_exe, deno_dir)
    test_no_color(deno_exe)
def runClsSnapshot(config):
    """Run every configured snapshot module and bundle the results.

    Loads each module named in config MODULES, runs its save() in a thread,
    then compresses everything produced under the save path into a single
    CL_Snapshot.<timestamp>.tar.gz.  Returns the archive path, or None when
    the save path cannot be created/found.
    """
    logger = util.getLogger(__name__)
    modulePath = os.path.dirname(os.path.abspath(__file__))
    modname = os.path.splitext(os.path.basename(__file__))[0]

    # Resolve the save path: explicit TOP from the config, else a
    # 'snapshots' dir next to this script (created on demand).
    savePath = config.get('TOP', '')
    if not savePath:
        savePath = os.path.join(modulePath, 'snapshots')
        if not os.path.exists(savePath):
            try:
                os.makedirs(savePath)
            except OSError as e:
                # A concurrent creator is fine; anything else is fatal.
                if e.errno == errno.EEXIST:
                    pass
                else:
                    logger.error("Could not create save path %s.", savePath)
                    return None
    elif not os.path.exists(savePath):
        logger.error("The save path %s does not exist.", savePath)
        return None

    # Configure logging: always to <savePath>/<modname>.log, optionally
    # mirrored to stderr when LOG_STDERR is set.
    logFileName = modname + '.log'
    logFile = os.path.join(savePath, logFileName)
    stdErr = config.get('LOG_STDERR', '')
    dbglevel = getattr(logging, config.get('LOG_LEVEL', 'WARN').upper())
    formatter = logging.Formatter(
        '%(asctime)s - %(levelname)s - %(name)s.%(funcName)s() - %(message)s')
    logfh = logging.FileHandler(logFile, mode='a')
    logfh.setFormatter(formatter)
    logger.addHandler(logfh)
    logger.setLevel(dbglevel)
    if stdErr:
        logstderr = logging.StreamHandler(sys.stderr)
        logstderr.setLevel(dbglevel)
        logstderr.setFormatter(
            util.LogFileFormatter('%(name)s: %(levelname)s- %(message)s'))
        logger.addHandler(logstderr)

    # Export the SSH key for the collector modules, if one was configured.
    pKey = config.get('pKey', '')
    if pKey and os.path.isfile(pKey):
        os.putenv('SSH_PRIVATE_KEY', pKey)

    # Load each requested snapshot module from the script directory.
    modules = configfile.getModuleList(config.get('MODULES', ['all']),
                                       allMods)
    mList = list()
    myScriptPath = os.path.dirname(os.path.realpath(__file__))
    for m in modules:
        try:
            mList.append(
                imp.load_source(__name__ + '.' + m,
                                os.path.join(myScriptPath, m + '.py')))
        except ImportError as e:
            logger.exception(
                "Error loading module as defined in CLI parameter '--modules' or snapshot.conf MODULES"
                " parameter. Check that the module %s is valid.", m)

    # Run every module's save() concurrently, then wait for all of them.
    savePath = os.path.abspath(savePath)
    t_list = []
    for mod in mList:
        t_list.append(Thread(target=mod.save, args=[config, savePath]))
        t_list[-1].start()
    for thread in t_list:
        thread.join()
        logger.debug('%s is complete.', thread)

    # Bundle everything the modules produced (except prior archives and the
    # log file) into one gzipped tarball, then delete the originals.
    logger.info('Compressing cluster snapshot...')
    timestr = datetime.now().strftime("%Y-%m-%d-%H%M%S")
    # chdir so the archive member names are relative to savePath.
    cwd = os.getcwd()
    os.chdir(savePath)
    sstarfile = 'CL_Snapshot.%s.tar.gz' % timestr
    tar = tarfile.open(sstarfile, 'w:gz')
    os.chdir(cwd)
    removeDir = []
    for name in os.listdir(savePath):
        if name.endswith('.tar.gz') or name.endswith('.log'):
            continue
        fullname = os.path.join(savePath, name)
        tar.add(fullname, arcname=name)
        removeDir.append(fullname)
    tar.close()
    for f in removeDir:
        # NOTE(review): util.rmtree appears to take a retry count and return
        # success/failure here — confirm against util's implementation.
        if not util.rmtree(f, 5):
            logger.error("Unable to remove '%s'", f)
    logger.info("Compressed to '%s'", sstarfile)
    return os.path.join(savePath, sstarfile)
def rmtree(self, *args):
    # Delete the tree at the path assembled by self.j(*args).
    # NOTE(review): this method shadows the module-level rmtree it
    # delegates to — the inner call resolves to the global, not recursion.
    rmtree(self.j(*args))
def clean():
    """Remove the build artifact directories: install, devtree and build."""
    for artifact_dir in ('install', 'devtree', 'build'):
        rmtree(artifact_dir)
def clean_release():
    # Full clean of the build artifact directories, plus the release output.
    clean()
    rmtree('release')
def backup_files(new_files, changed_files, remove_files):
    """
    The files provided in the given lists will be backed up. This will first
    delete all the files in the remove_files list, then copy over all the
    files in the new_files list, then modify each file in the changed_files
    list.
    :param new_files: A list of new files to backup generated by mark_files().
    :param changed_files: A list of changed files to backup generated by
        mark_files().
    :param remove_files: A list of files to delete from the backup generated
        by mark_files().
    :return: The number of errors that occurred.
    """
    # If there's no changes to make, display a message
    if len(new_files) == 0 and len(changed_files) == 0 and len(
            remove_files) == 0:
        print("No changes are needed.", end="\r", flush=True)

    # Prepare values that will track the progress of each section of the
    # backup.  NUM_FILES_DELETED is a module-level counter set during file
    # preparation.
    num_errors = 0
    count = 0
    limit = NUM_FILES_DELETED

    # Delete every file in the remove list
    for file_tuple in remove_files:
        # NOTE(review): each tuple's element 0 is the path to delete, as
        # produced by mark_files().
        delete_file_path = file_tuple[0]
        # Use the correct delete function based on if it's a file or folder
        try:
            set_status("Deleting {}".format(
                os.path.split(delete_file_path)[1]))
            if os.path.isdir(delete_file_path):
                # util.rmtree returns how many files it deleted, so the
                # progress counter can advance once per deleted file.
                deleted_file_count = util.rmtree(delete_file_path)
                for _ in range(deleted_file_count):
                    count += 1
                    print("Deleting old files: {}/{}".format(count, limit) +
                          ' ' * 20,
                          end="\r",
                          flush=True)
                    increment_backup_progress()
            else:
                os.remove(delete_file_path)
                increment_backup_progress()
                count += 1
                print("Deleting old files: {}/{}".format(count, limit) +
                      ' ' * 20,
                      end="\r",
                      flush=True)
            log.log("DELETED - " + delete_file_path)
        except PermissionError:
            # Log the exception and indicate that an error occurred
            log.log_exception(delete_file_path, "DELETING")
            num_errors += 1

    # Reset the counter values and copy over every file in the new list
    count = 0
    limit = len(new_files)
    for file_tuple in new_files:
        # Tuple layout: element 0 is the source path, element 2 the
        # destination path.
        new_file = file_tuple[0]
        output_path = file_tuple[2]
        try:
            set_status("Copying over {} ({})".format(
                os.path.split(new_file)[1],
                util.bytes_to_string(os.path.getsize(new_file), 2)))
            # copy2 preserves file metadata (timestamps etc.).
            shutil.copy2(new_file, output_path)
            log.log("NEW - " + output_path)
        except PermissionError:
            # Write the full error to the log file and record that an error
            # occurred
            log.log_exception(output_path, "CREATING")
            num_errors += 1
        count += 1
        increment_backup_progress()
        print("Copying over new files: {}/{}".format(count, limit) + ' ' * 20,
              end="\r",
              flush=True)

    # Reset the counter values and overwrite every file in the changed list
    count = 0
    limit = len(changed_files)
    for file_tuple in changed_files:
        new_file = file_tuple[0]
        output_path = file_tuple[2]
        try:
            set_status("Updating {}, ({})".format(
                os.path.split(new_file)[1],
                util.bytes_to_string(os.path.getsize(new_file), 2)))
            shutil.copy2(new_file, output_path)
            log.log("UPDATED - " + output_path)
        except PermissionError:
            # Write the full error to the log file and record that an error
            # occurred
            log.log_exception(output_path, "UPDATING")
            num_errors += 1
        count += 1
        increment_backup_progress()
        print("Updating existing files: {}/{}".format(count, limit) +
              ' ' * 20,
              end="\r",
              flush=True)
    return num_errors
def main(argv):
    """Run the full Deno test suite (build tools, core, cli, benchmarks).

    argv -- complete argument vector; argv[1] optionally names the build dir.
    """
    if len(argv) == 2:
        # Bug fix: previously read sys.argv[1], silently ignoring the argv
        # parameter this function was given.
        build_dir = argv[1]
    elif len(argv) == 1:
        build_dir = build_path()
    else:
        # print() with a single argument behaves identically on Py2 and Py3.
        print("Usage: tools/test.py [build_dir]")
        sys.exit(1)

    # Start with a clean DENO_DIR so cached state can't leak into the tests.
    deno_dir = os.path.join(build_dir, ".deno_test")
    if os.path.isdir(deno_dir):
        rmtree(deno_dir)
    os.environ["DENO_DIR"] = deno_dir
    enable_ansi_colors()
    http_server.spawn()

    deno_exe = os.path.join(build_dir, "deno" + executable_suffix)
    check_exists(deno_exe)

    # Python/build tools testing
    setup_test()
    util_test()
    run([
        "node", "./node_modules/.bin/ts-node", "--project",
        "tools/ts_library_builder/tsconfig.json",
        "tools/ts_library_builder/test.ts"
    ])

    libdeno_test = os.path.join(build_dir, "libdeno_test" + executable_suffix)
    check_exists(libdeno_test)
    run([libdeno_test])

    cli_test = os.path.join(build_dir, "cli_test" + executable_suffix)
    check_exists(cli_test)
    run([cli_test])

    deno_core_test = os.path.join(build_dir,
                                  "deno_core_test" + executable_suffix)
    check_exists(deno_core_test)
    run([deno_core_test])

    deno_core_http_bench_test = os.path.join(
        build_dir, "deno_core_http_bench_test" + executable_suffix)
    check_exists(deno_core_http_bench_test)
    run([deno_core_http_bench_test])

    unit_tests(deno_exe)
    fetch_test(deno_exe)
    fmt_test(deno_exe)
    integration_tests(deno_exe)

    # TODO We currently skip testing the prompt and IsTTY in Windows
    # completely. Windows does not support the pty module used for testing
    # the permission prompt.
    if os.name != 'nt':
        from is_tty_test import is_tty_test
        from permission_prompt_test import permission_prompt_test
        permission_prompt_test(deno_exe)
        is_tty_test(deno_exe)

    repl_tests(deno_exe)

    rmtree(deno_dir)
    deno_dir_test(deno_exe, deno_dir)
    test_no_color(deno_exe)
    benchmark_test(build_dir, deno_exe)
    exec_path_test(deno_exe)
def do_uninstall(package):
    """Remove the package's installed files, if any exist on disk."""
    loc = package[standard.a_location]
    if exists(loc):
        util.rmtree(loc)