def main():
    """Reset the Submitty system back to a blank slate.

    Drops every Submitty database, re-runs the initial migrations and the
    install script, then deletes the sample users and groups created by the
    setup data.  Destructive and non-reversible: prompts for confirmation
    unless run inside a vagrant checkout or with --force.
    """
    if not cmd_exists('psql'):
        raise SystemExit(
            'Postgresql must be installed for this script to run!')
    args = parse_args()
    if not Path(CURRENT_PATH, '..', '..', '.vagrant').is_dir() and not args.force:
        inp = input(
            "Do you really want to reset the system? There's no undo! [y/n]")
        if inp.lower() not in ["yes", "y"]:
            raise SystemExit("Aborting...")
    # ignore_errors=True: the tree may not exist yet on a fresh machine
    shutil.rmtree('/var/local/submitty', True)
    Path(SUBMITTY_DATA_DIR, 'courses').mkdir(parents=True)
    distro_name = distro.id()
    distro_version = distro.lsb_release_attr('codename')
    # Clean out the log files, but leave the folders intact
    if Path(CURRENT_PATH, "..", "..", ".vagrant").is_dir():
        repo_path = (SUBMITTY_REPOSITORY / '.vagrant' / distro_name
                     / distro_version / 'logs' / 'submitty')
        data_path = SUBMITTY_DATA_DIR / 'logs'
        data_path.mkdir()
        if repo_path.exists():
            shutil.rmtree(str(repo_path))
        repo_path.mkdir()
        # Symlink each log folder in the data dir to the repo checkout so
        # the logs are visible (and persist) on the host side.
        for folder in ['autograding', 'access', 'site_errors', 'ta_grading']:
            repo_log_path = repo_path / folder
            data_log_path = data_path / folder
            repo_log_path.mkdir()
            data_log_path.symlink_to(repo_log_path)
    with Path(SUBMITTY_INSTALL_DIR, 'config', 'database.json').open() as submitty_config:
        submitty_config_json = json.load(submitty_config)
    # psql/dropdb pick the password up from the environment
    os.environ['PGPASSWORD'] = submitty_config_json["database_password"]
    db_user = submitty_config_json["database_user"]
    # Kick every open connection off the Submitty databases so they can be
    # dropped.  Use subprocess argument lists instead of os.system shell
    # strings: failures raise CalledProcessError and no shell quoting or
    # injection via db_user is possible.
    query = ("SELECT pg_terminate_backend(pg_stat_activity.pid) "
             "FROM pg_stat_activity "
             "WHERE pg_stat_activity.datname LIKE 'Submitty%' "
             "AND pid <> pg_backend_pid();")
    subprocess.check_call(
        ['psql', '-d', 'postgres', '-U', db_user, '-c', query])
    # Drop every database whose name starts with "submitty" (replaces the
    # old `psql --list | grep | xargs dropdb` shell pipeline).
    db_list = subprocess.check_output(['psql', '-U', db_user, '--list'],
                                      universal_newlines=True).split("\n")
    for db_row in db_list:
        db_name = db_row.strip().split('|')[0].strip()
        if db_name.startswith('submitty'):
            subprocess.check_call(
                ['dropdb', '-h', 'localhost', '-U', db_user, db_name])
    subprocess.check_call(['psql', '-d', 'postgres', '-U', db_user,
                           '-c', 'CREATE DATABASE submitty'])
    migrator_script = str(SUBMITTY_REPOSITORY / 'migration' / 'migrator.py')
    subprocess.check_call([
        'python3', migrator_script, '-e', 'system', '-e', 'master',
        'migrate', '--initial'
    ])
    del os.environ['PGPASSWORD']
    subprocess.check_call(
        ['bash', str(SUBMITTY_INSTALL_DIR / '.setup' / 'INSTALL_SUBMITTY.sh')])
    # Remove the sample users created by the setup data
    for user_file in Path(args.users_path).glob('*.yml'):
        user = load_data_yaml(user_file)
        delete_user(user['user_id'])
    random_users = SETUP_DATA_PATH / 'random_users.txt'
    if random_users.is_file():
        with random_users.open() as open_file:
            for line in open_file:
                delete_user(line.strip())
    # Collect per-course groups and purge their queued grading files
    groups = []
    for course_file in Path(args.courses_path).glob('*.yml'):
        course = load_data_yaml(course_file)
        groups.append(course['code'])
        groups.append(course['code'] + "_archive")
        groups.append(course['code'] + "_tas_www")
        for queue in ['to_be_graded_queue']:
            for queue_file in Path(SUBMITTY_DATA_DIR, queue).glob(
                    "*__{}__*".format(course['code'])):
                queue_file.unlink()
    for group in groups:
        # Best-effort (plain call, not check_call): groupdel fails
        # harmlessly when the group does not exist.  Argument list avoids
        # shell interpolation of the group name.
        subprocess.call(['groupdel', group])
import distro
import platform

# System parameters, derived from the running distribution
OS_RELEASE = distro.lsb_release_attr('codename')
PLATFORM_ARCH = platform.machine()
OS_DISTRIBUTION = distro.lsb_release_attr('distributor_id').lower()
# NOTE(review): takes only the first character of the release string
# (e.g. "7.9" -> "7") — presumably the CentOS major version; breaks for
# two-digit majors.  TODO confirm against supported distros.
OS_VERSION = distro.lsb_release_attr('release')[0]

# Download location of the nginx source code
NGINX_URL = "http://nginx.org/download"
NGINX_SRPM_URL = f"http://nginx.org/packages/mainline/centos/{OS_VERSION}/SRPMS"

# Version of the packaging-scripts archive
PACKAGE_SCRIPTS_VERSION = "1.13.9"

# Archive with the scripts used to build the package
DEB_PACKAGE_SCRIPTS_URL = (
    "http://nginx.org/packages/mainline/"
    f"{OS_DISTRIBUTION}/pool/nginx/n/nginx/"
    f"nginx_{PACKAGE_SCRIPTS_VERSION}-1~{OS_RELEASE}.debian.tar.xz"
)
DEB_PACKAGE_SCRIPTS_FILENAME = DEB_PACKAGE_SCRIPTS_URL.rsplit("/", 1)[-1]

# Directory where the package is built
SRC_PATH = "/usr/src/nginx"

# Error build code
DPKG_FAIL_EXIT_CODE = 29

# nginx compilation parameters
# NOTE(review): this line is a whitespace-mangled chunk.  It contains, in
# order: (1) the tail of a download-commands builder whose `def` header is
# not visible in this file view (it assembles `<script> boost<dir> <pkg>
# <dest>/boost` install commands, prepends an `rm -rf .../boost/install`,
# and joins them with ' && '); (2) ubuntu_identify_boost_version(), which
# scrapes http://ports.ubuntu.com indices via wget for a
# libboost-system<X.Y.Z> package name and returns the X.Y.Z version, or ''
# when not found (the `index` parameter is unused — 'main' is hard-coded
# in the URL, and the regex strings should be raw strings); and (3)
# top-level dispatch on distro.id() that picks commands_to_download for
# fedora/ubuntu/centos, exiting with status 1 otherwise ("Cound" in the
# error message is a typo for "Could").  Left byte-identical: the leading
# fragment cannot be reformatted safely without its missing `def` header.
install_commands = ['%s boost%s %s %s/boost' % (script_path, boost_package_directory, package, destination) for package in boost_packages] install_commands = ['rm -rf %s/boost/install' % destination] + install_commands return ' && '.join(install_commands) def ubuntu_identify_boost_version(codename, index): packages = subprocess.check_output(['wget', '-t', '1', '-qO-', 'http://ports.ubuntu.com/indices/override.%s.%s' % (codename, 'main')]).decode('utf-8') libboost_system_package = re.search("libboost-system\d+\.\d+\.\d+", packages) if libboost_system_package: libboost_system_package_name = libboost_system_package.group() return re.search('\d+\.\d+\.\d+', libboost_system_package_name).group() else: return '' name = distro.id() version = distro.version() codename = distro.lsb_release_attr('codename') if name.lower() == 'fedora': commands_to_download = fedora_download_commands(version) elif name.lower() == 'ubuntu': boost_version = ubuntu_identify_boost_version(codename, 'main') if boost_version == '': boost_version = ubuntu_identify_boost_version(codename, 'universe') if boost_version == '': print("Cound not find boost version from neither main nor universe ports index!") sys.exit(1) commands_to_download = ubuntu_download_commands(boost_version) elif name.lower() == "centos": commands_to_download = [ 'bash -eu %s/scripts/download_aarch64_toolchain.sh' % osv_root ] else: print("The distribution %s is not supported for cross-compiling aarch64 version of OSv" % name) sys.exit(1)
def main():
    """Reset the Submitty system back to a blank slate.

    Stops the running submitty services, drops every Submitty database,
    re-runs the initial migrations and the install script, then deletes
    the sample users and groups created by the setup data.  Destructive
    and non-reversible: prompts for confirmation unless run inside a
    vagrant checkout or with --force.
    """
    if not cmd_exists('psql'):
        raise SystemExit(
            'Postgresql must be installed for this script to run!')
    args = parse_args()
    if not Path(CURRENT_PATH, '..', '..', '.vagrant').is_dir() and not args.force:
        inp = input(
            "Do you really want to reset the system? There's no undo! [y/n]")
        if inp.lower() not in ["yes", "y"]:
            raise SystemExit("Aborting...")
    # Stop every running submitty_* systemd service before wiping data
    services = subprocess.check_output(
        ["systemctl", "list-units", "--type=service"],
        universal_newlines=True).strip().split("\n")
    running_services = []
    for service in services:
        # drop the two-character status-bullet column of list-units output
        service = service[2:].strip()
        if "submitty_" not in service:
            continue
        if "running" not in service:
            continue
        service = service.split()[0]
        running_services.append(service)
        subprocess.check_call(["systemctl", "stop", service])
    # ignore_errors=True: the tree may not exist yet on a fresh machine
    shutil.rmtree('/var/local/submitty', True)
    Path(SUBMITTY_DATA_DIR, 'courses').mkdir(parents=True)
    distro_name = distro.id()
    distro_version = distro.lsb_release_attr('codename')
    # Clean out the log files, but leave the folders intact
    if Path(CURRENT_PATH, "..", "..", ".vagrant").is_dir():
        repo_path = Path(SUBMITTY_REPOSITORY, '.vagrant', distro_name,
                         distro_version, 'logs', 'submitty')
        data_path = SUBMITTY_DATA_DIR / 'logs'
        if repo_path.exists():
            shutil.rmtree(str(repo_path))
        repo_path.mkdir()
        # Logs live in the repo checkout; the data dir just points at them
        data_path.symlink_to(repo_path)
    with Path(SUBMITTY_INSTALL_DIR, 'config', 'database.json').open() as submitty_config:
        submitty_config_json = json.load(submitty_config)
    # psql/dropdb pick the password up from the environment
    os.environ['PGPASSWORD'] = submitty_config_json["database_password"]
    db_user = submitty_config_json["database_user"]
    # Kick every open connection off the Submitty databases so they can be
    # dropped
    query = """
SELECT pg_terminate_backend(pg_stat_activity.pid)
FROM pg_stat_activity
WHERE pg_stat_activity.datname LIKE 'Submitty%' AND pid <> pg_backend_pid();
"""
    subprocess.check_call(
        ['psql', '-d', 'postgres', '-U', db_user, '-c', query])
    # [3:] skips the header lines of `psql --list` output
    db_list = subprocess.check_output(
        ['psql', '-U', db_user, '--list'],
        universal_newlines=True).split("\n")[3:]
    for db_row in db_list:
        db_name = db_row.strip().split('|')[0].strip()
        if not db_name.startswith('submitty'):
            continue
        subprocess.check_call(
            ['dropdb', '-h', 'localhost', '-U', db_user, db_name])
    subprocess.check_call([
        'psql', '-d', 'postgres', '-U', db_user, '-c',
        'CREATE DATABASE submitty'
    ])
    migrator_script = str(SUBMITTY_REPOSITORY / 'migration' / 'run_migrator.py')
    subprocess.check_call([
        'python3', migrator_script, '-e', 'system', '-e', 'master',
        'migrate', '--initial'
    ])
    del os.environ['PGPASSWORD']
    subprocess.check_call(
        ['bash', str(SUBMITTY_INSTALL_DIR / '.setup' / 'INSTALL_SUBMITTY.sh')])
    # Remove the sample users created by the setup data
    for user_file in Path(args.users_path).glob('*.yml'):
        user = load_data_yaml(user_file)
        delete_user(user['user_id'])
    random_users = SETUP_DATA_PATH / 'random_users.txt'
    if random_users.is_file():
        with random_users.open() as open_file:
            for line in open_file:
                delete_user(line.strip())
    # Collect per-course groups and purge their queued grading files
    groups = []
    for course_file in Path(args.courses_path).glob('*.yml'):
        course = load_data_yaml(course_file)
        groups.append(course['code'])
        groups.append(course['code'] + "_archive")
        groups.append(course['code'] + "_tas_www")
        for queue in ['to_be_graded_queue']:
            queue_path = Path(SUBMITTY_DATA_DIR, queue)
            for queue_file in queue_path.glob("*__{}__*".format(
                    course['code'])):
                queue_file.unlink()
    for group in groups:
        # was: os.system('groupdel ' + group) — an argument list avoids
        # shell interpolation of the YAML-sourced group name; plain call()
        # (not check_call) keeps the old best-effort, ignore-failure
        # semantics for groups that no longer exist
        subprocess.call(['groupdel', group])