Exemplo n.º 1
0
def upload_pending_repository_content(server: str, sourcePath: str, remoteDestinationPath: str) -> None:
    """Replace the pending repository content on 'server' with the contents of 'sourcePath'."""
    log.info("Uploading pending repository content from: [%s] -> [%s:%s]", sourcePath, server, remoteDestinationPath)
    # Staging uploads always start from a clean slate: wipe any old remote content first
    delete_remote_paths(server, [remoteDestinationPath])
    # then re-create the repository directory on the remote host
    create_remote_paths(server, [remoteDestinationPath])
    # push the new content; trailing "/" makes rsync copy the directory contents, not the dir itself
    rsyncCmd = ['rsync', '-avzh', sourcePath + "/", server + ":" + remoteDestinationPath]
    exec_cmd(rsyncCmd, timeout=60 * 60)  # allow up to 60 minutes for the transfer
Exemplo n.º 2
0
def create_remote_script(server: str, cmd: List[str], remoteScriptPath: str, scriptFileName: str) -> str:
    """Generate a bash script that runs 'cmd' and upload it to 'server'.

    Args:
        server: remote host (rsync target).
        cmd: command and arguments to embed in the script, joined with spaces.
        remoteScriptPath: remote directory to upload the script into (created if missing).
        scriptFileName: file name for the generated script.

    Returns:
        The path of the uploaded script on the remote server.
    """
    with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpBaseDir:
        tempFilePath = os.path.join(tmpBaseDir, scriptFileName)
        with open(tempFilePath, 'w+') as f:
            f.write("#!/usr/bin/env bash\n")
            f.write(' '.join(cmd))
        os.chmod(tempFilePath, 0o755)  # make the script executable before upload
        create_remote_paths(server, [remoteScriptPath])
        # Fix: don't reassign the 'cmd' parameter; use a dedicated name for the rsync command
        rsyncCmd = ['rsync', '-avzh', tempFilePath, server + ":" + remoteScriptPath]
        exec_cmd(rsyncCmd, timeout=60 * 60)
        return os.path.join(remoteScriptPath, scriptFileName)
Exemplo n.º 3
0
def create_remote_repository_backup(server: str, remote_repo_path: str) -> str:
    """Move the remote repository aside as a snapshot backup.

    Only one backup is kept: an existing backup at the derived path is deleted
    before the current repository is moved into its place.

    Args:
        server: remote host the repository lives on.
        remote_repo_path: path of the repository to back up.

    Returns:
        The backup path on the remote server.
    """
    # Fix: return annotation was '-> None' although the function returns the path.
    # Also, os.path.join() with a single argument is a no-op; plain concatenation is clearer.
    backup_path = remote_repo_path + "____snapshot_backup"
    # if there exists a backup already then delete it, we keep only one backup
    if remote_repository_exists(server, backup_path):
        log.info("Deleting old backup repo: %s", backup_path)
        delete_remote_paths(server, [backup_path])
    # move the repo as backup
    cmd = get_remote_login_cmd(server) + ['mv', '-v', remote_repo_path, backup_path]
    exec_cmd(cmd, timeout=60 * 60)  # give it 60 mins
    log.info("Moved remote repository as backup: %s:%s -> %s", server, remote_repo_path, backup_path)
    return backup_path
Exemplo n.º 4
0
def reset_new_remote_repository(server: str, remoteSourceRepoPath: str, remoteTargetRepoPath: str) -> None:
    """Copy the source repository into a fresh target repository on 'server'.

    The source repository must exist; an existing target repository is first
    moved aside as a snapshot backup.
    """
    if not remote_repository_exists(server, remoteSourceRepoPath):
        raise PackagingError("The remote source repository path did not exist on the server: {0}:{1}".format(server, remoteSourceRepoPath))
    if remote_repository_exists(server, remoteTargetRepoPath):
        # the current repo is _moved_ out of the way as a backup
        create_remote_repository_backup(server, remoteTargetRepoPath)

    log.info("Reset new remote repository: source: [%s] target: [%s]", remoteSourceRepoPath, remoteTargetRepoPath)
    create_remote_paths(server, [remoteTargetRepoPath])
    # '/*' is expanded by the remote shell so the directory contents are copied recursively
    copyCmd = get_remote_login_cmd(server) + ['cp', '-Rv', remoteSourceRepoPath + '/*', remoteTargetRepoPath]
    exec_cmd(copyCmd, timeout=60 * 60)  # allow up to 60 minutes
Exemplo n.º 5
0
    async def test_exec_cmd(self) -> None:
        """Verify exec_cmd captures stdout and async_exec_cmd enforces its timeout."""
        # the synchronous helper should return the command's stdout
        self.assertEqual(exec_cmd(['echo', "TEST"]), "TEST")

        # the async variant must raise when the command outlives the timeout
        slowCmd = ['sleep', '2']
        with self.assertRaises(asyncio.TimeoutError):
            await async_exec_cmd(slowCmd, timeout=1)
Exemplo n.º 6
0
async def create_converted_repositories(
        repogen: str,
        repositories_to_migrate: List[str],
        dry_run=False) -> Tuple[Dict[str, str], Dict[str, str]]:
    """Convert the given repositories to combined ("united") metadata with repogen.

    Refuses to start if any repository already has a pending converted copy.

    Returns:
        A pair of dicts (successful, failed), each mapping a source repository
        path to its pending output path.
    """
    log.info(
        f"Starting to create new converted repositories: {len(repositories_to_migrate)}"
    )
    # a pending (converted) repository must not already exist per given repository
    updatable_repos, existing_pending_repos = check_repos_which_can_be_updated(
        repositories_to_migrate)
    if existing_pending_repos:
        log.warning(
            "There are already existing pending repositories which should be completed first:"
        )
        for repo in existing_pending_repos:
            log.warning(f"  {repo}")
        raise IfwRepoUpdateError(
            "Repositories found in pending state, complete those first!")

    # convert every updatable repository to the combined metadata version
    successful_conversions: Dict[str, str] = {}
    failed_conversions: Dict[str, str] = {}
    for repo in updatable_repos:
        # the "pending" repository path
        repo_output_path = repo + convert_suffix
        conversion_cmd = [repogen, "--repository", repo, "--unite-metadata", repo_output_path]
        if dry_run:
            # prefix with 'echo' so the command is only printed, not executed
            conversion_cmd.insert(0, "echo")
        try:
            exec_cmd(conversion_cmd, timeout=60 * 15)
            successful_conversions[repo] = repo_output_path
        except Exception as e:
            log.error(
                f"Failed to update metadata for repository: {repo} - reason: {str(e)}"
            )
            failed_conversions[repo] = repo_output_path

    return (successful_conversions, failed_conversions)
Exemplo n.º 7
0
async def upload_ifw_to_remote(ifwTools: str, remoteServer: str, remoteServerHome: str) -> str:
    """Fetch the IFW tools archive, extract it, and upload 'repogen' to the remote server.

    Returns the remote path pointing to the uploaded repogen binary.
    """
    from installer_utils import is_valid_url_path, download_archive, extract_archive, locate_file
    assert is_valid_url_path(ifwTools)
    log.info("Preparing ifw tools: %s", ifwTools)
    # fetch and unpack the tools locally (skipped if the local directory already exists)
    ifwToolsDir = os.path.join(os.getcwd(), "ifw_tools")
    if not os.path.isdir(ifwToolsDir):
        os.makedirs(ifwToolsDir)
        archiveFile = download_archive(ifwTools, ifwToolsDir)
        await extract_archive(archiveFile, ifwToolsDir)
    repogenDir = os.path.dirname(locate_file("repogen", ifwToolsDir))
    # stage a timestamped tools directory on the remote host
    remoteTmpDir = os.path.join(remoteServerHome, "ifw_tools", timestamp)
    create_remote_paths(remoteServer, [remoteTmpDir])
    # trailing "/" uploads the directory contents rather than the directory itself
    uploadCmd = ['rsync', '-avzh', repogenDir + "/", remoteServer + ":" + remoteTmpDir]
    exec_cmd(uploadCmd, timeout=60 * 60)
    # path on remote pointing to repogen
    return os.path.join(remoteTmpDir, "bin", "repogen")
Exemplo n.º 8
0
async def _build_python(srcDir: str, bldDir: str, prefix: str) -> str:
    """Configure, build and install Python from 'srcDir' into 'prefix'.

    Args:
        srcDir: Python source tree containing the 'configure' script.
        bldDir: out-of-source build directory, recreated from scratch.
        prefix: installation prefix passed to configure.

    Returns:
        The installation prefix.

    Raises:
        BldPythonError: on macOS when the brew OpenSSL prefix does not exist.
    """
    log.info("Building..")
    log.info("  Source dir: %s", srcDir)
    log.info("  Build dir: %s", bldDir)
    log.info("  Prefix: %s", prefix)
    system = platform.system().lower()
    configureCmd = [
        os.path.join(srcDir, 'configure'), '--enable-shared', '--prefix=' + prefix
    ]
    if "darwin" in system:
        # macOS: link against the brew-installed OpenSSL
        opensslQueryCmd = ['brew', '--prefix', 'openssl']
        opensslPath = exec_cmd(opensslQueryCmd)
        if not os.path.exists(opensslPath):
            raise BldPythonError(
                "Could not find OpenSSL path. Please check that the required brew formula is installed."
            )
        configureCmd.append('--with-openssl=' + opensslPath)
    # Fix: the make commands were duplicated verbatim in both platform branches;
    # they are identical everywhere, so build them once.
    makeCmd = ['make', '-j' + str(multiprocessing.cpu_count())]
    makeInstallCmd = ['make', 'install']

    # always build from a clean build directory
    rmtree(bldDir, ignore_errors=True)
    os.makedirs(bldDir)

    with cd(bldDir):
        await async_exec_cmd(configureCmd)
        await async_exec_cmd(makeCmd)
        await async_exec_cmd(makeInstallCmd)
    log.info("Python built successfully and installed to: %s", prefix)
    return prefix
Exemplo n.º 9
0
def delete_remote_paths(server: str, paths: List[str]) -> None:
    """Recursively delete the given paths on 'server' via the remote login command.

    Args:
        server: remote host to run the removal on.
        paths: remote paths to delete; checked by is_safe_repo_directory first.
    """
    is_safe_repo_directory(paths)  # guard against removing unexpected locations
    # Fix: pass 'rm' and '-rf' as separate argv tokens instead of the single
    # 'rm -rf' string, and each path as its own argument. The original only
    # worked because the remote shell re-split the space-joined command line;
    # the resulting remote command is identical.
    cmd = get_remote_login_cmd(server) + ['rm', '-rf'] + paths
    exec_cmd(cmd, timeout=60 * 2)