def _wait_for_remote_mount(
    max_attempts: int,
    ctx: click.core.Context,
    remote_encrypted: Path,
    path_on_cloud_drive: str,
) -> None:
    """
    Wait for the rclone mount or error if it does not start within some time
    close to 25 seconds.
    """
    # Strip the leading '/' so the cloud path can be joined under the
    # encrypted mount point.
    relative_cloud_path = Path(path_on_cloud_drive).relative_to('/')
    remote_mount = remote_encrypted / relative_cloud_path
    poll_interval_seconds = 5
    checks_done = 0

    while not remote_mount.exists():
        checks_done += 1
        if checks_done > max_attempts:
            # ``ctx.fail`` raises, aborting the command here.
            ctx.fail(
                f'Remote mount not found after {max_attempts} attempts, '
                'exiting.',
            )
        LOGGER.info(f'Remote mount {remote_mount} does not exist yet, waiting.')
        time.sleep(poll_interval_seconds)
# Example 2
def download_artifact(
    ctx: click.core.Context,
    dcos_version: str,
    download_path: str,
) -> None:
    """
    Download a DC/OS Open Source artifact.

    For DC/OS Enterprise release artifacts, contact your sales representative.

    Args:
        ctx: Click context, used to abort with an error message on failure.
        dcos_version: Version string used to build the download URL.
        download_path: File or directory path to download the artifact to.
    """
    path = Path(download_path)
    label = 'Downloading to ' + str(path)
    base_url = 'https://downloads.dcos.io/dcos/'
    url = base_url + dcos_version + '/dcos_generate_config.sh'
    # Probe the URL first so a bad version fails with a clear message
    # before any bytes are streamed.
    head_resp = requests.head(url)
    if not head_resp.ok:
        message = 'Cannot download artifact from {url}.'.format(url=url)
        ctx.fail(message=message)

    if path.is_dir():
        path = path / 'dcos_generate_config.sh'

    if not path.exists():
        path.parent.mkdir(parents=True, exist_ok=True)

    # See
    # https://stackoverflow.com/questions/16694907/how-to-download-large-file-in-python-with-requests-py

    stream = requests.get(url, stream=True)
    assert stream.ok
    content_length = int(stream.headers['Content-Length'])
    total_written = 0
    chunk_size = 1024
    # ``click.progressbar`` expects an integer length; round up (ceiling
    # division) so a final partial chunk is counted, instead of passing
    # the float that ``content_length / chunk_size`` produced.
    progress_length = -(-content_length // chunk_size)
    # See http://click.pocoo.org/6/arguments/#file-args for parameter
    # information
    with click.open_file(
            filename=str(path),
            mode='wb',
            atomic=True,
            lazy=True,
    ) as file_descriptor:
        content_iter = stream.iter_content(chunk_size=chunk_size)
        with click.progressbar(  # type: ignore
                content_iter,
                length=progress_length,
                label=label,
        ) as progress_bar:
            for chunk in progress_bar:
                # Filter out keep-alive new chunks.
                if chunk:
                    total_written += len(chunk)
                    file_descriptor.write(chunk)  # type: ignore

    message = ('Downloaded {total_written} bytes. '
               'Expected {content_length} bytes.').format(
                   total_written=total_written,
                   content_length=content_length,
               )
    assert total_written == content_length, message
# Example 3
def init(ctx: click.core.Context, site_path: str) -> None:
    """Create a new mackerel site at ``site_path``."""
    output_path = Path(site_path)
    # The bundled sample site ships alongside the installed mackerel package.
    package_dir = os.path.dirname(os.path.realpath(mackerel.__file__))
    sample_site_path = Path(package_dir) / 'site'
    try:
        shutil.copytree(src=sample_site_path, dst=output_path)
    except FileExistsError as e:
        # ``ctx.fail`` raises, so the success message below is skipped.
        ctx.fail(f'Initialize failed, file {e.filename} already exists')

    click.echo(f'Initialized empty mackerel site in {output_path}')
# Example 4
def init(ctx: click.core.Context, site_path: str) -> None:
    """Create a new mackerel site from the bundled sample site."""
    output_path = Path(site_path)
    # Resolve the installed mackerel package location and find the
    # sample 'site' directory that ships next to it.
    mackerel_root = os.path.realpath(mackerel.__file__)
    sample_site_path = Path(os.path.dirname(mackerel_root)) / 'site'
    try:
        shutil.copytree(src=sample_site_path, dst=output_path)
    except FileExistsError as e:
        # Aborts the command; nothing after this line runs on failure.
        ctx.fail(f'Initialize failed, file {e.filename} already exists')

    click.echo(f'Initialized empty mackerel site in {output_path}')
# Example 5
 def get_command(self, ctx: click.core.Context,
                 cmd_name: str) -> Optional[click.core.Command]:
     """
     Resolve ``cmd_name`` to a command, falling back to close-match
     suggestions when the exact name is unknown.
     """
     exact = click.core.Group.get_command(self, ctx, cmd_name)
     if exact is not None:
         return exact
     # Tolerate small typos: look for up to two near-misses.
     candidates = difflib.get_close_matches(
         cmd_name, self.list_commands(ctx), n=2)
     if len(candidates) == 1:
         return click.core.Group.get_command(self, ctx, str(candidates[0]))
     if len(candidates) > 1:
         # Ambiguous input: abort rather than guess.
         ctx.fail('Too many matches: {0}'.format(candidates))
     return None
def upload(ctx: click.core.Context, config: _Config) -> None:
    """
    Upload local data to the cloud.

    Uses a PID file next to this script as a lock so that only one upload
    runs at a time; a stale PID file from a dead process is ignored.

    Args:
        ctx: Click context, used to abort if an upload is already running.
        config: Settings for rclone, encfs and the local/remote paths.
    """
    upload_pid_file = Path(__file__).parent / 'upload.pid'
    if upload_pid_file.exists():
        running_pid = upload_pid_file.read_text()
        if running_pid:
            # ``/proc/<pid>`` exists only while that process is alive, so a
            # stale PID file left by a crashed run does not block uploads.
            proc_file = Path('/proc') / running_pid
            if proc_file.exists():
                message = 'Upload script already running'
                LOGGER.error(msg=message)
                ctx.fail(message=message)

    current_pid = os.getpid()
    upload_pid_file.write_text(str(current_pid))
    _sync_deletes(
        local_decrypted=config.local_decrypted,
        encfs_pass=config.encfs_pass,
        remote_encrypted=config.remote_encrypted,
        rclone_remote=config.rclone_remote,
        path_on_cloud_drive=config.path_on_cloud_drive,
        rclone_config_path=config.rclone_config_path,
        rclone=config.rclone,
        rclone_verbose=config.rclone_verbose,
    )

    # Determine the .unionfs-fuse directory name as to not upload it
    exclude_name = _encode_with_encfs(
        path_or_file_name='.unionfs-fuse',
        encfs_pass=config.encfs_pass,
        root_dir=config.remote_encrypted,
    )

    upload_args = [
        str(config.rclone),
        '--config',
        str(config.rclone_config_path),
        _rclone_verbosity_flag(verbose=config.rclone_verbose),
        'copy',
        # Try to avoid Google 403: User Rate Limit Exceeded.
        # This happens when 10 total transfers happen in a second.
        '--tpslimit',
        '1',
        # Try to avoid limit of uploading files > 100 GB.
        '--max-size',
        '100G',
        # Avoid retrying when the 750 GB / day upload limit is hit.
        '--drive-stop-on-upload-limit',
        # Make fewer API requests.
        '--fast-list',
        # Exclude the ``.unionfs-fuse`` directory as this is where files to be
        # deleted go.
        '--exclude',
        f'/{exclude_name}/*',
        str(config.local_encrypted),
        _rclone_path(
            rclone_remote=config.rclone_remote,
            rclone_root=config.path_on_cloud_drive,
            rclone_relative_path=None,
        ),
    ]

    # ``Path.glob`` returns a generator, which is ALWAYS truthy - materialize
    # it so the emptiness check actually detects an empty directory.
    children = list(config.local_encrypted.glob('*'))
    if children:
        subprocess.run(args=upload_args, check=True)
    else:
        message = f'{config.local_encrypted} is empty - nothing to upload'
        LOGGER.info(msg=message)

    message = 'Upload Complete - Syncing changes'
    LOGGER.info(message)
    _local_cleanup(
        days_to_keep_local=config.days_to_keep_local,
        local_decrypted=config.local_decrypted,
    )
    upload_pid_file.unlink()
# Example 7
def download_installer(
    ctx: click.core.Context,
    dcos_version: str,
    download_path: str,
) -> None:
    """
    Download a DC/OS Open Source installer.

    For DC/OS Enterprise installers, contact your sales representative.

    Args:
        ctx: Click context, used to abort with an error message on failure.
        dcos_version: A DC/OS version string, or a full ``http(s)`` URL to
            download from directly.
        download_path: File or directory path to download the installer to.
    """
    path = Path(download_path)
    path.parent.mkdir(exist_ok=True, parents=True)
    path = path.parent.resolve() / path.name

    click.echo('Downloading to {path}.'.format(path=path))

    if dcos_version.startswith('http'):
        url = dcos_version
    else:
        base_url = 'https://downloads.dcos.io/dcos/'
        url = base_url + dcos_version + '/dcos_generate_config.sh'

    # Probe the URL first so a bad version fails with a clear message
    # before any bytes are streamed.
    head_resp = requests.head(url)
    if not head_resp.ok:
        message = 'Cannot download installer from {url}.'.format(url=url)
        ctx.fail(message=message)

    if path.is_dir():
        path = path / 'dcos_generate_config.sh'

    if not path.exists():
        path.parent.mkdir(parents=True, exist_ok=True)

    # See
    # https://stackoverflow.com/questions/16694907/how-to-download-large-file-in-python-with-requests-py

    stream = requests.get(url, stream=True)
    assert stream.ok
    content_length = int(stream.headers['Content-Length'])
    total_written = 0
    chunk_size = 1024
    # Ceiling division: count the final partial chunk, and give ``tqdm``
    # an integer total instead of a float.
    total_chunks = -(-content_length // chunk_size)
    # See http://click.pocoo.org/7/arguments/#file-args for parameter
    # information.
    content_iter = stream.iter_content(chunk_size=chunk_size)
    progress_bar = tqdm(
        iterable=content_iter,
        total=total_chunks,
        dynamic_ncols=True,
        bar_format='{l_bar}{bar}',
        unit_scale=None,
    )
    try:
        with click.open_file(
            filename=str(path),
            mode='wb',
            atomic=True,
            lazy=True,
        ) as file_descriptor:
            for chunk in progress_bar:
                # Enable at the start of each chunk, disable at the end, to
                # avoid showing statistics at the end.
                progress_bar.disable = False
                # Filter out keep-alive new chunks.
                if chunk:
                    total_written += len(chunk)
                    file_descriptor.write(chunk)  # type: ignore
                progress_bar.disable = True
    finally:
        # Release the bar's display resources even if the download fails.
        progress_bar.close()

    message = (
        'Downloaded {total_written} bytes. '
        'Expected {content_length} bytes.'
    ).format(
        total_written=total_written,
        content_length=content_length,
    )
    assert total_written == content_length, message