Example #1
def download_from_youtube(url: str) -> str:
    with wrap_context('downloading from youtube', url=url):
        log.info('downloading from youtube...', url=url)

        uid = str(uuid.uuid4())
        filename = f'trimmer_dl_{uid}'

        ydl_opts = {
            'format': 'bestaudio/best',
            'postprocessors': [{
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '192',
            }],
            'outtmpl': f'{filename}.%(ext)s'
        }
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            retcode = ydl.download([url])
            assert retcode == 0

        full_filename = f'{filename}.mp3'
        assert os.path.isfile(full_filename)
        log.info('song downloaded', tmpfile=full_filename)

        return full_filename
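
These examples use a structured logger: log.info takes a message plus arbitrary keyword arguments that are printed as context fields, and wrap_context is a context manager that attaches such fields to any error raised inside the block. Neither helper is defined in this listing; as a rough sketch only (the real helper in the source project may differ), wrap_context could look like this:

from contextlib import contextmanager

@contextmanager
def wrap_context(context_name: str, **ctx):
    # hypothetical sketch: re-raise any error with the context name and fields attached
    try:
        yield
    except Exception as e:
        details = ', '.join(f'{key}={value}' for key, value in ctx.items())
        raise RuntimeError(f'{context_name}: {e} ({details})') from e
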
Example #2
def volume_show():
    master_volume = read_master_volume()
    icon_name = get_notification_icon(master_volume)
    summary = 'Volume'
    body = f'{master_volume:d}%'
    log.info(f'Volume: {body}')
    show_notification(icon_name, summary, body)
Example #3
def log(self, verbose: int):
    ctx = {}
    if verbose >= 2:
        ctx['headers'] = self.headers
        if self.content:
            ctx['content'] = '\n' + self.content.decode('utf-8')
    log.info(f'< Incoming {self.method} {self.path}', **ctx)
Example #4
def read_mp3_artist_title(mp3_file: str) -> Tuple[str, str]:
    with wrap_context('extracting mp3 metadata', mp3_file=mp3_file):
        tag_artist, tag_title = read_mp3_tags(mp3_file)
        file_artist, file_title = extract_filename_artist_title(mp3_file)
        artist = tag_artist or file_artist
        title = tag_title or file_title
        log.info('metadata inferred', artist=artist, title=title)
        return artist, title
Example #5
def tag_mp3(mp3_file: str, artist: str, title: str):
    with wrap_context('tagging mp3', artist=artist, title=title, mp3_file=mp3_file):
        log.info('tagging mp3...', artist=artist, title=title)

        audiofile = eyed3.load(mp3_file)
        audiofile.tag.artist = artist
        audiofile.tag.title = title

        audiofile.tag.save(version=ID3_V1_1)
        audiofile.tag.save(version=ID3_V2_4)
Example #6
def read_mp3_tags(mp3_file: str) -> Tuple[str, str]:
    with wrap_context('reading mp3 tags'):
        audiofile = eyed3.load(mp3_file)
        if audiofile is None or audiofile.tag is None:
            log.warn('no ID3 tags read', mp3_file=mp3_file)
            return '', ''

        artist = _oremptystr(audiofile.tag.artist).strip()
        title = _oremptystr(audiofile.tag.title).strip()
        log.info('ID3 tags read', mp3_file=mp3_file, artist=artist, title=title)
        return artist, title
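
The _oremptystr helper is not shown in this listing; judging by its use above, it presumably turns a missing tag field (None) into an empty string. A minimal sketch, assuming that behavior:

from typing import Optional

def _oremptystr(value: Optional[str]) -> str:
    # assumed behavior: return the string itself, or '' when the tag field is None
    return value or ''
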
Example #7
def make_iso(yes: bool, source_disk: str, target_iso: str):
    log.info(f'checking required files existence')
    assert os.path.exists(os.path.abspath(os.path.join(target_iso, os.pardir)))

    fdisk_output = shell_output(f'sudo fdisk -l {source_disk}')
    print(fdisk_output)

    block_size_line = [
        line for line in fdisk_output.splitlines()
        if line.startswith('Units: sectors of')
    ][0]
    block_size_matcher = re.compile(r'= ([0-9]+) bytes$')
    match = block_size_matcher.search(block_size_line)
    assert match
    block_size = int(match.group(1))

    end_sector_line = fdisk_output.splitlines()[-1]
    end_sector_matcher = re.compile(
        f'^{source_disk}[a-z0-9]*\\s+[0-9]+\\s+([0-9]+)\\s+[0-9]+')
    match = end_sector_matcher.search(end_sector_line)
    assert match
    end_sector = int(match.group(1))

    log.info(f'block size: {block_size}')
    log.info(f'end sector: {end_sector}')
    confirm(
        yes,
        f'Attempting to dump partitions from {source_disk} to {target_iso}. Are you sure?'
    )

    log.info(f'Writing {source_disk} to {target_iso}')
    wrap_shell(
        f'sudo dd if={source_disk} of={target_iso} bs={block_size} count={end_sector} conv=noerror,sync status=progress'
    )
    wrap_shell('sync')
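
For reference, the two regular expressions above target lines of the fdisk -l output. A small standalone check with made-up sample lines (illustrative values only, not taken from a real disk):

import re

block_size_line = 'Units: sectors of 1 * 512 = 512 bytes'
end_sector_line = '/dev/sdb2       16386048  31250431  14864384  7.1G 83 Linux'

print(re.search(r'= ([0-9]+) bytes$', block_size_line).group(1))  # 512
print(re.search(r'^/dev/sdb[a-z0-9]*\s+[0-9]+\s+([0-9]+)\s+[0-9]+', end_sector_line).group(1))  # 31250431
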
Example #8
def rename_song(mp3_file: str, artist: str, title: str) -> str:
    with wrap_context('renaming song',
                      artist=artist,
                      title=title,
                      mp3_file=mp3_file):
        dirname, filename = os.path.split(mp3_file)
        if artist.strip():
            new_filename = f'{artist.strip()} - {title.strip()}.mp3'
        else:
            new_filename = f'{title.strip()}.mp3'
        new_path = Path(dirname) / new_filename
        os.rename(mp3_file, new_path)
        log.info('song renamed', new_name=new_path)
        return str(new_path)
Example #9
def trim_from_source(source: str, artist: Optional[str], title: Optional[str],
                     no_trim: bool, no_fade: bool, no_normalize: bool, no_rename: bool,
                     trim_start: Optional[float], trim_end: Optional[float], gain: Optional[float],
                     output: Optional[str]):
    with log_error():
        if source_is_url(source):
            log.info('source recognized as url', source=source)
            trim_url(source, artist, title, no_trim, no_fade, no_normalize,
                     trim_start, trim_end, gain, output)
        elif source_is_mp3(source):
            log.info('source recognized as mp3 file', source=source)
            trim_mp3(source, artist, title, no_trim, no_fade, no_normalize, no_rename,
                     trim_start, trim_end, gain, output)
        else:
            raise RuntimeError(f'unrecognized source: {source}')
Example #10
def _init_request_cache(self) -> Dict[int, CacheEntry]:
    if self.config.record_file and os.path.isfile(self.config.record_file):
        txt = Path(self.config.record_file).read_text()
        if not txt:
            return {}
        entries = json.loads(txt)
        loaded_cache = {}
        for entry in entries:
            parsed_entry = CacheEntry.from_json(entry)
            request_hash = self._request_hash(parsed_entry.request)
            loaded_cache[request_hash] = parsed_entry
        conflicts = len(entries) - len(loaded_cache)
        log.info(f'Loaded cached request-response entries',
                 record_file=self.config.record_file,
                 loaded=len(loaded_cache),
                 conflicts=conflicts)
        return loaded_cache
    return {}
Example #11
def load_extensions(extension_path: str) -> Extensions:
    if not extension_path:
        return Extensions()

    ext = Extensions()
    ext_module = SourceFileLoader('xman.extension',
                                  extension_path).load_module()
    ext_names = [field.name for field in fields(Extensions)]
    loaded = []
    for ext_name in ext_names:
        if hasattr(ext_module, ext_name):
            ext_value = getattr(ext_module, ext_name)
            if ext_value:
                setattr(ext, ext_name, ext_value)
                loaded.append(ext_name)

    log.info('Loaded extensions', file=extension_path, extensions=loaded)

    return ext
Example #12
def add_modules(yes: bool, modules: List[str]):
    set_workdir(os.path.join(script_real_dir(), '..'))

    log.info(f'checking required files existence')
    assert os.path.exists('squash/filesystem.squashfs')
    assert os.path.exists('content/boot-files')
    assert os.path.exists('content/grub')
    assert os.path.exists('modules/init')

    target_path = find_usb_data_partition()
    assert os.path.exists(target_path), 'module target path not found'

    confirm(
        yes,
        f'Attempting to install modules {modules} to {target_path}. Are you sure?'
    )

    log.info(f'Adding optional modules: {modules}')
    for module in modules:
        add_module(module, target_path)
Example #13
def trim_mp3(mp3_file: str, user_artist: Optional[str], user_title: Optional[str],
             no_trim: bool, no_fade: bool, no_normalize: bool, no_rename: bool,
             trim_start: Optional[float], trim_end: Optional[float], gain: Optional[float], output: Optional[str]):
    with wrap_context('mp3 song'):
        assert os.path.isfile(mp3_file), 'input file should exist'

        if output:
            shutil.copyfile(mp3_file, output)
            mp3_file = output

        tag_artist, tag_title = read_mp3_artist_title(mp3_file)
        artist = user_artist or tag_artist or enter_or_default('Artist', default='')
        title = user_title or tag_title or enter_or_default('Title', default='')

        if not output and not no_rename:
            mp3_file = rename_song(mp3_file, artist, title)
        normalize_song(mp3_file, no_trim, no_fade, no_normalize, trim_start, trim_end, gain)
        tag_mp3(mp3_file, artist, title)

        log.info('song saved', mp3_file=mp3_file)
Example #14
def setup_proxy(listen_port: int, listen_ssl: bool, dst_url: str, record: bool,
                record_file: str, replay: bool, replay_throttle: bool,
                replay_clear_cache: bool, replay_clear_cache_seconds: int,
                config: str, verbose: int):
    with logerr():
        with wrap_context('initialization'):
            extensions = load_extensions(config)
            _config = Config(
                listen_port=listen_port,
                listen_ssl=listen_ssl,
                dst_url=dst_url,
                record=record,
                record_file=record_file,
                replay=replay,
                replay_throttle=replay_throttle,
                replay_clear_cache=replay_clear_cache,
                replay_clear_cache_seconds=replay_clear_cache_seconds,
                verbose=verbose,
            )
            if extensions.override_config:
                extensions.override_config(_config)
            log.info('Configuration set', **asdict(_config))

            RequestHandler.extensions = extensions
            RequestHandler.config = _config
            RequestHandler.cache = RequestCache(extensions, _config)

            TCPServer.allow_reuse_address = True
            httpd = TCPServer((_config.listen_addr, _config.listen_port),
                              RequestHandler)
            if _config.listen_ssl:
                httpd.socket = ssl.wrap_socket(httpd.socket,
                                               certfile='./dev-cert.pem',
                                               server_side=True)
            log.info(
                f'Listening on {_config.listen_scheme} port {_config.listen_port}...'
            )
            try:
                httpd.serve_forever()
            finally:
                httpd.server_close()
Example #15
def fetch_youtube_metadata(url: str) -> Tuple[str, str, str]:
    with wrap_context('fetching title from youtube', url=url):
        log.info('fetching metadata from youtube page...', url=url)

        ydl_opts = {
            'format': 'bestaudio/best',
            'postprocessors': [{
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '192',
            }],
            'outtmpl': '%(title)s.%(ext)s',
        }
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            einfo = ydl.extract_info(url, download=False)

            track = einfo.get('track')
            artist = einfo.get('artist') or einfo.get('creator')
            full_title = einfo.get('title') or einfo.get('alt_title')

            log.info('youtube page metadata fetched', yt_title=full_title, artist=artist, track=track)
            return artist, track, full_title
Example #16
def bulk_rename(
    pattern: str,
    replacement_pattern: Optional[str],
    testing: bool = True,
    full: bool = False,
    recursive: bool = False,
    padding: int = 0,
) -> List[Match]:
    """
    Rename (or match) multiple files at once
    :param pattern: regex pattern to match filenames
    :param replacement_pattern: replacement regex pattern for renamed files.
    Use \\1 syntax to make use of matched groups
    :param testing: True - just testing replacement pattern, False - do actual renaming files
    :param full: whether to enforce matching full filename against pattern
    :param recursive: whether to search directories recursively
    :param padding: applies padding with zeros with given length on matched numerical groups
    """
    log.debug('matching regex pattern',
              pattern=pattern,
              replacement=replacement_pattern,
              testing_mode=testing,
              full_match=full,
              recursive=recursive,
              padding=padding)

    matches: List[Match] = match_files(Path(), pattern, replacement_pattern,
                                       recursive, full, padding)
    for match in matches:
        match.log_info(testing)

    if replacement_pattern:
        find_duplicates(matches)

    if testing:
        if matches:
            log.info('files matched', count=len(matches))
        else:
            log.info('no files matched', count=len(matches))
    elif replacement_pattern:
        rename_matches(matches)
        if matches:
            log.info('files renamed', count=len(matches))
        else:
            log.info('no files renamed', count=len(matches))
    else:
        raise RuntimeError('replacement pattern is required for renaming')

    return matches
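
A possible invocation of bulk_rename, with hypothetical file patterns for illustration: first a dry run with testing=True to preview the matches, then the actual renaming once the preview looks right:

# dry run: only report what would be renamed, padding numeric groups to 3 digits
bulk_rename(r'IMG_(\d+)\.jpeg', r'photo_\1.jpg', testing=True, padding=3)

# perform the renaming
bulk_rename(r'IMG_(\d+)\.jpeg', r'photo_\1.jpg', testing=False, padding=3)
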
Example #17
def log_info(self, testing: bool):
    group_kwargs = {f'group_{idx}': group for idx, group in self.groups.items()}
    if self.name_to is None:
        log.info('matched file', file=self.name_from, **group_kwargs)
    else:
        if testing:
            log.info('matched file', **{'from': self.name_from, 'to': self.name_to}, **group_kwargs)
        else:
            log.info('renaming file', **{'from': self.name_from, 'to': self.name_to})
Example #18
def add_module(module: str, target_path):
    log.info(f'Adding module {module}')
    assert module in optional_modules
    module_src_path = optional_modules[module]
    assert os.path.exists(module_src_path), 'module src path not found'
    if os.path.isdir(module_src_path):
        dirname = os.path.basename(os.path.normpath(module_src_path))
        log.info(
            f'Copying module {module_src_path} to {target_path}/{dirname}')
        wrap_shell(f'mkdir -p {target_path}/{dirname}')
        wrap_shell(f'rsync -a {module_src_path}/ {target_path}/{dirname}/')
    else:
        assert module_src_path.endswith('.zip'), 'supporting .zip only'
        log.info(f'Extracting module from {module_src_path} to {target_path}')
        wrap_shell(f'unzip {module_src_path} -d {target_path}/')
    wrap_shell(f'sync')
Example #19
def trim_url(url: str, user_artist: Optional[str], user_title: Optional[str],
             no_trim: bool, no_fade: bool, no_normalize: bool,
             trim_start: Optional[float], trim_end: Optional[float], gain: Optional[float], output: Optional[str]):
    with wrap_context('url song'):
        yt_artist, yt_title = extract_youtube_artist_title(url)
        log.info('artist & title extracted from youtube page', artist=yt_artist, title=yt_title)
        artist = user_artist or enter_or_default('Artist', default=yt_artist)
        title = user_title or enter_or_default('Title', default=yt_title)
        log.info('song name set', name=f'{artist} - {title}')

        mp3_file = download_from_youtube(url)
        if output:
            mp3_file = rename_output_song(mp3_file, output)
        else:
            mp3_file = rename_song(mp3_file, artist, title)
        normalize_song(mp3_file, no_trim, no_fade, no_normalize, trim_start, trim_end, gain)
        tag_mp3(mp3_file, artist, title)

        log.info('song saved', mp3_file=mp3_file)
Example #20
def rename_output_song(mp3_file: str, output: str) -> str:
    with wrap_context('renaming song', mp3_file=mp3_file, output=output):
        os.rename(mp3_file, output)
        log.info('song renamed', new_name=output)
        return output
Example #21
def prebuild_tools(watchmaker_repo: str):
    set_workdir(watchmaker_repo)
    submodule_src_dir = f'{watchmaker_repo}/modules'
    home = '/home/user'

    log.info(f'checking required files existence')
    assert os.path.exists(watchmaker_repo)
    assert os.path.exists(submodule_src_dir)
    assert os.path.exists(f'{submodule_src_dir}/lichking')
    assert os.path.exists(f'{submodule_src_dir}/volumen')

    assert os.geteuid() != 0, 'This script must not be run as root'

    log.info('updating watchmaker tools itself')
    wrap_shell(f'mkdir -p {home}/tools')
    wrap_shell(f'rsync -a {watchmaker_repo}/watchmake/ {home}/tools/watchmake')
    wrap_shell(f'rsync -a {watchmaker_repo}/scripts/ {home}/tools/scripts')
    wrap_shell(f'cp {watchmaker_repo}/modules/music/tubular.wav {home}/Music/')
    wrap_shell(f'cp {watchmaker_repo}/modules/music/tubular.mp3 {home}/Music/')

    log.info('updating pip packages')
    wrap_shell(f'sudo python3 -m pip install --upgrade nuclear')
    wrap_shell(f'python3 -m pip install --upgrade diffs')
    wrap_shell(f'python3 -m pip install --upgrade copymon')
    wrap_shell(f'python3 -m pip install --upgrade regex-rename')
    wrap_shell(f'python3 -m pip install --upgrade trimmer')
    wrap_shell(f'python3 -m pip install --upgrade youtube-dl')

    log.info('updating py-tools')
    wrap_shell(f'rsync -a {submodule_src_dir}/lichking/ {home}/tools/lichking')
    wrap_shell(f'rsync -a {submodule_src_dir}/volumen/ {home}/tools/volumen')

    log.info('recreating links & autocompletion for tools')
    wrap_shell(f'sudo rm -f /usr/bin/lichking')
    wrap_shell(f'sudo rm -f /usr/bin/lich')
    wrap_shell(f'sudo rm -f /usr/bin/king')
    wrap_shell(f'sudo rm -f /usr/bin/volumen')
    wrap_shell(f'sudo rm -f /usr/bin/watchmake')
    wrap_shell(f'sudo rm -f /etc/bash_completion.d/cliglue_*')
    wrap_shell(f'sudo rm -f /etc/bash_completion.d/nuclear_*')

    wrap_shell(f'{home}/tools/lichking/lichking.py --install-bash lichking')
    wrap_shell(f'{home}/tools/lichking/lichking.py --install-bash lich')
    wrap_shell(f'{home}/tools/lichking/lichking.py --install-bash king')
    wrap_shell(f'{home}/tools/watchmake/watchmake.py --install-bash watchmake')
    wrap_shell(f'{home}/tools/volumen/volumen.py --install-bash volumen')

    wrap_shell(f'diffs --install-autocomplete')
    wrap_shell(f'copymon --install-autocomplete')
    wrap_shell(f'regex-rename --install-autocomplete')
    wrap_shell(f'trimmer --install-autocomplete')

    log.info('updating live dev repos')
    wrap_shell(f'rm -rf {home}/dev-live')
    wrap_shell(f'mkdir -p {home}/dev-live')
    for repo_name, url in repo_remotes.items():
        log.info(f'initializing live git repo {repo_name}')
        repo_path = f'{home}/dev-live/{repo_name}'
        wrap_shell(f'mkdir -p {repo_path}')
        set_workdir(repo_path)
        wrap_shell(f'git init')
        wrap_shell(f'git remote add origin "{url}"')
    set_workdir(watchmaker_repo)

    log.info('clearing gradle cache')
    wrap_shell(f'rm -rf {home}/.gradle/*')

    log.info('clearing apt cache')
    wrap_shell(f'sudo apt clean')

    version_file = '/home/user/.osversion'
    version_line = read_file(version_file).splitlines()[0]
    version_matcher = re.compile(r'^v([0-9]+)\.([0-9]+)$')
    match = version_matcher.match(version_line)
    assert match
    major_version = int(match.group(1))
    minor_version = int(match.group(2)) + 1
    new_version = f'v{major_version}.{minor_version}'
    log.info(f'updating new OS version {new_version}')
    save_file(version_file, new_version)
Example #22
def normalize_song(mp3_file: str,
                   no_trim: bool,
                   no_fade: bool,
                   no_normalize: bool,
                   user_trim_start: Optional[float] = None,
                   user_trim_end: Optional[float] = None,
                   user_gain: Optional[float] = None):
    with wrap_context('normalizing mp3', mp3_file=mp3_file):
        log.info('loading song...', mp3_file=mp3_file)
        song = AudioSegment.from_mp3(mp3_file)

        if not no_normalize:
            if user_gain is not None:
                gain = user_gain
            else:
                volume = calculate_volume(song)
                log.info('normalizing volume level...',
                         volume=f'{volume:.2f}dB',
                         dBFS=f'{song.dBFS:.2f}dB')
                gain = -volume
            song = song.apply_gain(gain)
            log.info('volume normalized', gain=f'{gain:.2f}dB')

        if not no_trim:
            log.info('trimming silence...')
            start_trim = user_trim_start * 1000 if user_trim_start is not None else detect_leading_silence(
                song)
            end_trim = user_trim_end * 1000 if user_trim_end is not None else detect_leading_silence(
                song.reverse(), margin=0)
            pre_duration = len(song)
            song = song[start_trim:len(song) - end_trim]
            post_duration = len(song)
            log.info('silence trimmed',
                     trim_start=duration_to_human(start_trim),
                     trim_end=duration_to_human(end_trim),
                     duration_before=duration_to_human(pre_duration),
                     duration_after=duration_to_human(post_duration))

        if not no_fade:
            fade_in_duration = 100
            fade_out_duration = 1000
            log.info('applying fade-in & fade-out...',
                     fade_in=duration_to_human(fade_in_duration),
                     fade_out=duration_to_human(fade_out_duration))
            song = song.fade_in(fade_in_duration).fade_out(fade_out_duration)

        duration = len(song)
        log.info('saving song...',
                 mp3_file=mp3_file,
                 duration=duration_to_human(duration))
        song.export(mp3_file, format="mp3")
Example #23
def list_modules():
    log.info(f'Available modules:')
    for name, path in optional_modules.items():
        print(f'{name} - {path}')
Example #24
def replicate_os(source_disk: str, target_disk: str):
    log.info(f'Cloning {source_disk} to {target_disk}...')
    wrap_shell(
        f'sudo dd if={source_disk} of={target_disk} bs=64K conv=noerror,sync status=progress'
    )
    wrap_shell('sync')
Example #25
def resquash_os(storage_path: str, live_squash: str, exclude_file: str):
    today = today_stamp()
    squashfs_storage_path = f'{storage_path}/filesystem.squashfs'
    tagged_squashfs_path = f'{storage_path}/filesystem-{today}.squashfs'
    exclude_file_abs = os.path.abspath(exclude_file)

    set_workdir('/')

    # ensure mount points are mounted
    log.info(f'checking mount points')
    assert os.path.exists(
        storage_path), f'storage path does not exist: {storage_path}'
    assert os.path.exists(
        live_squash), f'live squash file does not exist: {live_squash}'
    assert os.path.exists(
        exclude_file_abs), f'exclude file does not exist: {exclude_file_abs}'

    log.info('removing old filesystem copy on storage')
    wrap_shell(f'sudo rm -f {squashfs_storage_path}')
    wrap_shell('sync')

    log.info('squashing filesystem...')
    wrap_shell(f'''
sudo mksquashfs \
    /bin /boot /dev /etc /home /lib /lib64 /media /mnt /opt /proc /run /root /sbin /srv /sys /tmp /usr /var \
    /initrd.img /initrd.img.old /vmlinuz /vmlinuz.old \
    {squashfs_storage_path} \
    -regex -ef {exclude_file_abs} \
    -comp gzip -b 512k \
    -keep-as-directory
    ''')

    log.info(f'creating tagged copy: {tagged_squashfs_path}...')
    wrap_shell(f'sudo cp {squashfs_storage_path} {tagged_squashfs_path}')
    wrap_shell('sync')

    log.info(f'checking current squashfs size')
    live_squash_mib = os.path.getsize(live_squash) / 1024**2

    log.info(f'[!] Putting Live system at risk')
    log.info(f'[!] removing current Live squashfs: {live_squash}')
    wrap_shell(f'sudo rm -f {live_squash}')

    log.info('[!] replacing with newest squashfs')
    wrap_shell(
        f'sudo rsync -ah --progress --no-perms --no-owner --no-group {squashfs_storage_path} {live_squash}'
    )
    wrap_shell('sync')
    log.info(f'[!] Live system is functional again')

    log.info(f'calculating checksum {squashfs_storage_path}')
    cksum1 = checksum_file(squashfs_storage_path)
    log.info(f'calculating checksum {live_squash}')
    cksum2 = checksum_file(live_squash)
    assert cksum1 == cksum2
    log.info(f'checksums are valid')
    tagged_squashfs_mib = os.path.getsize(tagged_squashfs_path) / 1024**2
    squash_size_diff = tagged_squashfs_mib - live_squash_mib

    log.info(
        f'Success. '
        f'Resquashed {live_squash}. '
        f'Filesystem snapshot dumped to {tagged_squashfs_path}',
        size=f'{tagged_squashfs_mib}MiB',
        size_diff=f'{squash_size_diff}MiB')
Example #26
def flash_disk(disk: str, persistence: bool, boot_storage_surplus: int, modules: List[str], skip_fs: bool):
    set_workdir(os.path.join(script_real_dir(), '..'))

    log.info(f'checking required files existence')
    assert os.path.exists('squash/filesystem.squashfs')
    assert os.path.exists('content/boot-files')
    assert os.path.exists('content/grub')
    assert os.path.exists('modules/init')

    # TODO unmount disk partitions

    log.warn(f'writing to disk {disk}')
    wrap_shell(f'df {disk}')

    log.info('creating MBR')
    wrap_shell(f'''sudo wipefs {disk}''')
    wrap_shell(f'''sudo dd if=/dev/zero of={disk} seek=1 count=2047''')
    wrap_shell(f'''
sudo parted --script {disk} \\
    mklabel msdos
    ''')

    log.info('calculating partitions size')
    # depend on filesystem.squash size, expand by some surplus (for storage)
    squashfs_size = os.path.getsize('squash/filesystem.squashfs')
    boot_part_min_size = squashfs_size + dir_size('content/boot-files') + dir_size('content/grub')
    boot_part_end_mib = boot_part_min_size / 1024 ** 2 + boot_storage_surplus
    efi_part_end_mib = boot_part_end_mib + efi_part_size
    persistence_part_end_mib = efi_part_end_mib + persistence_part_size
    log.info(f'boot partition size: {boot_part_end_mib}MiB ({boot_storage_surplus}MiB surplus)')

    log.info('creating partitions space')
    if persistence:
        wrap_shell(f'''
sudo parted --script {disk} \\
    mkpart primary fat32 1MiB {boot_part_end_mib}MiB \\
    set 1 lba on \\
    set 1 boot on \\
    mkpart primary fat32 {boot_part_end_mib}MiB {efi_part_end_mib}MiB \\
    set 2 esp on \\
    mkpart primary ext4 {efi_part_end_mib}MiB {persistence_part_end_mib}MiB \\
    mkpart primary ext4 {persistence_part_end_mib}MiB 100%
        ''')
    else:
        wrap_shell(f'''
sudo parted --script {disk} \\
    mkpart primary fat32 1MiB {boot_part_end_mib}MiB \\
    set 1 lba on \\
    set 1 boot on \\
    mkpart primary fat32 {boot_part_end_mib}MiB {efi_part_end_mib}MiB \\
    set 2 esp on \\
    mkpart primary ext4 {efi_part_end_mib}MiB 100%
        ''')
    wrap_shell('sync')

    log.info('making boot partition filesystem')
    wrap_shell(f'''sudo mkfs.fat -F32 {disk}1''')
    log.info('making EFI partition filesystem')
    wrap_shell(f'''sudo mkfs.fat -F32 {disk}2''')
    if persistence:
        log.info('making persistence partition filesystem')
        wrap_shell(f'''sudo mkfs.ext4 -F {disk}3''')
        log.info('making watchmodules partition filesystem')
        wrap_shell(f'''sudo mkfs.ext4 -F {disk}4''')
    else:
        log.info('making watchmodules partition filesystem')
        wrap_shell(f'''sudo mkfs.ext4 -F {disk}3''')
    wrap_shell('sync')

    log.info('setting partition names')
    if persistence:
        wrap_shell(f'''
sudo mlabel -i {disk}1 ::boot
sudo mlabel -i {disk}2 ::EFI
sudo e2label {disk}3 persistence
sudo e2label {disk}4 watchmodules
        ''')
    else:
        wrap_shell(f'''
sudo mlabel -i {disk}1 ::boot
sudo mlabel -i {disk}2 ::EFI
sudo e2label {disk}3 watchmodules
        ''')
    wrap_shell('sync')

    log.info('mounting partitions')
    wrap_shell(f'''sudo mkdir -p /mnt/watchmaker''')
    wrap_shell(f'''
sudo mkdir -p /mnt/watchmaker/boot
sudo mount {disk}1 /mnt/watchmaker/boot
        ''')
    wrap_shell(f'''
sudo mkdir -p /mnt/watchmaker/efi
sudo mount {disk}2 /mnt/watchmaker/efi
        ''')

    wrap_shell(f'''sudo mkdir -p /mnt/watchmaker/watchmodules''')
    if persistence:
        wrap_shell(f'''
sudo mkdir -p /mnt/watchmaker/persistence
sudo mount {disk}3 /mnt/watchmaker/persistence
        ''')
        wrap_shell(f'''sudo mount {disk}4 /mnt/watchmaker/watchmodules''')
    else:
        wrap_shell(f'''sudo mount {disk}3 /mnt/watchmaker/watchmodules''')

    log.info('installing GRUB EFI bootloaders')
    wrap_shell(f'''
sudo grub-install \\
    --target=x86_64-efi \\
    --efi-directory=/mnt/watchmaker/boot \\
    --boot-directory=/mnt/watchmaker/boot/boot \\
    --removable --recheck
    ''')
    wrap_shell(f'''
sudo grub-install \\
    --target=x86_64-efi \\
    --efi-directory=/mnt/watchmaker/efi \\
    --boot-directory=/mnt/watchmaker/boot/boot \\
    --removable --recheck
    ''')

    log.info('installing GRUB i386-pc bootloader')
    wrap_shell(f'''
sudo grub-install \\
    --target=i386-pc \\
    --boot-directory=/mnt/watchmaker/boot/boot \\
    --recheck \\
    {disk}
    ''')

    wrap_shell('sync')

    log.info('Fixing GRUB EFI by replacing with Debian GRUB')
    wrap_shell(f'''
sudo rm /mnt/watchmaker/efi/EFI/BOOT/*
sudo cp -r content/efi/* /mnt/watchmaker/efi/EFI/BOOT/
sudo rm /mnt/watchmaker/boot/EFI/BOOT/*
sudo cp -r content/efi/* /mnt/watchmaker/boot/EFI/BOOT/
sudo cp -r /mnt/watchmaker/efi/EFI/BOOT /mnt/watchmaker/efi/EFI/debian
sudo cp -r /mnt/watchmaker/boot/EFI/BOOT /mnt/watchmaker/boot/EFI/debian

sudo cp -r content/grub/x86_64-efi /mnt/watchmaker/boot/boot/grub/
    ''')

    log.info('making EFI Microsoft workaround')
    wrap_shell(f'''
sudo cp -r /mnt/watchmaker/efi/EFI/BOOT /mnt/watchmaker/efi/EFI/Microsoft
sudo cp -r /mnt/watchmaker/boot/EFI/BOOT /mnt/watchmaker/boot/EFI/Microsoft
    ''')

    log.info('GRUB config')
    wrap_shell(f'''
sudo cp content/grub/grub.cfg /mnt/watchmaker/boot/boot/grub/
sudo cp content/grub/background.png /mnt/watchmaker/boot/boot/grub/
sudo cp content/grub/font.pf2 /mnt/watchmaker/boot/boot/grub/
sudo cp content/grub/loopback.cfg /mnt/watchmaker/boot/boot/grub/
sudo cp content/grub/GRUB_FINDME /mnt/watchmaker/boot/
    ''')

    log.info('Boot base files')
    wrap_shell(f'''
sudo cp -r content/boot-files/[BOOT] /mnt/watchmaker/boot/
sudo cp -r content/boot-files/d-i /mnt/watchmaker/boot/
sudo cp -r content/boot-files/dists /mnt/watchmaker/boot/
sudo cp -r content/boot-files/live /mnt/watchmaker/boot/
sudo cp -r content/boot-files/pool /mnt/watchmaker/boot/
sudo cp -r content/boot-files/.disk /mnt/watchmaker/boot/
    ''')
    wrap_shell(f'''sudo mkdir -p /mnt/watchmaker/boot/storage''')

    log.info('EFI base files')
    wrap_shell(f'''
    sudo cp -r content/boot-files/[BOOT] /mnt/watchmaker/efi/
    sudo cp -r content/boot-files/d-i /mnt/watchmaker/efi/
    sudo cp -r content/boot-files/dists /mnt/watchmaker/efi/
    sudo cp -r content/boot-files/live /mnt/watchmaker/efi/
    sudo cp -r content/boot-files/pool /mnt/watchmaker/efi/
    sudo cp -r content/boot-files/.disk /mnt/watchmaker/efi/
        ''')

    if persistence:
        log.info('Persistence configuration')
        wrap_shell(f'''sudo cp -r content/persistence/persistence.conf /mnt/watchmaker/persistence/''')

    log.info('Copying squash filesystem')
    if not skip_fs:
        wrap_shell(f'''sudo cp squash/filesystem.squashfs /mnt/watchmaker/boot/live/''')

    log.info('Adding init module')
    wrap_shell(f'''sudo cp -r modules/init /mnt/watchmaker/watchmodules/''')
    log.info('Adding dev module')
    wrap_shell(f'''sudo mkdir -p /mnt/watchmaker/watchmodules/dev''')

    log.info('make watchmodules writable to non-root user')
    wrap_shell(f'''sudo chown igrek /mnt/watchmaker/watchmodules -R''')

    if modules:
        log.info(f'Adding optional modules: {modules}')
        target_path = '/mnt/watchmaker/watchmodules'
        for module in modules:
            install_module.add_module(module, target_path)

    log.info('unmounting')
    wrap_shell('sync')
    wrap_shell(f'''sudo umount /mnt/watchmaker/boot''')
    wrap_shell(f'''sudo umount /mnt/watchmaker/efi''')
    wrap_shell(f'''sudo umount /mnt/watchmaker/watchmodules''')
    if persistence:
        wrap_shell(f'''sudo umount /mnt/watchmaker/persistence''')
    wrap_shell('sync')

    log.info('Success')
Example #27
def action_monitor_meminfo(sync: bool):
    background_thread: Optional[BackgroundExecuteThread] = None
    if sync:
        background_thread = run_sync_background()

    mem_sizes_buffer: List[MemDataPoint] = []

    try:
        while True:
            # rerun sync
            if sync and background_thread and not background_thread.is_alive():
                log.info('running sync in background...')
                background_thread.stop()
                background_thread = run_sync_background()

            timestamp: float = time.time()
            dirty_kb, writeback_kb = get_mem_dirty_writeback()
            remaining_kb: int = dirty_kb + writeback_kb

            mem_sizes_buffer.append(
                MemDataPoint(timestamp, dirty_kb, writeback_kb))
            # max buffer size
            if len(mem_sizes_buffer) > 10:
                mem_sizes_buffer.pop(0)

            speed_temp: float = calc_temporary_speed(mem_sizes_buffer)
            speed_avg: float = calc_avg_speed(mem_sizes_buffer)
            eta_s: float = calc_eta(remaining_kb, speed_avg)

            # output values
            print_timestamp = CHAR_BOLD + current_time() + CHAR_RESET
            print_remaining = CHAR_BOLD + kb_to_human_just(
                remaining_kb) + CHAR_RESET
            print_temporary_speed = CHAR_BOLD + kb_to_speed_human_just(
                speed_temp) + CHAR_RESET
            print_avg_speed = CHAR_BOLD + kb_to_speed_human_just(
                speed_avg) + CHAR_RESET
            print_eta = CHAR_BOLD + seconds_to_human(eta_s).rjust(
                8) + CHAR_RESET

            # output formatting
            if remaining_kb < 100:
                print_remaining = CHAR_GREEN + print_remaining

            if speed_temp > 0:
                print_temporary_speed = CHAR_RED + print_temporary_speed
            elif speed_temp == 0:
                print_temporary_speed = CHAR_YELLOW + print_temporary_speed
            else:
                print_temporary_speed = CHAR_GREEN + print_temporary_speed

            if speed_avg > 0:
                print_avg_speed = CHAR_RED + print_avg_speed
            elif speed_avg == 0:
                print_avg_speed = CHAR_YELLOW + print_avg_speed
            else:
                print_avg_speed = CHAR_GREEN + print_avg_speed

            if not eta_s:
                print_eta = CHAR_YELLOW + print_eta
            elif eta_s < 60:
                print_eta = CHAR_GREEN + print_eta
            elif eta_s > 600:
                print_eta = CHAR_RED + print_eta

            print(f'[{print_timestamp}] Remaining:{print_remaining}, '
                  f'Speed:{print_temporary_speed}, '
                  f'AVG:{print_avg_speed}, '
                  f'ETA: {print_eta}')

            # delay before next loop
            inp = input_or_timeout(1)
            # sync command
            if inp == 's':
                if background_thread and background_thread.is_alive():
                    log.info('already syncing.')
                else:
                    log.info('running sync in background...')
                    background_thread = run_sync_background()
            elif inp == 'q':
                return

    except KeyboardInterrupt:
        # Ctrl + C handling without printing stack trace
        print()  # new line
    finally:
        # cleanup_thread
        if background_thread is not None:
            background_thread.stop()