Пример #1
0
def test_ds1saijd2s23():
    """Run progress_bar twice over plain ranges with notifications disabled."""
    for sequence in (range(6), range(6)):
        for _ in progress_bar(sequence, notifications=False):
            pass
Пример #2
0
def test_asudhweasi23jdq():
    """Consume an argument-less generator through progress_bar."""

    def hundred():
        """Yield the integers 0..99."""
        yield from range(100)

    for _ in progress_bar(hundred(), notifications=False):
        pass
Пример #3
0
def test_asudhwea213si23jdq():
    """Consume a parameterised generator through progress_bar."""

    def counted(limit):
        """Yield the integers 0..limit-1."""
        yield from range(limit)

    for _ in progress_bar(counted(129), notifications=False):
        pass
Пример #4
0
def download_binary_mnist(
        file_path: str = "binary_mnist.h5",
        data_dir: Path = (PROJECT_APP_PATH.user_data / "vanilla_vae" / "data"),
) -> None:
    """Download the binarised MNIST splits, parse them and save them to HDF5.

    Args:
      file_path: destination HDF5 file for the combined dataset.
      data_dir: directory the raw ``.amat`` files are downloaded into;
        created (with parents) when missing.
    """
    data_dir.mkdir(parents=True, exist_ok=True)  # idempotent creation
    subdatasets = ["train", "valid", "test"]
    for subdataset in progress_bar(subdatasets):
        filename = f"binarized_mnist_{subdataset}.amat"
        url = (
            f"http://www.cs.toronto.edu/~larocheh/public/datasets/binarized_mnist"
            f"/binarized_mnist_{subdataset}.amat")
        urllib.request.urlretrieve(url, str(data_dir / filename))

    train, validation, test = parse_binary_mnist(data_dir)

    # context manager guarantees the HDF5 handle is closed even when a
    # create_dataset call raises (the original leaked it on error)
    with h5py.File(file_path, "w") as f:
        f.create_dataset("train", data=train)
        f.create_dataset("valid", data=validation)
        f.create_dataset("test", data=test)
    print(f"Saved binary MNIST data to: {file_path}")
Пример #5
0
def test_dict_items():
    """Iterate the (key, value) pairs of a class-level dict via progress_bar."""
    from time import sleep

    class exp_v:
        Test_Sets = dict(zip(range(9), range(9)))

    for _pair in progress_bar(exp_v.Test_Sets.items()):
        sleep(1)
Пример #6
0
def resize_children(
    src_path: Union[Path, str],
    size: Union[Tuple[Number, Number], Number],
    dst_path: Union[Path, str] = "resized",
    *,
    from_extensions: Iterable[str] = ("jpg", "png"),
    to_extension: "str" = "jpg",
    resize_method: ResizeMethodEnum = ResizeMethodEnum.scale_crop,
) -> None:
    """Recursively resize every image under *src_path* and write the results.

    Args:
      src_path: root directory searched recursively for images.
      size: target size — a single number (square) or a (width, height)
        tuple, as the annotation declares.
      dst_path: output root; resolved relative to ``src_path.parent`` when
        it has no root (i.e. is a relative path).
      from_extensions: file extensions to pick up.
      to_extension: extension of the written files.
      resize_method: how the target size is reached.

    Raises:
      NotImplementedError: for an unsupported ``resize_method``.
    """
    # the signature allows a (w, h) tuple, but the original only handled the
    # scalar case — accept both
    if isinstance(size, (tuple, list)):
        target_size = tuple(size)
    else:
        target_size = (size, size)
    src_path = Path(src_path)
    dst_path = Path(dst_path)
    if not dst_path.root:  # relative destinations live next to the source tree
        dst_path = src_path.parent / dst_path
    for ext in progress_bar(from_extensions):
        for c in progress_bar(
                src_path.rglob(f'*.{ext.rstrip("*").rstrip(".")}')):
            image = cv2.imread(str(c))
            if image is None:  # unreadable/non-image file: skip, don't crash later
                continue
            # compare against the enum class, not via the instance —
            # member access through an enum member was removed in Python 3.12
            if resize_method == ResizeMethodEnum.scale:
                resized = cv2_resize(image, target_size,
                                     InterpolationEnum.area)
            elif resize_method == ResizeMethodEnum.crop:
                center = (image.shape[0] / 2, image.shape[1] / 2)
                x = int(center[1] - target_size[0] / 2)
                y = int(center[0] - target_size[1] / 2)
                resized = image[y:y + target_size[1], x:x + target_size[0]]
            elif resize_method == ResizeMethodEnum.scale_crop:
                resized = resize(image, width=target_size[0])
                center = (resized.shape[0] / 2, resized.shape[1] / 2)
                x = int(center[1] - target_size[0] / 2)
                y = int(center[0] - target_size[1] / 2)
                resized = resized[y:y + target_size[1], x:x + target_size[0]]
            else:
                raise NotImplementedError

            target_folder = ensure_existence(
                dst_path.joinpath(*(c.relative_to(src_path).parent.parts)))
            cv2.imwrite(
                str((target_folder / c.name).with_suffix(
                    f'.{to_extension.rstrip("*").rstrip(".")}')),
                resized,
            )
Пример #7
0
    def asdasf():
        """Stream camera frames and draw each one on an OpenCV image stream."""
        from draugr.opencv_utilities import frame_generator, AsyncVideoStream
        from draugr.tqdm_utilities import progress_bar

        with AsyncVideoStream() as capture, OpencvImageStream() as screen:
            for frame in progress_bar(
                frame_generator(capture, coder=None),
                auto_total_generator=False,
            ):
                screen.draw(frame)
Пример #8
0
    def asdasf():
        """Stream camera frames, converting BGR->RGB, into an image plot."""
        import cv2
        from draugr.opencv_utilities import frame_generator
        from draugr.tqdm_utilities import progress_bar
        from functools import partial
        from draugr.opencv_utilities import AsyncVideoStream

        with AsyncVideoStream() as capture:
            to_rgb = partial(cv2.cvtColor, code=cv2.COLOR_BGR2RGB)
            # seed the plot with one converted frame so it knows the image size
            with ImageStreamPlot(to_rgb(next(capture))) as plot:
                for frame in progress_bar(
                    frame_generator(capture, coder=to_rgb),
                    auto_total_generator=False,
                ):
                    plot.draw(frame)
Пример #9
0
 def a():
     """Feed a synthetic three-sine mixture, sample by sample, into an FFT plot."""
     duration_sec = 4
     mul = 1000
     sampling_Hz = 44
     sampling_rate = sampling_Hz * mul  # Hz
     delta = 1 / sampling_rate
     n_fft = 64
     fft_plot = FastFourierTransformPlot(n_fft=n_fft, sampling_rate=sampling_rate)
     # components: (harmonic multiple, octave divisor, phase offset in pi units)
     components = ((1, 2**4, 0.0), (3, 2**3, 0.33), (5, 2**2, 0.66))
     for t in progress_bar(numpy.arange(0, duration_sec, delta)):
         ts = 2 * numpy.pi * t
         signal = sum(
             numpy.sin(ts * harmonic * sampling_Hz / divisor * mul + phase * numpy.pi)
             for harmonic, divisor, phase in components
         )
         signal /= 3
         # signal += (numpy.random.random() - 0.5) * 2 * 1 / 2  # Noise
         fft_plot.draw(signal, delta=delta)
Пример #10
0
def test_ds12sadad311231223():
    """Iterate a set of complex numbers through progress_bar."""
    complex_values = {
        2.13j, 8921.9123j, 923j, 821738j, 782173j, 8912738124j,
        8471827j, 661262j, 1111j, 2222j, 3333j, 4444j, 5555j,
    }
    for _ in progress_bar(complex_values, notifications=False):
        pass
Пример #11
0
def test_dsad311231223():
    """Iterate a tuple of floats and ints through progress_bar."""
    numbers = (
        2.13, 8921.9123, 923, 821738, 782173, 8912738124,
        8471827, 661262, 1111, 2222, 3333, 4444, 5555,
    )
    for _ in progress_bar(numbers, notifications=False):
        pass
 def a() -> None:
     """Drive a spectrogram plot with a synthetic three-sine mixture.

     :rtype: None
     """
     duration_sec = 4
     mul = 1000
     sampling_Hz = 44.1
     sampling_rate = int(sampling_Hz * mul)  # Hz
     delta = 1 / sampling_rate
     n_fft = 128  # 1024
     spectrogram = FastFourierTransformSpectrogramPlot(
         n_fft=n_fft,
         sampling_rate=sampling_rate,
         buffer_size_sec=delta * n_fft * 4,
     )
     # components: (harmonic multiple, octave divisor, phase offset in pi units)
     components = ((1, 2 ** 4, 0.0), (3, 2 ** 3, 0.33), (5, 2 ** 2, 0.66))
     for t in progress_bar(numpy.arange(0, duration_sec, delta)):
         ts = 2 * numpy.pi * t
         signal = sum(
             numpy.sin(ts * harmonic * sampling_Hz / divisor * mul + phase * numpy.pi)
             for harmonic, divisor, phase in components
         )
         signal /= 3
         # signal += (numpy.random.random() - 0.5) * 2 * 1 / 2  # Noise
         spectrogram.draw(signal, delta=delta)
Пример #13
0
def extract_scalars_as_csv(
    train_path: Path = EXPORT_RESULTS_PATH / "csv" / "training",
    test_path: Path = EXPORT_RESULTS_PATH / "csv" / "testing",
    export_train: bool = True,
    export_test: bool = True,
    verbose: bool = False,
    only_extract_from_latest_event_file: bool = False,
) -> None:
    """Export tensorboard scalar events to CSV trees for train/test runs.

    :param train_path: output root for training scalars.
    :param test_path: output root for testing scalars.
    :param export_train: export the scalars listed in ``TrainingScalars``.
    :param export_test: export the scalars listed in ``TestingScalars``.
    :param verbose: print a diagnostic when requested tags are missing.
    :param only_extract_from_latest_event_file: restrict extraction to the
        most recently created log directory.
    """

    def _export(tee, wanted_scalars, out_dir) -> bool:
        """Export the intersection of *wanted_scalars* and the exporter's
        available scalars to *out_dir*; return True when anything was written."""
        out_tags = [
            tag.value
            for tag in progress_bar(wanted_scalars)
            if tag.value in tee.available_scalars
        ]
        if out_tags:
            tee.scalar_export_csv(
                *out_tags,
                out_dir=ensure_existence(
                    out_dir, force_overwrite=True, verbose=verbose
                ),
            )
            return True
        return False

    log_dirs = list(
        AppPath("Adversarial Speech", "Christian Heider Nielsen").user_log.iterdir()
    )
    if only_extract_from_latest_event_file:
        # only the most recently created run directory
        latest = max(log_dirs, key=os.path.getctime)
        event_files = {
            latest: {ef.parent for ef in latest.rglob("events.out.tfevents.*")}
        }
    else:
        event_files = {
            run: {ef.parent for ef in run.rglob("events.out.tfevents.*")}
            for run in log_dirs
        }

    for k, v in progress_bar(event_files.items()):
        for e in progress_bar(v):
            relative_path = e.relative_to(k)
            mapping_id, *rest = relative_path.parts
            # prefix with the mapping id so runs stay distinguishable in the tree
            mapping_id_test = f"{mapping_id}_Test_{relative_path.name}"
            # model_id = relative_path.parent.name can be include but is always the same
            relative_path = Path(*(mapping_id_test, *rest))
            with TensorboardEventExporter(e, save_to_disk=True) as tee:
                if export_test:
                    if _export(tee, TestingScalars, test_path / k.name / relative_path):
                        print(e)  # original announced only successful test exports
                    elif verbose:
                        print(
                            f"{e}, no requested tags found {TestingScalars.__members__.values()}, {tee.available_scalars}"
                        )

                if export_train:
                    exported = _export(
                        tee, TrainingScalars, train_path / k.name / relative_path
                    )
                    if not exported and verbose:
                        print(
                            f"{e}, no requested tags found {TrainingScalars.__members__.values()}, {tee.available_scalars}"
                        )
Пример #14
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = "Christian Heider Nielsen"
__doc__ = r"""

           Created on 30-12-2020
           """

if __name__ == "__main__":

    from draugr.tqdm_utilities import progress_bar

    # demo: print 0..99 while a progress bar tracks the iteration
    for value in progress_bar(range(100)):
        print(value)
Пример #15
0
def main(config, model, tmsp_path):
    """Train the conditional VAE, logging losses, sample grids and latent plots.

    Args:
      config: namespace with ``batch_size``, ``learning_rate``, ``epochs``
        and ``print_every``.
      model: the conditional VAE; forward returns
        ``(reconstruction, mean, log_var, z)`` and it exposes ``sample``.
      tmsp_path: directory that receives the per-iteration sample grids and
        per-epoch latent-space figures.
    """
    data_loader = DataLoader(dataset=DATASET,
                             batch_size=config.batch_size,
                             shuffle=True)

    optimiser = torch.optim.Adam(model.parameters(), lr=config.learning_rate)

    logs = defaultdict(list)

    for epoch_i in progress_bar(range(config.epochs)):
        tracker_epoch = defaultdict(lambda: defaultdict(dict))

        for iteration, (original,
                        label) in progress_bar(enumerate(data_loader)):

            original, label = (
                original.to(global_torch_device()),
                label.to(global_torch_device()),
            )
            reconstruction, mean, log_var, z = model(
                original,
                one_hot(label, 10).to(GLOBAL_DEVICE))

            # track the first two latent dims per sample for the end-of-epoch
            # latent-space scatter plot
            for i, yi in enumerate(label):
                sample_id = len(tracker_epoch)  # was ``id`` — shadowed builtin
                tracker_epoch[sample_id]["x"] = z[i, 0].item()
                tracker_epoch[sample_id]["y"] = z[i, 1].item()
                tracker_epoch[sample_id]["label"] = yi.item()

            optimiser.zero_grad()
            loss = loss_fn(reconstruction, original, mean, log_var)
            loss.backward()
            optimiser.step()

            logs["loss"].append(loss.item())

            if iteration % config.print_every == 0 or iteration == len(
                    data_loader) - 1:
                print(f"Epoch {epoch_i:02d}/{config.epochs:02d}"
                      f" Batch {iteration:04d}/{len(data_loader) - 1:d},"
                      f" Loss {loss.item():9.4f}")

                condition_vector = torch.arange(0, 10,
                                                device=GLOBAL_DEVICE).long()
                sample = model.sample(
                    one_hot(condition_vector, 10).to(GLOBAL_DEVICE),
                    num=condition_vector.size(0),
                )

                # one figure for the 10 class-conditional samples
                # (the original also created an extra, leaked, empty figure)
                pyplot.figure(figsize=(5, 10))
                for p in range(10):
                    pyplot.subplot(5, 2, p + 1)

                    pyplot.text(
                        0,
                        0,
                        f"c={condition_vector[p].item():d}",
                        color="black",
                        backgroundcolor="white",
                        fontsize=8,
                    )
                    pyplot.imshow(sample[p].cpu().data.numpy())
                    pyplot.axis("off")

                pyplot.savefig(
                    str(tmsp_path / f"Epoch{epoch_i:d}_Iter{iteration:d}.png"),
                    dpi=300,
                )
                pyplot.clf()
                pyplot.close("all")

        df = pandas.DataFrame.from_dict(tracker_epoch, orient="index")
        g = seaborn.lmplot(
            x="x",
            y="y",
            hue="label",
            data=df.groupby("label").head(100),
            fit_reg=False,
            legend=True,
        )
        g.savefig(
            str(tmsp_path / f"Epoch{epoch_i:d}_latent_space.png"),
            dpi=300,
        )
        # checkpoint after every epoch (was wrapped in a pointless ``if True:``)
        torch.save(model.state_dict(),
                   BASE_PATH / f"model_state_dict{str(epoch_i)}.pth")
Пример #16
0
def test_progress_bar():
    """Smoke-test progress_bar over a plain range with notifications off."""
    for _ in progress_bar(range(100), notifications=False):
        pass
Пример #17
0
def test_ds12s23():
    """Iterate nested single-element complex-number lists via progress_bar."""
    nested = [[value] for value in (2.13j, 8921.9123j, 923j, 821738j, 782173j)]
    for _ in progress_bar(nested, notifications=False):
        pass
Пример #18
0
def test_dsad3123():
    """Iterate a short list of numbers through progress_bar."""
    values = [2.13, 8921.9123, 923]
    for _ in progress_bar(values, notifications=False):
        pass
Пример #19
0
def test_asudhweasijdq():
    """Iterate generated string labels ("a0".."a4") through progress_bar."""
    labels = list(map("a{}".format, range(5)))

    for _ in progress_bar(labels, notifications=False):
        pass