Example no. 1
from IPython import display as idisp


def embed_map(map, path="map.html"):
    """
    Embeds the map in the IPython notebook as a linked iframe.

    Note: this method does not capture the map's source into the notebook.
    It should work for all maps, as long as they use relative URLs.
    """
    map.create_map(path=path)
    return idisp.IFrame(src="files/{path}".format(path=path),
                        width="100%",
                        height="510")
Example no. 2
import mimetypes
from fnmatch import fnmatch
from functools import partial
from typing import Tuple

from IPython import display


def macro(code: str) -> Tuple[display.DisplayObject, ...]:
    """
    >>> url = "https://test.com"
    >>> assert macro(url) and macro(''' {}
    ...
    ... '''.format(url))[0].data.strip() == url
    """
    lines = code.splitlines()
    if lines and lines[0].strip():
        if len(lines) == 1 and lines[0][:1].strip():
            mime_type = mimetypes.guess_type(code)[0]
            is_image = mime_type and mime_type.startswith('image')
            disp = partial(display.Image,
                           embed=True) if is_image else display.Markdown
            if fnmatch(code, "* [[]*[]](*)*"):
                url = code.rsplit(')', 1)[0].rsplit('(', 1)[1].split(' ', 1)[0]
                if url and url != '#':
                    return (display.Markdown(data=code), *macro(url))
            if fnmatch(code, 'http*://*'):
                return (display.Image(url=code) if is_image
                        else display.IFrame(code, width=600, height=400)),
        return display.Markdown(data=code),
    return tuple()
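
A usage sketch: macro returns a tuple of display objects that a notebook cell can render explicitly (the URL below is an illustrative placeholder):

# Render everything macro() produces for a Markdown image link.
for obj in macro("see [demo](https://example.com/demo.png)"):
    display.display(obj)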
Example no. 3
def fit(epochs):
  for epoch in range(epochs):
    start = time.time()
    n = 0
    for (input_image, target) in train_data:
      print('.', end='')
      n = n + 1
      if n % 100 == 0:
        print()
      train_step(input_image, target, epoch)
    print()

    # saving (checkpoint) the model every 20 epochs
    if (epoch + 1) % 20 == 0:
      checkpoint.save(file_prefix=checkpoint_prefix)

    print('Time taken for epoch {} is {} sec\n'.format(epoch + 1,
                                                       time.time()-start))
  checkpoint.save(file_prefix=checkpoint_prefix)


fit(EPOCHS)

display.IFrame(
    src="https://tensorboard.dev/experiment/lZ0C6FONROaUMfjYkVyJqw",
    width="100%",
    height="1000px")

# restoring the latest checkpoint in checkpoint_dir
checkpoint.restore(tf.train.latest_checkpoint(checkpoint_dir))

# Run the trained model on a few examples from the test dataset
for inp, tar in test_data.take(5):
  generate_images(generator, inp, tar)
# plot the training and validation losses

plotter.plot(size_histories)
plt.xscale('log')
plt.xlim([5, max(plt.xlim())])
plt.ylim([0.5, 0.7])
plt.xlabel('epoch [log scale]')
plt.savefig('./results_plot/Overfit_and_Underfit_4.png')
plt.clf()

#%load_ext tensorboard
#%tensorboard --logdir {logdir}/sizes

display.IFrame(
    src="https://tensorboard.dev/experiment/vW7jmmF9TmKmy3rbheMQpw/#scalars&_smoothingWeight=0.97",
    width="100%", height="800px")

# Prevent overfitting

shutil.rmtree(logdir / 'regularizers/Tiny', ignore_errors=True)
shutil.copytree(logdir / 'sizes/Tiny', logdir / 'regularizers/Tiny')

regularizer_histories = {}
regularizer_histories['Tiny'] = size_histories['Tiny']

# Solution 1: weight regularization

l2_model = tf.keras.Sequential([
    layers.Dense(512, activation='elu',
                 kernel_regularizer=regularizers.l2(0.001),
                 input_shape=(features,)),
Example no. 5
large_model = tf.keras.Sequential([
    layers.Dense(512, activation='elu', input_shape=(features,)),
    layers.Dense(512, activation='elu'),
    layers.Dense(512, activation='elu'),
    layers.Dense(1, activation='sigmoid')
])

size_histories['large'] = compile_and_fit(large_model, "sizes/large")

# Plot the training and validation losses
plotter.plot(size_histories)
plt.xscale('log')
plt.xlim([5, max(plt.xlim())])
plt.ylim([0.5, 0.7])
plt.xlabel("Epochs [Log Scale]")
plt.show()

'''
# View in TensorBoard
%tensorboard --logdir {logdir}/sizes

display.IFrame(
    src="https://tensorboard.dev/experiment/vW7jmmF9TmKmy3rbheMQpw/#scalars&_smoothingWeight=0.97",
    width="100%", height="800px")

!tensorboard dev upload --logdir {logdir}/sizes
'''

# Strategies to prevent overfitting
shutil.rmtree(logdir / 'regularizers/Tiny', ignore_errors=True)
shutil.copytree(logdir / 'sizes/Tiny', logdir / 'regularizers/Tiny')

regularizer_histories = {'Tiny': size_histories['Tiny']}
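
A natural next step here is an L2-regularized model for comparison, mirroring the truncated l2_model in Example no. 3; a sketch assuming the compile_and_fit helper and the features variable defined earlier in the notebook:

# Sketch: L2 weight regularization (compile_and_fit and `features`
# are assumed to be defined earlier in the notebook).
l2_model = tf.keras.Sequential([
    layers.Dense(512, activation='elu',
                 kernel_regularizer=regularizers.l2(0.001),
                 input_shape=(features,)),
    layers.Dense(512, activation='elu',
                 kernel_regularizer=regularizers.l2(0.001)),
    layers.Dense(1, activation='sigmoid')
])
regularizer_histories['l2'] = compile_and_fit(l2_model, "regularizers/l2")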
Example no. 6
fig, axs = plt.subplots(2, len(img_indices), figsize=(15, 8))
for i, img in enumerate(images):
  axs[0][i].imshow(img.cpu().permute(1, 2, 0))
for i, img in enumerate(integrated_grads):
  axs[1][i].imshow(np.moveaxis(normalize(img), 0, -1))
plt.show()
plt.close()

"""# **Homework 9 - Explainable AI (Part 2 BERT)**

# Questions 21 - 24
### It is recommended that you visualize directly on this website: https://exbert.net/exBERT.html
"""

from IPython import display
display.IFrame("https://exbert.net/exBERT.html", width=1600, height=1600)

"""# Import Packages (For Questions 25 - 30)"""

# Install transformers
!pip install transformers==4.5.0

# Import all packages needed
import numpy as np
import random
import torch

from sklearn.decomposition import PCA
from sklearn.metrics import pairwise_distances
from transformers import BertModel, BertTokenizerFast
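
A sketch of how these imports are typically combined: embed a sentence with BERT, then analyze the token embeddings with PCA and pairwise distances (the model name, sentence, and variable names are illustrative assumptions, not from the original notebook):

# Embed a sentence with BERT, then project the token embeddings.
tokenizer = BertTokenizerFast.from_pretrained("bert-base-uncased")
model = BertModel.from_pretrained("bert-base-uncased")
model.eval()

inputs = tokenizer("Time flies like an arrow.", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

embeddings = outputs.last_hidden_state[0].numpy()         # (num_tokens, hidden_size)
reduced = PCA(n_components=2).fit_transform(embeddings)   # 2-D projection
distances = pairwise_distances(embeddings)                # token-to-token distances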