def __repr__(self):
    """Human-readable summary: the dataset description followed by a
    markdown table of the label keys."""
    # Use jupyter-style table rendering only when actually running inside
    # an IPython kernel (notebook), never in a plain terminal session.
    in_kernel = False
    if __COULD_HAVE_IPYTHON__:
        ip = get_ipython()
        in_kernel = hasattr(ip, "config") and "IPKernelApp" in ip.config
    label_str = pp2mkdtable(self.labels, in_kernel)
    descr = self.meta.get("description", "MetaDataset")
    return f"{descr}\n\n# Labels\n{label_str}"
def explore(config, disable_cache=False):
    """Streamlit page: browse the examples of the dataset described by ``config``.

    Parameters
    ----------
    config : dict
        Passed to ``_get_state`` to construct the dataset; also rendered as a
        markdown table at the bottom of the page.
    disable_cache : bool
        If ``True``, rebuild the dataset on every rerun instead of using
        ``st.cache``.
    """
    if not disable_cache:
        get_state = st.cache(persist=False, allow_output_mutation=True)(_get_state)
    else:
        get_state = _get_state
    dset = get_state(config)
    dset.expand = True
    st.title("Dataset Explorer: {}".format(type(dset).__name__))

    input_method = st.sidebar.selectbox(
        "Index selection method", ["Slider", "Number input", "Sample"]
    )
    # Fix: streamlit's max_value is inclusive, so the maximum selectable
    # index must be len(dset) - 1 — len(dset) itself raised an IndexError.
    max_idx = max(len(dset) - 1, 0)
    if input_method == "Slider":
        idx = st.sidebar.slider("Index", 0, max_idx, 0)
    elif input_method == "Number input":
        idx = st.sidebar.number_input("Index", 0, max_idx, 0)
    elif input_method == "Sample":
        idx = 0
        if st.sidebar.button("Sample"):
            idx = np.random.choice(len(dset))
        st.sidebar.text("Index: {}".format(idx))

    show_example(dset, idx)

    st.header("config")
    cfg_string = pp2mkdtable(config, jupyter_style=True)
    cfg = st.markdown(cfg_string)
def display_status(stages_per_vid, stages=STAGES, error=ERROR):
    """Summarize per-video pipeline progress as a markdown table.

    For every video, shows either its error message, or the most advanced
    (non-final) stage reached; fully finished videos are only counted.

    Parameters
    ----------
    stages_per_vid : dict
        Maps video id -> sequence of ``(stage_name, message)`` pairs.
        NOTE(review): ``completed[0, 1]`` below uses tuple indexing, which
        only works if ``completed`` is already an ``np.ndarray`` — yet it is
        also passed through ``np.array(...)`` as if it were a plain list.
        Confirm the expected element type with the callers.
    stages : sequence of str
        Ordered list of pipeline stage names; the last entry means finished.
    error : str
        Sentinel stage name marking a failed video.

    Returns
    -------
    str
        The markdown table that was printed.
    """
    to_print = {}
    finished = []
    for vid, completed in stages_per_vid.items():
        # Column 0 holds the stage names of all completed steps.
        seen_stages = np.array(completed)[:, 0]
        if error in seen_stages:
            # Failed: report the message stored with the first entry.
            to_print[vid] = completed[0, 1]
        elif stages[-1] in seen_stages:
            # Reached the final stage: count it, do not list it.
            finished += [vid]
            continue
        else:
            def sort_key(v):
                # Order entries by pipeline position; a ' - skipped' suffix
                # is stripped before looking the stage up in ``stages``.
                v = v[0]
                if ' - skipped' in v:
                    element = v[:len(v) - len(' - skipped')]
                else:
                    element = v
                return stages.index(element)
            # Most advanced stage seen so far for this video.
            to_print[vid] = sorted(completed, key=sort_key)[-1][0]
    to_print = pp2mkdtable(to_print)
    print(to_print)
    print('Finished {} videos'.format(len(finished)))
    return to_print
def show_example(dset, idx):
    """Render the example at index ``idx`` of ``dset`` in the streamlit app."""
    example = dset[idx]

    st.header("Keys")
    # Walk the (possibly nested) example and display each leaf under its key.
    walk(example, display, pass_key=True)

    st.header("Summary")
    markdown_table = pp2mkdtable(example, jupyter_style=True)
    # Echo the markdown to the console for easy copy-pasting into a readme etc.
    print(markdown_table)
    st.markdown(markdown_table)
def explore(config, disable_cache=False):
    """Streamlit page: browse the dataset described by ``config`` via a
    slider, with a button to jump to a random example.

    Parameters
    ----------
    config : dict
        Passed to ``_get_state`` to construct the dataset; also rendered as a
        markdown table at the bottom of the page.
    disable_cache : bool
        If ``True``, rebuild the dataset on every rerun instead of using
        ``st.cache``.
    """
    if not disable_cache:
        get_state = st.cache(persist=False, allow_output_mutation=True)(_get_state)
    else:
        get_state = _get_state
    dset = get_state(config)
    dset.expand = True
    st.title("Dataset Explorer: {}".format(type(dset).__name__))
    # Fix: streamlit's slider max is inclusive, so the largest selectable
    # index must be len(dset) - 1 — len(dset) itself raised an IndexError.
    idx = st.sidebar.slider("index", 0, max(len(dset) - 1, 0), 0)
    if st.sidebar.button("sample"):
        idx = np.random.choice(len(dset))
    show_example(dset, idx)
    st.header("config")
    cfg_string = pp2mkdtable(config, jupyter_style=True)
    cfg = st.markdown(cfg_string)
            # NOTE(review): fragment — the enclosing get_example method, its
            # class, and the factory function start outside this view;
            # indentation reconstructed accordingly.
            # Keypoints/images of each neighbour example, picked for crop self.c
            # (presumably a crop/person index — TODO confirm against the class).
            ex["keypoints_reference"] = [
                e["keypoints"][self.c] for e in ex["neighbours"]
            ]
            ex["image_reference"] = [
                e["target"][self.c] for e in ex["neighbours"]
            ]
            ex["magnification_factor"] = self.mags
            # Anchor 1 is the first reference frame, anchor 2 the query frame.
            ex["frame_anchor_1"] = ex["image_reference"][0]
            ex["frame_anchor_2"] = ex["image_query"]
            return ex

        def __len__(self):
            # One example per precomputed viterbi neighbour entry.
            return len(self.viterbi_neighbours)

    # Factory returns the class itself, not an instance.
    return ABC_Seq_Mag_Dset


if __name__ == "__main__":
    # Smoke test: build the dataset for HumanGaitFixedBox and format one example.
    from abc_interpolation.data.human_gait import HumanGaitFixedBox

    dataset = HumanGaitFixedBox
    hg_viterbi = make_abc_nn_seq_mag_dset(dataset)
    data = hg_viterbi({"data_split": "train", "mode": "eval"})
    from edflow.util import pp2mkdtable

    # NOTE(review): the table is built but its return value is discarded
    # (nothing is printed) — confirm this is intentional.
    pp2mkdtable(data.get_example(10))
debug = False

# Two aggregated multi-person datasets at the same spatial size; only the
# first one forces a rescan for freshly finished videos.
D = AggregatedMultiPersonDataset(
    {'spatial_size': 256},
    root='/export/scratch/jhaux/Data/trickyoga',
    ext='mp4',
    force=True,  # See if new videos are ready!
    debug=debug,
)
D2 = AggregatedMultiPersonDataset(
    {'spatial_size': 256},
    root='/export/scratch/jhaux/Data/olympic_sports_new',
    ext='seq',
    force=False,
    debug=debug,
)

# Inspect example 10 of each dataset: plot it and print its markdown table.
for dataset, plot_name in [(D, 'ty_10.png'), (D2, 'oly_10.png')]:
    d = dataset[10]
    tab = pp2mkdtable(d)
    plot_datum(d, plot_name)
    print(tab)

print("D1: {}\nD2: {}".format(len(D), len(D2)))

# Note: only the most recent table (the D2 example) ends up in the file.
with open('multiperson.md', 'w+') as df:
    df.write(tab)
        # NOTE(review): fragment — tail of _preprocess_example; the method's
        # def line starts outside this view. Scales the image from [0, 255]
        # to [-1, 1] and casts to float32.
        example["image"] = example["image"] / 127.5 - 1.0
        example["image"] = example["image"].astype(np.float32)

    def get_example(self, i):
        """Load example ``i`` and preprocess it in place before returning."""
        example = self._load_example(i)
        self._preprocess_example(example)
        return example

    def __len__(self):
        # Number of examples, as determined at construction time.
        return self._length


if __name__ == "__main__":
    # Smoke test: inspect one train example, then the test split.
    from edflow.util import pp2mkdtable

    print("train")
    d = CIFAR10()
    print(len(d))
    e = d[0]
    print(pp2mkdtable(e))
    x, y = e["image"], e["class"]
    print(x.dtype, x.shape, x.min(), x.max(), y)
    print("test")
    d = CIFAR10({"CIFAR10": {"split": "test"}})
    print(len(d))
    from PIL import Image

    # Undo the [-1, 1] scaling to save a viewable uint8 PNG.
    # NOTE(review): ``x`` is still the *train* example loaded above, even
    # though the test split was just constructed — confirm intentional.
    Image.fromarray(
        ((x + 1.0) * 127.5).astype(np.uint8)).save("cifar10_example.png")
    # NOTE(review): fragment — tail of an enclosing function whose def line
    # starts outside this view; it stores the crop and plots the keypoints.
    d['im_crop'] = im_crop
    plot_kps(d, idx)


if __name__ == '__main__':
    # Smoke test: crop examples from a MultiPersonDataset and verify the crops.
    from multiperson_dataset import MultiPersonDataset

    # MP = MultiPersonDataset('/export/scratch/jhaux/Data/olympic sports/')
    MP = MultiPersonDataset('/export/scratch/jhaux/Data/olympic_test/')
    CMP = CropDataset(MP)

    from edflow.util import pprint, pp2mkdtable
    from edflow.data.util import plot_datum

    idx = 10  # NOTE(review): immediately overwritten by the loops below.
    # First 25 examples in order ...
    for idx in range(25):
        d = CMP[idx]
        print(pp2mkdtable(d))
        plot_datum(d, 'crop_{}.png'.format(idx))
        test_new_crop(d, idx)
    # ... then 10 random ones.
    for idx in np.random.randint(len(CMP), size=10):
        d = CMP[idx]
        print(pp2mkdtable(d))
        plot_datum(d, 'crop_{}.png'.format(idx))
        test_new_crop(d, idx)
def show_example(dset, idx):
    """Display the dataset example at index ``idx`` on the streamlit page."""
    example = dset[idx]

    st.header("Keys")
    # Walk the (possibly nested) example and display each leaf under its key.
    walk(example, display, pass_key=True)

    st.header("Summary")
    summary = pp2mkdtable(example, jupyter_style=True)
    st.markdown(summary)