def check(ds, dataset_arg, url_file, fname_format):
    """Run addurls from inside a subdirectory of *ds*.

    Verifies that paths named in the URL file resolve relative to the
    dataset root rather than the current working directory.

    NOTE(review): this references ``self.json_file`` although ``self`` is
    not a parameter — presumably captured from an enclosing method's
    scope; confirm against the surrounding file.
    """
    work_dir = op.join(ds.path, "subdir")
    os.mkdir(work_dir)
    with chpwd(work_dir):
        shutil.copy(self.json_file, "in.json")
        addurls(dataset_arg, url_file, "{url}", fname_format)
        # Files specified in the CSV file are always relative to the
        # dataset.
        for expected in ("a", "b", "c"):
            ok_exists(op.join(ds.path, expected))
def test_addurls_create_newdataset(self=None, path=None):
    """addurls creates a fresh dataset at the given path with yoda layout."""
    target = os.path.join(path, "ds")
    addurls(
        self.json_file,
        "{url}",
        "{name}",
        dataset=target,
        cfg_proc=["yoda"],
        result_renderer='disabled',
    )
    # "code" is produced by the yoda procedure; the rest come from the
    # URL file entries.
    for fname in ("a", "b", "c", "code"):
        ok_exists(os.path.join(target, fname))
def time_addurls(self, exclude_autometa):
    """Benchmark a single addurls invocation over the prepared list file.

    Fails the run if any individual result reports an error status.
    """
    lgr.warning("CSV: " + self.listfile.read_text())
    results = dl.addurls(
        self.ds,
        str(self.listfile),
        '{url}',
        '(unknown)',
        exclude_autometa=exclude_autometa,
    )
    # Equivalent to: not any(status == 'error') — every result must be clean.
    assert all(res['status'] != 'error' for res in results)
def test_addurls_create_newdataset(self, path):
    """addurls with a dataset path as first argument creates that dataset."""
    target = os.path.join(path, "ds")
    addurls(target, self.json_file, "{url}", "{name}", cfg_proc=["yoda"])
    # "code" is created by the yoda procedure; a/b/c by the URL file.
    expected = ("a", "b", "c", "code")
    for name in expected:
        ok_exists(os.path.join(target, name))
def test_addurls_create_newdataset(self, path):
    """addurls creates a new dataset (no cfg_proc) containing the URL-file entries."""
    target = os.path.join(path, "ds")
    addurls(target, self.json_file, "{url}", "{name}")
    for name in ("a", "b", "c"):
        ok_exists(os.path.join(target, name))