def find_all_files(self):
    """Walk every configured root directory and index the files found.

    Populates self.files with a FileInfo per file (optionally with a
    checksum), then builds self.by_size, a size -> [FileInfo] index that
    skips files smaller than self.min_size.

    Idempotent: returns immediately if self.files is already populated.
    Progress/timing messages are suppressed when self.dump_dups is set.
    """
    if self.files:
        return  # already done
    # Accept a single root path as well as a list of roots.
    if not common.is_iterable(self.root_dir):
        self.root_dir = [self.root_dir]
    for root in self.root_dir:
        t0 = current_milli_time()
        walk = os.walk(root)
        t1 = current_milli_time()
        if not self.dump_dups and t1 - t0 > 1000:
            print("os.walk took " + str(t1 - t0) + " ms")
        # Loop vars renamed: the original reused `root` (shadowing the
        # directory being walked) and `file` (shadowing the builtin).
        for dirpath, dirs, filenames in walk:
            for fname in filenames:
                full_path = os.path.join(dirpath, fname)
                f = FileInfo(full_path, self.checksum_size_kb)
                if self.use_checksum:
                    f.gen_checksum()
                self.files.append(f)
                count = len(self.files)
                if not self.dump_dups and (count % 100) == 0:
                    print('processed ' + str(count) + ' files')
    # Index by size, excluding files below the configured minimum.
    for fi in self.files:
        if fi.size < self.min_size:
            continue
        self.by_size.setdefault(fi.size, []).append(fi)
def _unpack(self, value):
    """Return tensors unchanged; rebuild iterable containers element-wise.

    Recurses into any iterable (as judged by is_iterable), reconstructing
    the same container type around the unpacked members. Anything else
    passes through untouched.
    """
    # Tensors are the base case: hand them back as-is.
    if isinstance(value, torch.Tensor):
        return value
    # Non-iterables also pass straight through.
    if not is_iterable(value):
        return value
    # Rebuild the original container type with each member unpacked.
    return type(value)(self._unpack(member) for member in value)
def _unpack(self, value):
    """Strip Variable wrappers down to raw tensor data, recursing into iterables.

    Legacy autograd Variables yield their .data tensor; plain tensors pass
    through; iterable containers (per is_iterable) are rebuilt with each
    member unpacked; everything else is returned unchanged.
    """
    # Legacy autograd Variable: surface the underlying tensor.
    if isinstance(value, Variable):
        return value.data
    # Already a plain tensor — nothing to strip.
    if torch.is_tensor(value):
        return value
    # Containers: reconstruct the same type around unpacked members.
    if is_iterable(value):
        return type(value)(self._unpack(member) for member in value)
    return value