def set_size(self, size=None, scale_factor=1.0):
    """Set data size of this node.

    A ShuffleNode takes its size from its upstream node
    (``self.__from_node``), so a scale factor other than the default
    1.0 is not honored: a warning is logged and the factor is still
    forwarded to the parent implementation unchanged.

    Args:
        size: requested size, forwarded to the parent ``set_size``.
        scale_factor: ignored here apart from the warning; forwarded
            to the parent ``set_size``.

    Returns:
        Whatever the parent class ``set_size`` returns.
    """
    if scale_factor != 1.0:
        # NOTE(review): the factor is not applied to this node; only a
        # warning is emitted before delegating.
        logger.warning(
            "Trying to set scale factor for ShuffleNode, ignore")
    # Mirror the upstream node's size before delegating.
    # NOTE(review): the parent set_size may overwrite self._size —
    # can't tell from here; confirm against the base class.
    self._size = self.__from_node.size()
    return super(LogicalPlan.ShuffleNode, self).set_size(size, scale_factor)
def _force_delete_file(self, path):
    """Forcibly remove *path*, choosing the backend from its toft path.

    Paths whose toft form starts with ``/hdfs/`` are removed through the
    hadoop client; anything else is treated as a local tree and removed
    with ``shutil.rmtree`` on a best-effort basis (failures are logged,
    not raised).

    Args:
        path: the raw path to delete (the original path, not the toft
            form, is what gets passed to the remover).

    Returns:
        bool: True for the HDFS branch unconditionally (the ``fs_rmr``
        result is not checked — presumably it raises on failure; verify
        against the client). For local paths, True iff the path no
        longer exists afterwards.
    """
    toft_path = self._toft_path(path)

    # HDFS-backed path: delegate removal to the hadoop client.
    if toft_path.startswith('/hdfs/'):
        self._hadoop_client().fs_rmr(path, self._hadoop_config)
        return True

    # Local path: best-effort recursive delete; swallow and log any
    # error, then report success based on what is actually on disk.
    try:
        logger.debug('rmtree %s' % path)
        shutil.rmtree(path)
    except Exception as e:
        logger.warning('%s' % e)
    return not os.path.exists(path)
def set_size(self, size=None, scale_factor=1.0):
    """Set data size of this node.

    Explicit sizing is not supported on a UnionNode: the request is
    logged with a warning and otherwise ignored.

    Args:
        size: ignored.
        scale_factor: ignored.

    Returns:
        This node, unchanged (presumably to keep call chaining working
        — confirm against sibling ``set_size`` implementations).
    """
    logger.warning("Trying to set size for UnionNode, ignore")
    return self