def test_delegation_non_existent_attribute(self):
    def create_lazy():
        return DbUtilsMock()

    db_utils_wrapper = DbUtilsWrapper(create_lazy)

    with self.assertRaises(AttributeError):
        db_utils_wrapper.non_existent_method()
def test_delegationNonExistentAttribute(self):
    def createLazy():
        return DbUtilsMock()

    dbUtilsWrapper = DbUtilsWrapper(createLazy)

    with self.assertRaises(AttributeError):
        dbUtilsWrapper.nonExistentMethod()
def test_methodWithArgument(self):
    def createLazy():
        from pyspark.dbutils import FSHandler
        return FSHandler('foo')

    dbUtilsWrapper = DbUtilsWrapper(createLazy)

    result = dbUtilsWrapper.print_return(12345)

    self.assertEqual(12345, result)
def test_delegationNonExistentAttribute(self):
    def createLazy():
        from pyspark.dbutils import DBUtils
        return DBUtils()

    dbUtilsWrapper = DbUtilsWrapper(createLazy)

    with self.assertRaises(AttributeError):
        dbUtilsWrapper.nonExistentMethod()
def test_method_with_argument(self):
    def create_lazy():
        # pylint: disable=import-outside-toplevel
        from pyspark.dbutils import FSHandler

        return FSHandler("foo")

    db_utils_wrapper = DbUtilsWrapper(create_lazy)

    result = db_utils_wrapper.print_return(12345)

    self.assertEqual(12345, result)
def create(self) -> DbUtilsWrapper:
    def createLazy():
        from pyspark.dbutils import DBUtils  # pylint: disable = import-outside-toplevel
        return DBUtils(self.__spark.sparkContext)

    return DbUtilsWrapper(createLazy)
def create(self) -> DbUtilsWrapper:
    def create_lazy():
        # pylint: disable=import-outside-toplevel
        from pyspark.dbutils import DBUtils

        return DBUtils(self.__spark)

    return DbUtilsWrapper(create_lazy)
def test_objectAttributeDelegation(self):
    def createLazy():
        return DbUtilsMock()

    dbUtilsWrapper = DbUtilsWrapper(createLazy)

    result = dbUtilsWrapper.fs.ls('/')

    self.assertIsInstance(result, list)
def test_object_attribute_delegation(self):
    def create_lazy():
        return DbUtilsMock()

    db_utils_wrapper = DbUtilsWrapper(create_lazy)

    result = db_utils_wrapper.fs.ls("/")

    self.assertIsInstance(result, list)
def test_objectAttributeDelegation(self):
    def createLazy():
        from pyspark.dbutils import DBUtils
        return DBUtils()

    dbUtilsWrapper = DbUtilsWrapper(createLazy)

    result = dbUtilsWrapper.fs.ls('/')

    self.assertIsInstance(result, list)
def test_scalarAttributeDelegation(self):
    def createLazy():
        from pyspark.dbutils import DBUtils

        dbUtils = DBUtils()
        dbUtils.a = 5

        return dbUtils

    dbUtilsWrapper = DbUtilsWrapper(createLazy)

    result = dbUtilsWrapper.a

    self.assertEqual(5, result)
def create(self) -> DbUtilsWrapper:
    return DbUtilsWrapper(resolveDbUtils)
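# The snippets above assume a DbUtilsWrapper that creates the underlying dbutils
# object lazily, on first attribute access, and then delegates attribute lookup to
# it, plus a DbUtilsMock used by the mock-based tests. The following is only a
# minimal sketch inferred from those tests; the class internals, the FSHandlerMock
# helper and its behaviour are assumptions, not the actual implementation.
from typing import Any, Callable


class DbUtilsWrapper:
    def __init__(self, createLazy: Callable[[], Any]):
        self.__createLazy = createLazy
        self.__dbUtils = None

    def __getattr__(self, name: str):
        # Called only when normal attribute lookup fails, so the wrapped object
        # is created on the first delegated access and reused afterwards.
        if self.__dbUtils is None:
            self.__dbUtils = self.__createLazy()

        return getattr(self.__dbUtils, name)


class FSHandlerMock:
    def ls(self, path: str) -> list:
        # The delegation tests only check the result type, so an empty listing suffices.
        return []


class DbUtilsMock:
    def __init__(self):
        self.fs = FSHandlerMock()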