def testAggMethods(self):
        # create a silly table
        tab = TableTools.emptyTable(10)
        tab = tab.update("dumb=(int)(i/5)", "var=(int)i",
                         "weights=(double)1.0/(i+1)")

        # try the various aggregate methods - just a coverage test
        tab.by(
            ComboAggregateFactory.AggCombo(
                ComboAggregateFactory.AggArray("aggArray=var"),
                ComboAggregateFactory.AggAvg("aggAvg=var"),
                ComboAggregateFactory.AggCount("aggCount"),
                ComboAggregateFactory.AggFirst("aggFirst=var"),
                ComboAggregateFactory.AggLast("aggLast=var"),
                ComboAggregateFactory.AggMax("aggMax=var"),
                ComboAggregateFactory.AggMed("aggMed=var"),
                ComboAggregateFactory.AggMin("aggMin=var"),
                ComboAggregateFactory.AggPct(0.20, "aggPct=var"),
                ComboAggregateFactory.AggStd("aggStd=var"),
                ComboAggregateFactory.AggSum("aggSum=var"),
                ComboAggregateFactory.AggAbsSum(
                    "aggAbsSum=var"),  # TODO: add this in...
                ComboAggregateFactory.AggVar("aggVar=var"),
                ComboAggregateFactory.AggWAvg(
                    "weights",
                    "aggWAvg=var")),  # output column name comes from the match pair
            "dumb")
        # TODO: AggFormula - its string-based formula API is awkward to cover here (see sketch below)
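        # Untested sketch for the TODO above, assuming the legacy
        # ComboAggregateFactory.AggFormula(formula, formulaParam, *matchPairs)
        # signature; not part of the original coverage test:
        #   tab.by(
        #       ComboAggregateFactory.AggCombo(
        #           ComboAggregateFactory.AggFormula("avg(each)", "each",
        #                                            "aggFormula=var")),
        #       "dumb")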
        del tab
Example 2
def demo_implicit():
    print("Running Implicit Demo.")
    size_imp = 42
    global hello_imp
    hello_imp = TableTools.emptyTable(size_imp)
    global world_imp
    world_imp = TableTools.timeTable("00:00:01")
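# NB: this implicit demo publishes its tables by mutating module globals; the
# explicit counterpart further down (demo_app) pushes them through
# ApplicationState.setField instead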
Example 3
    @classmethod
    def setUpClass(cls):
        """
        Inherited method allowing initialization of test environment
        """
        cls.table = TableTools.emptyTable(200).update("timestamp=new DBDateTime((long)(i/2)*1000000000)",
                                                      "Sym=((i%2 == 0) ? `MSFT` : `AAPL`)",
                                                      "price=(double)((i%2 == 0) ? 100.0 + (i/2) + 5*Math.random() : 250.0 + (i/2) + 10*Math.random())")
Example 4
    def testTableToDataframeNoNulls(self):
        """
        Test for converting a basic table with no null values to a dataframe
        """

        tab_reg = TableTools.emptyTable(1).update(
            "boolCol=(boolean)false", "byteCol=(byte)0", "shortCol=(short)0",
            "intCol=(int)0", "longCol=(long)0", "floatCol=(float)0",
            "doubleCol=(double)0", "datetimeCol=new DBDateTime(0)",
            "stringCol=`test`")
        # there are no nulls here, so all three conversion options should work, and result in identical dataframes
        with self.subTest(msg="convert null when no null values"):
            df = tableToDataFrame(tab_reg,
                                  convertNulls='ERROR',
                                  categoricals=None)
            df_reg = tableToDataFrame(tab_reg,
                                      convertNulls='PASS',
                                      categoricals=None)
            df_reg_nc = tableToDataFrame(tab_reg,
                                         convertNulls='CONVERT',
                                         categoricals=None)

        # EQUALITY CHECK
        with self.subTest(msg='converted dfs are equal'):
            self.assertTrue(df.equals(df_reg))  # equals is transitive
            self.assertTrue(df_reg.equals(df_reg_nc))

        # DATA TYPE TEST
        for col, dtyp in [('boolCol', numpy.bool_), ('byteCol', numpy.int8),
                          ('shortCol', numpy.int16), ('intCol', numpy.int32),
                          ('longCol', numpy.int64),
                          ('floatCol', numpy.float32),
                          ('doubleCol', numpy.float64),
                          ('datetimeCol', numpy.dtype('datetime64[ns]')),
                          ('stringCol', numpy.object)]:
            # NB: dtype is checked explicitly here rather than relying on df.equals(), so this is not redundant
            with self.subTest(
                    msg='dtype nulls_convert=ERROR for {}'.format(col)):
                self.assertEqual(df[col].values.dtype, dtyp)
            with self.subTest(
                    msg='dtype nulls_convert=PASS for {}'.format(col)):
                self.assertEqual(df_reg[col].values.dtype, dtyp)
            with self.subTest(
                    msg='dtype nulls_convert=CONVERT for {}'.format(col)):
                self.assertEqual(
                    df_reg_nc[col].values.dtype,
                    dtyp)  # no nulls present, so no type promotion is expected

        # VALUES TEST
        for col, val in [('boolCol', False), ('byteCol', 0), ('shortCol', 0),
                         ('intCol', 0), ('longCol', 0), ('floatCol', 0),
                         ('doubleCol', 0),
                         ('datetimeCol', numpy.datetime64(0, 'ns')),
                         ('stringCol', u'test')]:
            # NB: raw unicode string should be simultaneously python2/3 compliant
            with self.subTest(msg='entries for {}'.format(col)):
                self.assertEqual(df[col].values[0], val)
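        # Untested sketch (not part of the original test): the frame could be
        # round-tripped with dataFrameToTable, as the array-conversion test
        # below does, e.g.
        #   tab_back = dataFrameToTable(df_reg, convertUnknownToString=True)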
    def testCreation(self):
        """
        Test suite for writing a table to disk, reading it back, and deleting it
        """

        table = TableTools.emptyTable(3).update("x=i", "y=(double)(i/10.0)",
                                                "z=(double)(i*i)")
        definition = table.getDefinition()
        baseDir = os.path.join(self.rootDir, "testCreation")
        fileLocation = os.path.join(baseDir, 'table1.parquet')
        fileLocation2 = os.path.join(baseDir, 'table2.parquet')

        # make sure that the test workspace is clean
        if os.path.exists(fileLocation):
            shutil.rmtree(fileLocation)
        if os.path.exists(fileLocation2):
            shutil.rmtree(fileLocation2)
        time.sleep(0.01)  # avoid race condition on file existence...

        # Writing
        with self.subTest(msg="writeTable(Table, String)"):
            ParquetTools.writeTable(table, fileLocation)
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            shutil.rmtree(baseDir)
            time.sleep(0.01)  # avoid race condition on file existence...
        with self.subTest(msg="writeTable(Table, File)"):
            ParquetTools.writeTable(table,
                                    ParquetTools.getFileObject(fileLocation))
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            shutil.rmtree(baseDir)
            time.sleep(0.01)  # avoid race condition on file existence...
        with self.subTest(msg="writeTables(Table[], TableDefinition, File[]"):
            ParquetTools.writeTables([table, table], definition,
                                     [fileLocation, fileLocation2])
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            self.assertTrue(os.path.exists(fileLocation2))

        # Reading
        with self.subTest(msg="readTable(File)"):
            table2 = ParquetTools.readTable(fileLocation)
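            # Hedged follow-up (not in the original test): the table read back
            # could be compared to the source, e.g. with TableTools.diff as
            # testTableBasics does:
            #   print(TableTools.diff(table2, table, 3))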

        # Delete
        with self.subTest(msg="delete(File)"):
            if os.path.exists(fileLocation):
                ParquetTools.deleteTable(fileLocation)
                time.sleep(0.01)  # avoid race condition on file existence...
                self.assertFalse(os.path.exists(fileLocation))
            if os.path.exists(fileLocation2):
                ParquetTools.deleteTable(fileLocation2)
                time.sleep(0.01)  # avoid race condition on file existence...
                self.assertFalse(os.path.exists(fileLocation2))
        shutil.rmtree(baseDir)
Example 6
    @classmethod
    def setUpClass(cls):
        """
        Inherited method allowing initialization of test environment
        """
        cls.table = TableTools.emptyTable(200).update("timestamp=new DBDateTime((long)(i/2)*1000000000)",
                                                      "Sym=((i%2 == 0) ? `MSFT` : `AAPL`)",
                                                      "price=(double)((i%2 == 0) ? 100.0 + (i/2) + 5*Math.random() : 250.0 + (i/2) + 10*Math.random())")

        longs = numpy.arange(0, 86401, 60, dtype=numpy.int64)
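        # one value per minute over a full day (0..86400 s); presumably the
        # raw material for array/DBDateTime conversion tests in this class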
        cls.arrays = {
            'DBDateTime[]': longs.astype('datetime64[s]'),
            'long[]': longs,
            'int[]': longs.astype(numpy.int32),
            'float[]': longs.astype(numpy.float32),
            'double[]': longs.astype(numpy.float64),
        }
Example 7
    def testTableToDataframeWithNulls(self):
        """
        Test for converting a basic table with null values to a dataframe
        """

        tab_nulls = TableTools.emptyTable(2).update(
            "boolCol=((i==0) ? true : null)",
            "byteCol=(byte)((i==0) ? 0 : NULL_BYTE)",
            "shortCol=(short)((i==0) ? 2 : NULL_SHORT)",
            "intCol=(int)((i==0) ? 0 : NULL_INT)",
            "longCol=(long)((i==0) ? 0 : NULL_LONG)",
            "floatCol=(float)((i==0) ? 2 : NULL_FLOAT)",
            "doubleCol=(double)((i==0) ? 2 : NULL_DOUBLE)",
            "datetimeCol=((i==0) ? new DBDateTime(0) : null)")
        with self.subTest(
                msg="Does not convert if convertNulls=ERROR and nulls present"
        ):
            self.assertRaises(ValueError,
                              tableToDataFrame,
                              tab_nulls,
                              convertNulls='ERROR',
                              categoricals=None)
        with self.subTest(
                msg=
                "Converts if convertNulls in [PASS, CONVERT] and nulls present"
        ):
            df_nulls = tableToDataFrame(tab_nulls,
                                        convertNulls='PASS',
                                        categoricals=None)
            df_nulls_nc = tableToDataFrame(tab_nulls,
                                           convertNulls='CONVERT',
                                           categoricals=None)

        # EQUALITY CHECK
        self.assertFalse(df_nulls.equals(df_nulls_nc))
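        # (PASS leaves the Deephaven null sentinels in place, while CONVERT
        # maps them to NaN/None and widens the integer dtypes, so the two
        # frames are expected to differ; see the dtype and value checks below)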

        # DATA TYPES TEST
        # verify that the dtypes are as expected when we DO NOT convert the nulls
        for col, dtyp in [('boolCol', numpy.bool_), ('byteCol', numpy.int8),
                          ('shortCol', numpy.int16), ('intCol', numpy.int32),
                          ('longCol', numpy.int64),
                          ('floatCol', numpy.float32),
                          ('doubleCol', numpy.float64),
                          ('datetimeCol', numpy.dtype('datetime64[ns]'))]:
            with self.subTest(
                    msg='data type, nulls_convert=False, for {}'.format(col)):
                self.assertEqual(df_nulls[col].values.dtype, dtyp)  # as before
        # verify that the dtypes are as expected when we DO convert the nulls
        for col, dtyp in [
            ('boolCol', numpy.object), ('byteCol', numpy.float32),
            ('shortCol', numpy.float32), ('intCol', numpy.float64),
            ('longCol', numpy.float64), ('floatCol', numpy.float32),
            ('doubleCol', numpy.float64),
            ('datetimeCol', numpy.dtype('datetime64[ns]'))
        ]:
            with self.subTest(
                    msg='data type, nulls_convert=True, for {}'.format(col)):
                self.assertEqual(df_nulls_nc[col].values.dtype, dtyp)

        # VALUES TEST
        # verify that the null entries are as expected when we DO NOT convert the nulls
        for col, val in [
            ('boolCol', False),
            ('byteCol', NULL_BYTE),
            ('shortCol', NULL_SHORT),
            ('intCol', NULL_INT),
            ('longCol', NULL_LONG),
        ]:
            with self.subTest(
                    msg='null entry, nulls_convert=False, for {}'.format(col)):
                self.assertEqual(df_nulls[col].values[1], val)
        # floating point and datetime types are converted to NaN/NaT regardless of null conversion
        with self.subTest(msg='null entry, nulls_convert=False, for floatCol'):
            self.assertTrue(numpy.isnan(df_nulls['floatCol'].values[1]))
        with self.subTest(
                msg='null entry, nulls_convert=False, for doubleCol'):
            self.assertTrue(numpy.isnan(df_nulls['doubleCol'].values[1]))
        with self.subTest(
                msg='null entry, nulls_convert=False, for datetimeCol'):
            self.assertTrue(numpy.isnat(df_nulls['datetimeCol'].values[1]))
        # verify that the null entries are as expected when we DO convert the nulls
        with self.subTest(msg='entries nulls_convert=True for bool'):
            self.assertIsNone(df_nulls_nc['boolCol'][1])
        for col in [
                'byteCol', 'shortCol', 'intCol', 'longCol', 'floatCol',
                'doubleCol'
        ]:
            with self.subTest(msg='regular entry, nulls_convert=True, for {}'.
                              format(col)):
                self.assertFalse(numpy.isnan(df_nulls_nc[col].values[0]))
            with self.subTest(
                    msg='null entry, nulls_convert=True, for {}'.format(col)):
                self.assertTrue(numpy.isnan(df_nulls_nc[col].values[1]))
        with self.subTest(
                msg='regular entry, nulls_convert=True, for datetimeCol'):
            self.assertEqual(df_nulls_nc['datetimeCol'].values[0],
                             numpy.datetime64(0, 'ns'))
        with self.subTest(
                msg='null entry, nulls_convert=True, for datetimeCol'):
            self.assertTrue(numpy.isnat(df_nulls_nc['datetimeCol'].values[1]))
Example 8
    def testArrayColumnConversion(self):
        """
        Test for behavior when one of the columns is of array type (in each direction)
        """

        firstTable = TableTools.emptyTable(10).update(
            "MyString=new String(`a`+i)",
            "MyChar=new Character((char) ((i%26)+97))",
            "MyBoolean=new Boolean(i%2==0)",
            "MyByte=new java.lang.Byte(Integer.toString(i%127))",
            "MyShort=new Short(Integer.toString(i%32767))",
            "MyInt=new Integer(i)", "MyLong=new Long(i)",
            "MyFloat=new Float(i+i/10)", "MyDouble=new Double(i+i/10)")
        arrayTable = firstTable.update("A=i%3").by("A")
        dataFrame = tableToDataFrame(arrayTable,
                                     convertNulls='PASS',
                                     categoricals=None)

        for colName, arrayType in [
            ('MyString', 'io.deephaven.db.tables.dbarrays.DbArray'),
            ('MyChar', 'io.deephaven.db.tables.dbarrays.DbCharArray'),
            ('MyBoolean', 'io.deephaven.db.tables.dbarrays.DbArray'
             ),  # NB: DbBooleanArray is deprecated
            ('MyByte', 'io.deephaven.db.tables.dbarrays.DbByteArray'),
            ('MyShort', 'io.deephaven.db.tables.dbarrays.DbShortArray'),
            ('MyInt', 'io.deephaven.db.tables.dbarrays.DbIntArray'),
            ('MyLong', 'io.deephaven.db.tables.dbarrays.DbLongArray'),
            ('MyFloat', 'io.deephaven.db.tables.dbarrays.DbFloatArray'),
            ('MyDouble', 'io.deephaven.db.tables.dbarrays.DbDoubleArray'),
        ]:
            with self.subTest(
                    msg="type for original column {}".format(colName)):
                self.assertEqual(
                    arrayTable.getColumn(colName).getType().getName(),
                    arrayType)
                self.assertEqual(dataFrame[colName].values.dtype, numpy.object)

        for colName, dtype in [
            ('MyBoolean', numpy.bool_),
            ('MyByte', numpy.int8),
            ('MyShort', numpy.int16),
            ('MyInt', numpy.int32),
            ('MyLong', numpy.int64),
            ('MyFloat', numpy.float32),
            ('MyDouble', numpy.float64),
        ]:
            with self.subTest(
                    msg="type of converted array for {}".format(colName)):
                self.assertTrue(
                    isinstance(dataFrame[colName].values[0], numpy.ndarray))
                self.assertEqual(dataFrame[colName].values[0].dtype, dtype)

        with self.subTest(msg="type of converted array for MyString"):
            self.assertTrue(
                isinstance(dataFrame['MyString'].values[0], numpy.ndarray))
            self.assertTrue(
                dataFrame['MyString'].values[0].dtype.name.startswith(
                    'unicode') or
                dataFrame['MyString'].values[0].dtype.name.startswith('str'))

        # NB: numpy doesn't really have a char type, so it gets treated as an uninterpreted type
        with self.subTest(msg="type of converted array for MyChar"):
            self.assertTrue(
                isinstance(dataFrame['MyChar'].values[0], numpy.ndarray))
            self.assertTrue(
                dataFrame['MyChar'].values[0].dtype.name.startswith('unicode')
                or dataFrame['MyChar'].values[0].dtype.name.startswith('str'))

        # convert back
        backTable = dataFrameToTable(dataFrame, convertUnknownToString=True)
        for colName, arrayType in [
            ('MyString', 'io.deephaven.db.tables.dbarrays.DbArrayDirect'),
            ('MyChar', 'io.deephaven.db.tables.dbarrays.DbCharArrayDirect'),
            ('MyBoolean', 'io.deephaven.db.tables.dbarrays.DbArrayDirect'),
            ('MyByte', 'io.deephaven.db.tables.dbarrays.DbByteArrayDirect'),
            ('MyShort', 'io.deephaven.db.tables.dbarrays.DbShortArrayDirect'),
            ('MyInt', 'io.deephaven.db.tables.dbarrays.DbIntArrayDirect'),
            ('MyLong', 'io.deephaven.db.tables.dbarrays.DbLongArrayDirect'),
            ('MyFloat', 'io.deephaven.db.tables.dbarrays.DbFloatArrayDirect'),
            ('MyDouble',
             'io.deephaven.db.tables.dbarrays.DbDoubleArrayDirect'),
        ]:
            with self.subTest(
                    msg="type for reverted column for {}".format(colName)):
                self.assertEqual(
                    backTable.getColumn(colName).getType().getName(),
                    arrayType)
        with self.subTest(msg="element type for reverted column MyBoolean"):
            self.assertEqual(
                backTable.getColumn('MyBoolean').get(
                    0).getComponentType().getName(), 'java.lang.Boolean')
        with self.subTest(msg="element type for reverted column MyString"):
            self.assertEqual(
                backTable.getColumn('MyString').get(
                    0).getComponentType().getName(), 'java.lang.String')
Example 9
import deephaven.TableTools as tt
import deephaven.Plot as plt


t = tt.emptyTable(50)\
    .update("X = i + 5", "XLow = X -1", "XHigh = X + 1", "Y = Math.random() * 5", "YLow = Y - 1", "YHigh = Y + 1", "USym = i % 2 == 0 ? `AAPL` : `MSFT`")

p = plt.plot("S1", t, "X", "Y").lineColor("black").show()
p2 = plt.plot("S1", t, "X", "Y").plotStyle("bar").gradientVisible(True).show()
p3 = plt.plot(
    "S1", t, "X",
    "Y").plotStyle("scatter").pointColor("black").pointSize(2).show()
p4 = plt.plot("S1", t, "X", "Y").plotStyle("area").seriesColor("red").show()

p3d = plt.plot3d("S1", t, "X", "X", "Y").show()

pBy = plt.plotBy("S1", t, "X", "Y", "USym").show()
p3dBy = plt.plot3dBy("S1", t, "X", "X", "Y", "USym").show()

cp = plt.catPlot("S1", t, "X", "Y").lineColor("black").show()
cp2 = plt.catPlot("S1", t, "X",
                  "Y").plotStyle("bar").gradientVisible(True).show()
cp3 = plt.catPlot(
    "S1", t, "X",
    "Y").plotStyle("scatter").pointColor("black").pointSize(2).show()
cp4 = plt.catPlot("S1", t, "X",
                  "Y").plotStyle("area").seriesColor("red").show()

cp3d = plt.catPlot3d("S1", t, "X", "X", "Y").show()

cpBy = plt.catPlotBy("S1", t, "X", "Y", "USym").show()
    def test_pyobj_field_access(self):
        t = TableTools.emptyTable(10)
        t2 = t.update("SYM = `AAPL-` + (String)pyobj.name", "PRICE = i * 1000").where("PRICE > (int)pyobj.price + 100")
        html_output = TableTools.html(t2)
        self.assertIn("AAPL-GOOG", html_output)
        self.assertIn("2000", html_output)
    def test_wrong_return_type(self):
        with self.assertRaises(Exception):
            t = TableTools.emptyTable(10).view("I=ii", "J=(ii * 2)")\
                .where("vectorized_func_wrong_return_type(I, J)")
Example 12
    def testTableBasics(self):
        """
        Test cases for table creation, and a few other basic table methods:
            diff(), html(), show(), showCommaDelimited(), showWithIndex(), string(),
            roundDecimalColumns(), roundDecimalColumnsExcept(), merge(), mergeSorted()
        """

        tab, tab2, tab3, tab4, tab5, tab6 = None, None, None, None, None, None

        with self.subTest(msg="emptyTable(long)"):
            tab = TableTools.emptyTable(3)
            # populate a few non-trivial columns
            tab = tab.update("intCol=(int)i", "fltCol=(float)i*0.5",
                             "dblCol=(double)i*0.3")

        with self.subTest(msg="newTable(TableDefinition)"):
            # assuming the first test passed...
            tab3 = TableTools.newTable(tab.getDefinition())

        # Essentially table to string methods
        with self.subTest(msg="html test"):
            print("html rendering = \n{}".format(TableTools.html(tab)))

        with self.subTest(msg="show(Table, *cols)"):
            print("show =")
            TableTools.show(tab, "intCol", "dblCol")
        with self.subTest(msg="show(Table, 2, *cols)"):
            print("show & row limit =")
            TableTools.show(tab, 2, "intCol", "dblCol")

        with self.subTest(msg="showCommaDelimited(Table, *cols)"):
            print("showCommaDelimited =")
            TableTools.showCommaDelimited(tab, "intCol", "dblCol")
        with self.subTest(msg="showCommaDelimited(Table, 2, *cols)"):
            print("showCommaDelimited & row limit =")
            TableTools.showCommaDelimited(tab, 2, "intCol", "dblCol")

        with self.subTest(msg="showWithIndex(Table, *cols)"):
            print("showWithIndex =")
            TableTools.showWithIndex(tab, "intCol", "dblCol")
        with self.subTest(msg="showWithIndex(Table, 2, *cols)"):
            print("showWithIndex & row limit =")
            TableTools.showWithIndex(tab, 2, "intCol", "dblCol")

        with self.subTest(msg="string(Table, *cols)"):
            print("string =\n {}".format(
                TableTools.string(tab, "intCol", "dblCol")))
        with self.subTest(msg="string(Table, 2, *cols)"):
            print("string & row limit =\n {}".format(
                TableTools.string(tab, 2, "intCol", "dblCol")))

        with self.subTest(msg="roundDecimalColumns"):
            tab4 = TableTools.roundDecimalColumns(tab)
        with self.subTest(msg="roundDecimalColumns(*cols)"):
            tab5 = TableTools.roundDecimalColumns(tab, "fltCol", "dblCol")
        with self.subTest(msg="roundDecimalColumnsExcept(*cols)"):
            tab6 = TableTools.roundDecimalColumns(tab, "fltCol")

        with self.subTest(msg="diff test of a table with itself"):
            print("diff output of table with itself = \n{}".format(
                TableTools.diff(tab, tab, 3)))
        with self.subTest(
                msg="diff test of a table with rounded version of itself"):
            print("diff output of table with rounded version of itself = \n{}".
                  format(TableTools.diff(tab, tab4, 3)))

        with self.subTest(msg="merge(*tables)"):
            tab4 = TableTools.merge(tab, tab)
        with self.subTest(msg="merge([tables])"):
            tab4 = TableTools.merge([tab, tab])
        with self.subTest(msg="mergeSorted(col, [tables])"):
            tab4 = TableTools.mergeSorted("intCol", [tab, tab])
        with self.subTest(msg="merge(col, *tables)"):
            tab4 = TableTools.mergeSorted("intCol", tab, tab)

        del tab, tab2, tab3, tab4, tab5, tab6
Example 13
    def testCreation(self):
        """
        Test suite for writing a table to disk, reading it back, and deleting it
        """

        table = TableTools.emptyTable(3).update("x=i", "y=(double)(i/10.0)",
                                                "z=(double)(i*i)")
        definition = table.getDefinition()
        fileLocation = os.path.join(self.rootDir, 'table1')
        fileLocation2 = os.path.join(self.rootDir, 'table2')

        # make sure that the test workspace is clean
        if os.path.exists(fileLocation):
            shutil.rmtree(fileLocation)
        if os.path.exists(fileLocation2):
            shutil.rmtree(fileLocation2)
        time.sleep(0.01)  # avoid race condition on file existence...

        # Writing
        with self.subTest(msg="writeTable(Table, String)"):
            TableManagementTools.writeTable(table, fileLocation)
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            shutil.rmtree(fileLocation)
            time.sleep(0.01)  # avoid race condition on file existence...
        with self.subTest(msg="writeTable(Table, File)"):
            TableManagementTools.writeTable(
                table, TableManagementTools.getFileObject(fileLocation))
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            shutil.rmtree(fileLocation)
            time.sleep(0.01)  # avoid race condition on file existence...
        with self.subTest(
                msg="writeTable(Table, String, StorageFormat) - Parquet"):
            TableManagementTools.writeTable(table, fileLocation, 'Parquet')
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            shutil.rmtree(fileLocation)
            time.sleep(0.01)  # avoid race condition on file existence...
        with self.subTest(msg="writeTables(Table[], TableDefinition, File[]"):
            TableManagementTools.writeTables([table, table], definition,
                                             [fileLocation, fileLocation2])
            time.sleep(0.01)  # avoid race condition on file existence...
            self.assertTrue(os.path.exists(fileLocation))
            self.assertTrue(os.path.exists(fileLocation2))

        # Reading
        # TODO (core#322) restore this when readtable works with a definition but no file
        # with self.subTest(msg="readTable(File)"):
        #     table2 = TableManagementTools.readTable(fileLocation)
        with self.subTest(msg="readTable(File, TableDefinition)"):
            # NB: requiring an explicit definition here seems redundant; see the TODO above
            table2 = TableManagementTools.readTable(fileLocation, definition)

        # Delete
        with self.subTest(msg="delete(File)"):
            if os.path.exists(fileLocation):
                TableManagementTools.deleteTable(fileLocation)
                time.sleep(0.01)  # avoid race condition on file existence...
                self.assertFalse(os.path.exists(fileLocation))
            if os.path.exists(fileLocation2):
                TableManagementTools.deleteTable(fileLocation2)
                time.sleep(0.01)  # avoid race condition on file existence...
                self.assertFalse(os.path.exists(fileLocation2))
Example 14
def demo_app(app: ApplicationState):
    print("Running Strict App Demo.")
    size = 42
    app.setField("hello", TableTools.emptyTable(size))
    app.setField("world", TableTools.timeTable("00:00:01"))
    def test_filter(self):
        t = TableTools.emptyTable(10).view(
            "I=ii", "J=(ii * 2)").where("vectorized_func(I, J)")
        html_output = TableTools.html(t)
        self.assertIn("<td>5</td><td>10</td>", html_output)

    def test_part_of_expr(self):
        with self.assertRaises(Exception):
            t = TableTools.emptyTable(10).view(
                "I=ii", "J=(ii * 2)").update("K = 2 * vectorized_func(I, J)")

    def test_column(self):
        t = TableTools.emptyTable(10).view(
            "I=ii", "J=(ii * 2)").update("K = vectorized_func(I, J)")
        html_output = TableTools.html(t)
        self.assertIn("<td>9</td>", html_output)
Example 18
from deephaven import TableTools

# Use QueryScope! Careful; this leaks into the REPL state!
size_qs = 42
hello_qs = TableTools.emptyTable(size_qs)
world_qs = TableTools.timeTable("00:00:01")
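# Hedged usage sketch (assuming, as the comment above implies, that these
# module-level variables are resolved from the query scope inside query strings):
#   derived_qs = hello_qs.update("N = size_qs")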