Example #1
def now() -> DateTime:
    """ Provides the current datetime.

    Returns:
        DateTime

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.currentTime()
    except Exception as e:
        raise DHError(e) from e
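A minimal usage sketch for now(); the deephaven.time import path is an assumption and may differ by release.

# Assumed import path; adjust to wherever now() lives in your deployment.
from deephaven.time import now

current = now()   # a DateTime for the current instant
print(current)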
Example #2
def nanos_to_datetime(ns: int) -> DateTime:
    """ Converts a value of nanoseconds from Epoch to a DateTime.

    Args:
        ns (int): the nanoseconds-since-Epoch value to convert

    Returns:
        DateTime

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.nanosToTime(ns)
    except Exception as e:
        raise DHError(e) from e
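A short sketch converting an Epoch offset back to a DateTime; the deephaven.time import path is an assumption.

# Assumed import path for the time helpers.
from deephaven.time import nanos_to_datetime

ns = 1_600_000_000_000_000_000   # nanoseconds since the Epoch
dt = nanos_to_datetime(ns)       # the corresponding DateTime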
Example #3
def to_html(table: Table) -> str:
    """  Returns a table formatted as an HTML string. Limit use to small tables to avoid running out of memory.

    Returns:
        a HTML string

    Raises:
        DHError
    """
    try:
        return _JTableTools.html(table.j_table)
    except Exception as e:
        raise DHError(e, "table to_html failed") from e
Example #4
def query_performance_log() -> Table:
    """ Returns a table with Deephaven query performance data. Performance data for individual sub-operations is
    available from calling `query_operation_performance_log`.

    Returns:
        a Table

    Raises:
        DHError
    """
    try:
        return Table(j_table=_JTableLoggers.queryPerformanceLog())
    except Exception as e:
        raise DHError(e, "failed to obtain the query performance log table.") from e
Example #5
def default_calendar_name() -> str:
    """ Returns the default calendar name which is set by the 'Calendar.default' property in the configuration file
    that the Deephaven server is started with.

    Returns:
        the default business calendar name

    Raises:
        DHError
    """
    try:
        return _JCalendars.getDefaultName()
    except Exception as e:
        raise DHError(e, "failed to get the default calendar name.") from e
Example #6
def delete(path: str) -> None:
    """ Deletes a Parquet table on disk.

    Args:
        path (str): path to delete

    Raises:
        DHError
    """
    try:
        _JParquetTools.deleteTable(_JFile(path))
    except Exception as e:
        raise DHError(
            e, f"failed to delete a parquet table: {path} on disk.") from e
Example #7
def calendar_names() -> List[str]:
    """ Returns the names of all available calendars.

    Returns:
        a list of names of all available calendars

    Raises:
        DHError
    """
    try:
        return list(_JCalendars.calendarNames())
    except Exception as e:
        raise DHError(e,
                      "failed to obtain the available calendar names.") from e
Example #8
def server_state_log() -> Table:
    """ Returns a table with memory utilization, update graph processor and garbage collection stats
        sampled on a periodic basis.

    Returns:
        a Table

    Raises:
        DHError
    """
    try:
        return Table(j_table=_JTableLoggers.serverStateLog())
    except Exception as e:
        raise DHError(e, "failed to obtain the server state log table.") from e
Example #9
def update_performance_log() -> Table:
    """ Returns a table with Deephaven update performance data.

    Returns:
        a Table

    Raises:
        DHError
    """
    try:
        return Table(j_table=_JTableLoggers.updatePerformanceLog())
    except Exception as e:
        raise DHError(
            e, "failed to obtain the update performance log table.") from e
Example #10
def process_metrics_log() -> Table:
    """ Returns a table with metrics collected for the current Deephaven engine process.

    Returns:
        a Table

    Raises:
        DHError
    """
    try:
        return Table(j_table=_JTableLoggers.processMetricsLog())
    except Exception as e:
        raise DHError(e,
                      "failed to obtain the process metrics log table.") from e
Example #11
def query_operation_performance_log() -> Table:
    """ Returns a table with Deephaven performance data for individual subqueries. Performance data for the entire query
    is available from calling 'query_performance_log'.

    Returns:
        a Table

    Raises:
        DHError
    """
    try:
        return Table(j_table=_JTableLoggers.queryOperationPerformanceLog())
    except Exception as e:
        raise DHError(e, "failed to obtain the query operation performance log table.") from e
Example #12
def batch_write(tables: List[Table],
                paths: List[str],
                col_definitions: List[Column],
                col_instructions: List[ColumnInstruction] = None,
                compression_codec_name: str = None,
                max_dictionary_keys: int = None,
                grouping_cols: List[str] = None):
    """ Writes tables to disk in parquet format to a supplied set of paths.

    If you specify grouping columns, there must already be grouping information for those columns in the sources.
    This can be accomplished with .groupBy(<grouping columns>).ungroup() or .sort(<grouping column>).

    Note that either all of the tables are written out successfully or none are.

    Args:
        tables (List[Table]): the source tables
        paths (List[str]): the destination paths. Any non-existing directories in the paths provided are
            created. If there is an error, any intermediate directories previously created are removed; note that this
            makes this method unsafe for concurrent use
        col_definitions (List[Column]): the column definitions to use
        col_instructions (List[ColumnInstruction]): instructions for customizations while writing
        compression_codec_name (str): the compression codec to use, if not specified, defaults to SNAPPY
        max_dictionary_keys (int): the maximum dictionary keys allowed, if not specified, defaults to 2^20 (1,048,576)
        grouping_cols (List[str]): the grouping column names

    Raises:
        DHError
    """
    try:
        write_instructions = _build_parquet_instructions(
            col_instructions=col_instructions,
            compression_codec_name=compression_codec_name,
            max_dictionary_keys=max_dictionary_keys,
            for_read=False)

        table_definition = _JTableDefinition.of(
            [col.j_column_definition for col in col_definitions])

        if grouping_cols:
            _JParquetTools.writeParquetTables([t.j_table for t in tables],
                                              table_definition,
                                              write_instructions,
                                              _j_file_array(paths),
                                              grouping_cols)
        else:
            _JParquetTools.writeTables([t.j_table for t in tables],
                                       table_definition, _j_file_array(paths))
    except Exception as e:
        raise DHError(e,
                      "write multiple tables to parquet data failed.") from e
Example #13
    def __init__(self,
                 family: str = 'Arial',
                 style: FontStyle = FontStyle.PLAIN,
                 size: int = 8):
        """ Creates a Font object.

        Args:
            family (str): the font family, defaults to 'Arial'
            style (FontStyle): the font style, defaults to FontStyle.PLAIN
            size (int): the point size of the Font, defaults to 8

        Raises:
            DHError
        """
        try:
            self.j_font = _JFont.font(family, style.value, size)
        except Exception as e:
            raise DHError(e, "failed to create a font.") from e
Example #14
def write(table: Table, path: str, cols: List[str] = []) -> None:
    """Write a table to a standard CSV file.

    Args:
        table (Table): the source table
        path (str): the path of the CSV file
        cols (List[str]): the names of the columns to be written out

    Raises:
        DHError
    """
    try:
        _JCsvTools.writeCsv(table.j_table, False, path, *cols)
    except Exception as e:
        raise DHError(e, "write csv failed.") from e
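A sketch writing a table to CSV; exposing this function as deephaven.csv is an assumption.

# Import paths and helpers are assumptions; adjust to your deployment.
from deephaven import new_table
from deephaven.column import int_col, string_col
from deephaven import csv

t = new_table([int_col("X", [1, 2, 3]), string_col("Sym", ["A", "B", "C"])])
csv.write(t, "/tmp/t.csv")                      # write every column
csv.write(t, "/tmp/t_sym.csv", cols=["Sym"])    # write only the listed columns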
Example #15
def import_class(name: str) -> None:
    """Adds a Java class to the query library, making it available to be used in Deephaven query strings (formulas
    and conditional expressions). The class must be reachable in the Deephaven server's classpath.

    Args:
        name (str): the fully qualified name of the Java class

    Raises:
        DHError
    """
    try:
        j_class = _JClass.forName(name)
        _JQueryLibrary.importClass(j_class)
    except Exception as e:
        raise DHError(e, "failed to add the Java class to the Query Library.") from e
Example #16
def to_table(df: pandas.DataFrame, cols: List[str] = None) -> Table:
    """  Creates a new table from a pandas.DataFrame.

    Args:
        df (DataFrame): the Pandas DataFrame instance
        cols (List[str]): the DataFrame column names to include, default is None, which means all columns

    Returns:
        a Deephaven table

    Raises:
        DHError
    """

    try:
        if not cols:
            cols = list(df)
        else:
            diff_set = set(cols) - set(list(df))
            if diff_set:
                raise DHError(message=f"columns - {list(diff_set)} not found")

        input_cols = []
        for col in cols:
            np_array = df.get(col).values
            dtype = dtypes.from_np_dtype(np_array.dtype)
            np_array = _map_na(np_array)
            input_cols.append(_make_input_column(col, np_array, dtype))

        return new_table(cols=input_cols)
    except DHError:
        raise
    except Exception as e:
        raise DHError(
            e, "failed to create a Deephaven Table from a Pandas DataFrame."
        ) from e
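A sketch of to_table(); the deephaven.pandas import path is an assumption.

# Assumed import path; adjust to your deployment.
import pandas as pd
from deephaven.pandas import to_table

df = pd.DataFrame({"Sym": ["AAPL", "MSFT"], "Price": [150.0, 250.0]})
t_all = to_table(df)                 # include every DataFrame column
t_sym = to_table(df, cols=["Sym"])   # include only the listed columns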
Example #17
    def restrict_sort_to(self, cols: Union[str, Sequence[str]]):
        """The restrict_sort_to method only allows sorting on specified table columns. This can be useful to prevent
        users from accidentally performing expensive sort operations as they interact with tables in the UI.

        Args:
            cols (Union[str, Sequence[str]]): the column name(s)

        Raises:
            DHError
        """
        try:
            cols = to_sequence(cols)
            return self.j_table.restrictSortTo(*cols)
        except Exception as e:
            raise DHError(e, "table restrict_sort_to operation failed.") from e
Example #18
    def __post_init__(self):
        """ Creates the backing Java column from the input data.

        Raises:
            DHError
        """
        try:
            if self.input_data is None:
                self.j_column = _JColumn.empty(self.j_column_header)
            else:
                if self.data_type.is_primitive:
                    self.j_column = _JColumn.ofUnsafe(
                        self.name, dtypes.array(self.data_type,
                                                self.input_data))
                else:
                    self.j_column = _JColumn.of(
                        self.j_column_header,
                        dtypes.array(self.data_type, self.input_data))
        except Exception as e:
            raise DHError(e, "failed to create an InputColumn.") from e
Example #19
def write(table: Table,
          path: str,
          col_definitions: List[Column] = None,
          col_instructions: List[ColumnInstruction] = None,
          compression_codec_name: str = None,
          max_dictionary_keys: int = None) -> None:
    """ Write a table to a Parquet file.

    Args:
        table (Table): the source table
        path (str): the destination file path; the file name should end in a ".parquet" extension. If the path
            includes non-existing directories they are created. If there is an error, any intermediate directories
            previously created are removed; note this makes this method unsafe for concurrent use
        col_definitions (List[Column]): the column definitions to use, default is None
        col_instructions (List[ColumnInstruction]): instructions for customizations while writing, default is None
        compression_codec_name (str): the compression codec to use; if not specified, defaults to SNAPPY
        max_dictionary_keys (int): the maximum dictionary keys allowed, if not specified, defaults to 2^20 (1,048,576)

    Raises:
        DHError
    """
    try:
        write_instructions = _build_parquet_instructions(
            col_instructions=col_instructions,
            compression_codec_name=compression_codec_name,
            max_dictionary_keys=max_dictionary_keys,
            for_read=False)

        table_definition = None
        if col_definitions is not None:
            table_definition = _JTableDefinition.of(
                [col.j_column_definition for col in col_definitions])

        if table_definition:
            if write_instructions:
                _JParquetTools.writeTable(table.j_table, path,
                                          table_definition, write_instructions)
            else:
                _JParquetTools.writeTable(table.j_table, _JFile(path),
                                          table_definition)
        else:
            if write_instructions:
                _JParquetTools.writeTable(table.j_table, _JFile(path),
                                          write_instructions)
            else:
                _JParquetTools.writeTable(table.j_table, path)
    except Exception as e:
        raise DHError(e, "failed to write to parquet data.") from e
Example #20
def new_table(cols: List[InputColumn]) -> Table:
    """Creates an in-memory table from a list of input columns. Each column must have an equal number of elements.

    Args:
        cols (List[InputColumn]): a list of InputColumn

    Returns:
        a Table

    Raises:
        DHError
    """
    try:
        return Table(j_table=_JTableFactory.newTable(*[col.j_column for col in cols]))
    except Exception as e:
        raise DHError(e, "failed to create a new time table.") from e
Example #21
def nanos_to_millis(ns: int) -> int:
    """ Converts nanoseconds to milliseconds.

    Args:
        ns (int): the value of nanoseconds to convert

    Returns:
        int: the equivalent value in milliseconds, or NULL_LONG if ns is NULL_LONG

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.nanosToMillis(ns)
    except Exception as e:
        raise DHError(e) from e
Example #22
def import_package(name: str) -> None:
    """Adds all the public classes and interfaces of a Java package to the query library, making them available to be
    used in Deephaven query strings (formulas and conditional expressions). The package must be reachable in the
    Deephaven server's classpath.

    Args:
        name (str): the fully qualified name of the Java package

    Raises:
        DHError
    """
    try:
        j_package = _JPackage.getPackage(name)
        _JQueryLibrary.importPackage(j_package)
    except Exception as e:
        raise DHError(e, "failed to add the Java package into to the Query Library.") from e
Example #23
def format_nanos(ns: int) -> str:
    """ Returns a string DateTime representation formatted as "yyyy-MM-ddThh:mm:ss.SSSSSSSSS".

    Args:
        ns (int): the number of nanoseconds

    Returns:
        str

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.format(ns)
    except Exception as e:
        raise DHError(e) from e
Example #24
    def is_last_business_day_of_week(self, date: str) -> bool:
        """ Returns if the specified date is the last business day of the week.

        Args:
            date (str): the date

        Returns:
            bool

        Raises:
            DHError
        """
        try:
            return self.j_calendar.isLastBusinessDayOfWeek(date)
        except Exception as e:
            raise DHError(e, "failed in is_last_business_day_of_week.") from e
Example #25
def millis(dt: DateTime) -> int:
    """ Returns milliseconds since Epoch for a DateTime value.

    Args:
        dt (DateTime): the DateTime for which the milliseconds offset should be returned

    Returns:
        int: milliseconds since the Epoch, or NULL_LONG if dt is None

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.millis(dt)
    except Exception as e:
        raise DHError(e) from e
Example #26
def millis_to_nanos(ms: int) -> int:
    """ Converts milliseconds to nanoseconds.

    Args:
        ms (int): the milliseconds value to convert

    Returns:
        int: the equivalent value in nanoseconds, or NULL_LONG if ms is NULL_LONG

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.millisToNanos(ms)
    except Exception as e:
        raise DHError(e) from e
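A sketch round-tripping between milliseconds and nanoseconds with millis_to_nanos() and the nanos_to_millis() shown earlier; the import path is an assumption.

# Assumed import path for the time helpers.
from deephaven.time import millis_to_nanos, nanos_to_millis

ms = 1_600_000_000_000
ns = millis_to_nanos(ms)           # 1_600_000_000_000_000_000
assert nanos_to_millis(ns) == ms   # the conversions are inverses for non-null inputs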
Example #27
    def next_non_business_day(self, date: str) -> str:
        """ Gets the non-business day after the given date.

        Args:
            date (str): the date of interest

        Returns:
            str

        Raises:
            DHError
        """
        try:
            return self.j_calendar.nextNonBusinessDay(date)
        except Exception as e:
            raise DHError(e, "failed in next_non_business_day.") from e
Example #28
    def previous_business_day(self, date: str) -> str:
        """ Gets the business day prior to the given date.

        Args:
            date (str): the date of interest

        Returns:
            str

        Raises:
            DHError
        """
        try:
            return self.j_calendar.previousBusinessDay(date)
        except Exception as e:
            raise DHError(e, "failed in previous_business_day.") from e
Example #29
def millis_to_datetime(ms: int) -> DateTime:
    """ Converts a value of milliseconds from Epoch in the UTC time zone to a DateTime.

    Args:
        ms (int): the milliseconds value to convert

    Returns:
        DateTime

    Raises:
        DHError
    """
    try:
        return _JDateTimeUtils.millisToTime(ms)
    except Exception as e:
        raise DHError(e) from e
Example #30
    def day_of_week(self, date: str) -> DayOfWeek:
        """ The day of week for the given date.

        Args:
            date (str): the date of interest

        Returns:
            DayOfWeek

        Raises:
            DHError
        """
        try:
            return DayOfWeek(self.j_calendar.dayOfWeek(date))
        except Exception as e:
            raise DHError(e, "failed in day_of_week.") from e