Example #1
def foo_dispatchers():

    foo = TwoLevelDispatcher('foo', doc='Test dispatcher foo')
    foo_m = Dispatcher('foo_m', doc='Control dispatcher foo_m')

    @foo.register(A1, B1)
    @foo_m.register(A1, B1)
    def foo0(x, y):
        return 0

    @foo.register(A1, B2)
    @foo_m.register(A1, B2)
    def foo1(x, y):
        return 1

    @foo.register(A2, B1)
    @foo_m.register(A2, B1)
    def foo2(x, y):
        return 2

    @foo.register(A2, B2)
    @foo_m.register(A2, B2)
    def foo3(x, y):
        return 3

    @foo.register((A1, A2))
    @foo_m.register((A1, A2))
    def foo4(x):
        return 4

    return foo, foo_m
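
A rough usage sketch (an assumption, not part of the source: A1, A2, B1, B2 are the small test classes these registrations imply):

foo, foo_m = foo_dispatchers()

assert foo(A1(), B1()) == 0    # exact (A1, B1) registration
assert foo(A2(), B2()) == 3    # exact (A2, B2) registration
assert foo(A2()) == 4          # single-argument union (A1, A2) registration
assert foo(A1(), B2()) == foo_m(A1(), B2()) == 1   # control dispatcher agrees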
Example #2
    def __init__(self, f, *, nargs=None, nouts=None, ndefs=None):
        super().__init__(Dispatcher(f.__name__),
                         nargs=get_nargs(f, nargs),
                         nouts=nouts,
                         ndefs=get_ndefs(f, ndefs))
        if self.nargs is None:
            raise TypeError("Explicit nargs is required for multidispatch.")
        # register f itself as the catch-all (object, object, ...) signature
        self.register(*([object] * self.nargs))(f)
Example #3
    def register(self, *types, **kwargs):
        type0 = types[0]

        if isinstance(type0, type):
            type0 = [type0]

        dispatchers = []

        for t in type0:
            if (t, ) in self._meta_dispatcher.funcs:
                dispatcher = self._meta_dispatcher.funcs[(t, )]
            else:
                dispatcher = Dispatcher(f"{self.name}_{t.__name__}")
                self._meta_dispatcher.register(t)(dispatcher)

            dispatchers.append((t, dispatcher))

        def _(func):
            self.add(types, func, **kwargs)
            for t, dispatcher in dispatchers:
                dispatcher.add(tuple([t, *types[1:]]), func, **kwargs)
            return func

        return _
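
In short, the first argument's type picks (or lazily creates) a per-type sub-dispatcher, and the decorated function is recorded both in the flat table and in each sub-dispatcher. A hedged sketch with stand-in types:

# Illustration only; `td` and `bar_numeric` are hypothetical.
td = TwoLevelDispatcher('bar')

@td.register((int, float), str)
def bar_numeric(x, y):
    return (x, y)

# Effect: bar_numeric is stored under (int, str) and (float, str), each inside
# a sub-dispatcher keyed by the first argument's type in td._meta_dispatcher.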
Example #4
from __future__ import absolute_import

from functools import partial

import toolz
from multipledispatch import Dispatcher

import ibis
import ibis.common as com
import ibis.expr.operations as ops

# Individual operation execution
execute_node = Dispatcher(
    'execute_node',
    doc=(
        'Execute an individual operation given the operation and its computed '
        'arguments'),
)


@execute_node.register(ops.Node)
def execute_node_without_scope(node, **kwargs):
    raise com.UnboundExpressionError(
        ('Node of type {!r} has no data bound to it. '
         'You probably tried to execute an expression without a data '
         'source.').format(type(node).__name__))


pre_execute = Dispatcher(
    'pre_execute',
    doc="""\
Example #5
def ibis_schema_to_pandas(schema):
    return list(zip(schema.names, map(ibis_dtype_to_pandas, schema.types)))


convert = Dispatcher(
    'convert',
    doc="""\
Convert `column` to the pandas dtype corresponding to `out_dtype`, where the
dtype of `column` is `in_dtype`.

Parameters
----------
in_dtype : Union[np.dtype, pandas_dtype]
    The dtype of `column`, used for dispatching
out_dtype : ibis.expr.datatypes.DataType
    The requested ibis type of the output
column : pd.Series
    The column to convert

Returns
-------
result : pd.Series
    The converted column
""",
)


@convert.register(DatetimeTZDtype, dt.Timestamp, pd.Series)
def convert_datetimetz_to_timestamp(in_dtype, out_dtype, column):
    output_timezone = out_dtype.timezone
Example #6
    ]

    result = execute_node(
        op,
        *data,
        scope=scope,
        timecontext=timecontext,
        aggcontext=aggcontext,
        clients=clients,
        **kwargs,
    )
    computed = post_execute_(op, result, timecontext=timecontext)
    return Scope({op: computed}, timecontext)


execute = Dispatcher('execute')


@execute.register(ir.Expr)
@trace
def main_execute(
    expr,
    params=None,
    scope=None,
    timecontext: Optional[TimeContext] = None,
    aggcontext=None,
    **kwargs,
):
    """Execute an expression against data that are bound to it. If no data
    are bound, raise an Exception.
Example #7
        # synchronous by default
        with self.client._execute(
            self.compiled_sql,
            results=True,
            query_parameters=self.query_parameters,
        ) as cur:
            result = self._fetch(cur)

        return self._wrap_result(result)


class BigQueryDatabase(Database):
    """A BigQuery dataset."""


bigquery_param = Dispatcher('bigquery_param')


@bigquery_param.register(ir.StructScalar, OrderedDict)
def bq_param_struct(param, value):
    field_params = [bigquery_param(param[k], v) for k, v in value.items()]
    result = bq.StructQueryParameter(param.get_name(), *field_params)
    return result


@bigquery_param.register(ir.ArrayValue, list)
def bq_param_array(param, value):
    param_type = param.type()
    assert isinstance(param_type, dt.Array), str(param_type)

    try:
Example #8
from ibis.backends.base.sql.alchemy import (
    AlchemyExprTranslator,
    fixed_arity,
    reduction,
    sqlalchemy_operation_registry,
    sqlalchemy_window_functions_registry,
    unary,
    varargs,
    variance_reduction,
)
from ibis.backends.base.sql.alchemy.registry import _clip, _gen_string_find

operation_registry = sqlalchemy_operation_registry.copy()
operation_registry.update(sqlalchemy_window_functions_registry)

sqlite_cast = Dispatcher("sqlite_cast")


@sqlite_cast.register(AlchemyExprTranslator, ir.IntegerValue, dt.Timestamp)
def _unixepoch(t, arg, _):
    return sa.func.datetime(t.translate(arg), "unixepoch")


@sqlite_cast.register(AlchemyExprTranslator, ir.StringValue, dt.Timestamp)
def _string_to_timestamp(t, arg, _):
    return sa.func.strftime('%Y-%m-%d %H:%M:%f', t.translate(arg))


@sqlite_cast.register(AlchemyExprTranslator, ir.IntegerValue, dt.Date)
def _integer_to_date(t, arg, _):
    return sa.func.date(sa.func.datetime(t.translate(arg), "unixepoch"))
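
A hedged sketch of how these registrations are consulted; dispatch is on the translator, the value expression, and the target dtype (`t`, `int_col`, and `str_col` are stand-ins):

sqlite_cast(t, int_col, dt.timestamp)  # -> datetime(<translated col>, 'unixepoch')
sqlite_cast(t, str_col, dt.timestamp)  # -> strftime('%Y-%m-%d %H:%M:%f', <translated col>)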
Example #9
    Notes
    -----
    This is used to raise an exception when INT64 types are encountered to
    avoid surprising results due to BigQuery's handling of INT64 types in
    JavaScript UDFs.
    """

    __slots__ = ()


class UDFContext(TypeTranslationContext):
    __slots__ = ()


ibis_type_to_bigquery_type = Dispatcher('ibis_type_to_bigquery_type')


@ibis_type_to_bigquery_type.register(str)
def trans_string_default(datatype):
    return ibis_type_to_bigquery_type(dt.dtype(datatype))


@ibis_type_to_bigquery_type.register(dt.DataType)
def trans_default(t):
    return ibis_type_to_bigquery_type(t, TypeTranslationContext())


@ibis_type_to_bigquery_type.register(str, TypeTranslationContext)
def trans_string_context(datatype, context):
    return ibis_type_to_bigquery_type(dt.dtype(datatype), context)
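
So a plain string takes two hops: it is parsed into an ibis dtype, then re-dispatched with a fresh TypeTranslationContext. A hedged sketch:

# Sketch only; the final type names come from the (dt.DataType, context)
# registrations this excerpt does not show.
ibis_type_to_bigquery_type('double')                # parse, then context dispatch
ibis_type_to_bigquery_type('double', UDFContext())  # explicit context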
Example #10
                assert self.tok is not None
                srid = self.tok.value

            if self._accept(Tokens.COLON):
                if self._accept(Tokens.GEOGRAPHY):
                    geotype = 'geography'
                elif self._accept(Tokens.GEOMETRY):
                    geotype = 'geometry'

            return MultiPolygon(geotype=geotype, srid=srid)

        else:
            raise SyntaxError('Type cannot be parsed: {}'.format(self.text))


dtype = Dispatcher('dtype')

validate_type = dtype


def _get_timedelta_units(timedelta: datetime.timedelta) -> List[str]:
    # pandas Timedelta has more granularity
    if hasattr(timedelta, 'components'):
        unit_fields = timedelta.components._fields
        base_object = timedelta.components
    # datetime.timedelta only stores days, seconds, and microseconds internally
    else:
        unit_fields = ['days', 'seconds', 'microseconds']
        base_object = timedelta

    time_units = []
Example #11
from __future__ import absolute_import, division, print_function

from multipledispatch import Dispatcher
from datashape.dispatch import namespace
from .convert import convert

if 'append' not in namespace:
    namespace['append'] = Dispatcher('append')
append = namespace['append']


@append.register(object, object)
def append_not_found(a, b, **kwargs):
    """ Append one dataset on to another

    Examples
    --------

    >>> data = [1, 2, 3]
    >>> _ = append(data, (4, 5, 6))
    >>> data
    [1, 2, 3, 4, 5, 6]
    """
    raise NotImplementedError("Don't know how to append datasets of type "
                              "%s on to type %s" % (type(b), type(a)))


@append.register(list, list)
def list_to_list(a, b, **kwargs):
    a.extend(b)
    return a
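
With only the registrations in this excerpt, dispatch resolves as follows (hedged sketch; the surrounding library registers many more pairs, which is what makes the doctest above pass):

append([1, 2], [3, 4])   # (list, list) -> list_to_list, extends in place
append([1, 2], (3, 4))   # falls back to (object, object) -> NotImplementedError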
Example #12
from __future__ import absolute_import

from multipledispatch import Dispatcher

# Main interface to execution; ties the following functions together
execute = Dispatcher('execute')

# Individual operation execution
execute_node = Dispatcher('execute_node')

# Compute from the top of the expression downward
execute_first = Dispatcher('execute_first')

# Possibly preload data from the client, given a node
data_preload = Dispatcher('data_preload')

# Given a node, compute a (possibly partial) scope prior to regular execution
# This is useful if parts of the tree structure need to be executed at the
# same time or if there are other reasons to need to interrupt the
# regular depth-first traversal of the tree
pre_execute = Dispatcher('pre_execute')


# Default does nothing
@data_preload.register(object, object)
def data_preload_default(node, data, **kwargs):
    return data


# Default returns an empty scope
@pre_execute.register(object, object)
Example #13
exp log expm1 log10 log1p radians degrees ceil floor trunc isnan'''.split()

reduction_names = '''any all sum min max mean var std'''.split()

__all__ = math_names + reduction_names


types = {builtins: object,
         np: (np.ndarray, np.number),
         pymath: Number,
         blazemath: Expr,
         reductions: Expr}


for funcname in math_names:  # sin, sqrt, ceil, ...
    d = Dispatcher(funcname)

    for module, typ in types.items():
        if hasattr(module, funcname):
            d.add((typ,), getattr(module, funcname))

    namespace[funcname] = d
    locals()[funcname] = d


for funcname in reduction_names:  # any, all, sum, max, ...
    d = Dispatcher(funcname)

    for module, typ in types.items():
        if hasattr(module, funcname):
            d.add((typ,), getattr(module, funcname))
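
After each loop iteration, the namespace entry is a Dispatcher that routes on argument type; a hedged sketch for a name such as 'log' (assuming pymath is Python's math module, as the Number registration suggests):

d = namespace['log']
d(4.0)                # Number     -> pymath.log
d(np.arange(1, 4))    # np.ndarray -> np.log
# an Expr argument would route to blazemath.log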
Example #14
fallback_binary_mappings = {
    'greatest': {
        builtins: max,
        np: np.maximum,
        pymath: max,
    },
    'least': {
        builtins: min,
        np: np.minimum,
        pymath: min,
    }
}


for funcname in math_names:  # sin, sqrt, ceil, ...
    d = Dispatcher(funcname)

    for module, typ in types.items():
        if hasattr(module, funcname):
            d.add((typ,), getattr(module, funcname))

    namespace[funcname] = d
    locals()[funcname] = d


for funcname in binary_math_names:  # hypot, atan2, fmod, ...
    d = Dispatcher(funcname)

    for module, pairs in binary_types.items():
        for pair in pairs:
            if hasattr(module, funcname):
Example #15
types = {builtins: object,
         np: (np.ndarray, np.number),
         pymath: Number}


def switch(funcname, x):
    f = getattr(blazemath, funcname)
    if iscollection(x.dshape):
        return broadcast(f, x)
    else:
        return f(x)


for funcname in math_names:  # sin, sqrt, ceil, ...
    d = Dispatcher(funcname)

    d.add((Expr,), curry(switch, funcname))

    for module, typ in types.items():
        if hasattr(module, funcname):
            d.add((typ,), getattr(module, funcname))

    namespace[funcname] = d
    locals()[funcname] = d


for funcname in reduction_names:  # any, all, sum, max, ...
    d = Dispatcher(funcname)

    d.add((Expr,), getattr(reductions, funcname))
Example #16
                self._expect(Token_Oracle.RPAREN)
            else:
                precision = 9
                scale = 0
            return Number(precision, scale)
        elif self._accept(Token_Oracle.BFILE):
            return BFILE()
        elif self._accept(Token_Oracle.RAW):
            return RAW()
        elif self._accept(Token_Oracle.LONGRAW):
            return LONGRAW()
        else:
            raise SyntaxError('Type cannot be parsed: {}'.format(self.text))


dtype = Dispatcher('dtype')
validate_type = dtype

castable = Dispatcher('castable')


@castable.register(CLOB, CLOB)
def can_cast_clob(source, target, **kwargs):
    return True


@castable.register(NCLOB, NCLOB)
def can_cast_nclob(source, target, **kwargs):
    return True
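
A hedged sketch: `castable` dispatches on the (source, target) dtype pair, and a pair with no registration falls through to multipledispatch's NotImplementedError (assuming these dtypes are no-arg constructible, as BFILE()/RAW() above suggest):

castable(CLOB(), CLOB())   # -> True
castable(CLOB(), RAW())    # unregistered in this excerpt -> NotImplementedError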

Example #17
import ibis.expr.datatypes as dt
import ibis.expr.signature as sig
import ibis.expr.operations as ops

from ibis.pandas.core import scalar_types
from ibis.pandas.dispatch import execute_node

rule_to_python_type = Dispatcher(
    'rule_to_python_type',
    doc="""\
Convert an ibis :class:`~ibis.expr.datatypes.DataType` into a pandas backend
friendly ``multipledispatch`` signature.

Parameters
----------
rule : DataType
    The :class:`~ibis.expr.datatypes.DataType` subclass to map to a pandas
    friendly type.

Returns
-------
Union[Type[U], Tuple[Type[T], ...]]
    A pandas-backend-friendly signature
""",
)


def arguments_from_signature(signature, *args, **kwargs):
    """Validate signature against `args` and `kwargs` and return the kwargs
    asked for in the signature

    Parameters
Example #18
        """
        return np.stack([
            processor.process(sample_rate, input_samples)
            for processor in self.processors
        ], 1)


################
# implementation
################

# simplify a single track spec; type TrackSpec -> TrackSpec
# The return value should have the same effect as the parameter. This is
# applied to just the top-level track-spec -- the simplification function for
# each type should apply this recursively to any input track specs.
_simplify_track_spec = Dispatcher("_simplify_track_spec")

# build a processor for a single track spec; type: TrackSpec -> TrackProcessorBase
# The track spec must have been simplified before this is called; this allows
# processors to ignore some cases which can be simplified away.
_track_spec_processor = Dispatcher("track_processor")


@_simplify_track_spec.register(TrackSpec)
def _simplify_base(track_spec):
    """If no simplification is specified for a type, do nothing."""
    return track_spec
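
Following the contract in the comment above, a composite spec's simplifier recurses into its children; a hedged sketch with a hypothetical MixSpec type:

# MixSpec is a stand-in; only TrackSpec appears in this excerpt.
@_simplify_track_spec.register(MixSpec)
def _simplify_mix(track_spec):
    # simplify each input track spec recursively, then rebuild
    return MixSpec([_simplify_track_spec(c) for c in track_spec.children])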


# silent
Example #19
import shutil
import numpy as np

from contextlib import contextmanager
from multiprocessing.pool import ThreadPool

from multipledispatch import Dispatcher

from datashape import dshape, Record
from datashape.discovery import is_zero_time

from toolz import pluck, get, curry, keyfilter

from .compatibility import unicode

sample = Dispatcher('sample')


def iter_except(func, exception, first=None):
    """Call a `func` repeatedly until `exception` is raised. Optionally call
    `first` first.

    Parameters
    ----------
    func : callable
        Repeatedly call this until `exception` is raised.
    exception : Exception
        Stop calling `func` when this is raised.
    first : callable, optional, default ``None``
        Call this first if it isn't ``None``.
Example #21
        'uint8': uint8,
        'uint16': uint16,
        'uint32': uint32,
        'uint64': uint64,
        'float16': float16,
        'float32': float32,
        'float64': float64,
        'double': double,
        'str': string,
        'datetime64': timestamp,
        'datetime64[ns]': timestamp,
        'timedelta64': interval,
        'timedelta64[ns]': Interval('ns')
    })

dtype = Dispatcher('dtype')

validate_type = dtype


@dtype.register(object)
def default(value):
    raise TypeError('Value {!r} is not a valid type or string'.format(value))


@dtype.register(DataType)
def from_ibis_dtype(value):
    return value


@dtype.register(np.dtype)
Example #22
                    outputs[i_when, i_which, :] = (self.ode_system.y_dx(which[i_which], when[i_when], states)
                                                   @ sensitivities
                                                   + self.ode_system.y_dk(which[i_which], when[i_when], states))
            return outputs


# Multiple dispatch contraption for simulate
def simulate(model: Model, experiments: Union[Experiment, List[Experiment]] = InitialValueExperiment(), **kwargs) -> \
        Union[Simulation, List[Simulation]]:
    if isinstance(experiments, Experiment):
        return simulate.dispatcher(model, experiments, **kwargs)
    else:
        return [simulate.dispatcher(model, experiment, **kwargs) for experiment in experiments]


simulate.dispatcher = Dispatcher('simulate')


@simulate.dispatcher.register(Model, InitialValueExperiment)
def simulate_analytic_initial(model: Model, experiment: InitialValueExperiment, *,
                              final_time: float = 0.0, parameters: List[str] = ()):
    system = update(model, experiment.variant)
    return BioluciaSystemSimulation(system, final_time, parameters)
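
A hedged usage sketch of the wrapper-plus-dispatcher pattern (exp_a and exp_b are hypothetical Experiment instances):

sim = simulate(model)                   # default InitialValueExperiment -> one Simulation
sims = simulate(model, [exp_a, exp_b])  # list input dispatches per experiment -> list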


@simulate.dispatcher.register(Model, SteadyStateExperiment)
def simulate_analytic_steady_state(model: Model, experiment: SteadyStateExperiment, *,
                                   final_time: float = 0.0, parameters: List[str] = ()):
    starter = update(model, update(experiment.starter))
    system = update(model, experiment.variant)
Example #23
    def bind_class(self, cls):
        new = Dispatcher(self.name, self.doc)
        for ts, name in self.funcs.items():
            new.add((object, *ts), getattr(cls, name))
        return new
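
Note the rebinding only makes sense if this registry maps signatures to method names; a hedged illustration:

# Hypothetical contents: if self.funcs == {(int,): 'visit_int'} and cls defines
# visit_int(self, x), the rebuilt dispatcher holds (object, int) -> cls.visit_int,
# so it can be called as new(instance, 42).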
Example #24
def pairwise_distance(x, *, metric, **kwargs):
    """Compute pairwise distance between columns of `x`.

    Parameters
    ----------
    x : array_like
    metric : str

    Returns
    -------
    out : array_like, float
        Condensed distance matrix.

    """

    # Check inputs.
    check_array_like(x, ndim=2)

    # Dispatch.
    out = dispatch_pairwise_distance(x, metric=metric, **kwargs)

    return out


dispatch_pairwise_distance = Dispatcher("dispatch_pairwise_distance")
dispatch_map_block_cityblock = Dispatcher("map_block_cityblock")
dispatch_map_block_sqeuclidean = Dispatcher("map_block_sqeuclidean")
dispatch_map_block_hamming = Dispatcher("map_block_hamming")
dispatch_map_block_jaccard = Dispatcher("map_block_jaccard")
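
Backends would register an implementation per array type; a hedged sketch of a NumPy backend (the scipy call is an assumption, not from the source):

import numpy as np
from scipy.spatial.distance import pdist  # assumed backend implementation

@dispatch_pairwise_distance.register(np.ndarray)
def _pairwise_numpy(x, *, metric, **kwargs):
    # columns of x are the observations, so transpose before the condensed pdist
    return pdist(x.T, metric=metric, **kwargs)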
Example #25
from ..dispatch import execute_node
from ..execution import constants, util
from ..execution.util import coerce_to_output

compute_projection = Dispatcher(
    'compute_projection',
    doc="""\
Compute a projection, dispatching on whether we're computing a scalar, column,
or table expression.

Parameters
----------
expr : Union[ir.ScalarExpr, ir.ColumnExpr, ir.TableExpr]
parent : ops.Selection
data : pd.DataFrame
scope : Scope
timecontext : Optional[TimeContext]

Returns
-------
value : scalar, pd.Series, pd.DataFrame

Notes
-----
:class:`~ibis.expr.types.ScalarExpr` instances occur when a specific column
projection is a window operation.
""",
)


@compute_projection.register(ir.ScalarExpr, ops.Selection, pd.DataFrame)
def compute_projection_scalar_expr(
Example #26
}

PRODUCT_INVERSES = {
    mul: safediv,
    add: safesub,
}

######################
# Numeric Array Ops
######################

all = Op(np.all)
amax = Op(np.amax)
amin = Op(np.amin)
any = Op(np.any)
astype = Dispatcher("ops.astype")
cat = Dispatcher("ops.cat")
clamp = Dispatcher("ops.clamp")
diagonal = Dispatcher("ops.diagonal")
einsum = Dispatcher("ops.einsum")
full_like = Op(np.full_like)
prod = Op(np.prod)
stack = Dispatcher("ops.stack")
sum = Op(np.sum)
transpose = Dispatcher("ops.transpose")

array = (np.ndarray, np.generic)


@astype.register(array, str)
def _astype(x, dtype):
Example #27
        return compiled


def _extract_field(sql_attr):
    def extract_field_formatter(translator, expr):
        op = expr.op()
        arg = translator.translate(op.args[0])
        if sql_attr == 'epochseconds':
            return f'UNIX_SECONDS({arg})'
        else:
            return f'EXTRACT({sql_attr} from {arg})'

    return extract_field_formatter


bigquery_cast = Dispatcher('bigquery_cast')


@bigquery_cast.register(str, dt.Timestamp, dt.Integer)
def bigquery_cast_timestamp_to_integer(compiled_arg, from_, to):
    return 'UNIX_MICROS({})'.format(compiled_arg)


@bigquery_cast.register(str, dt.DataType, dt.DataType)
def bigquery_cast_generate(compiled_arg, from_, to):
    sql_type = ibis_type_to_bigquery_type(to)
    return 'CAST({} AS {})'.format(compiled_arg, sql_type)
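
A hedged sketch of the (compiled SQL, from-type, to-type) triple dispatch:

# Sketch only; the generic branch's output depends on ibis_type_to_bigquery_type.
bigquery_cast('`ts`', dt.timestamp, dt.int64)  # -> 'UNIX_MICROS(`ts`)'
bigquery_cast('`x`', dt.string, dt.float64)    # -> 'CAST(`x` AS ...)' via the generic rule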


def _cast(translator, expr):
    op = expr.op()
Example #28
    def has_schema(self):
        return True

    def equals(self, other, cache=None):
        return type(self) == type(other) and self.schema.equals(other.schema,
                                                                cache=cache)

    def root_tables(self):
        return [self]

    @property
    def schema(self):
        raise NotImplementedError


schema = Dispatcher('schema')
infer = Dispatcher('infer')


@schema.register(Schema)
def identity(s):
    return s


@schema.register(collections.abc.Mapping)
def schema_from_mapping(d):
    return Schema.from_dict(d)
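
A hedged sketch of the coercions these registrations provide:

schema(existing_schema)                  # identity, as registered above
schema({'a': 'int64', 'b': 'string'})    # Mapping -> Schema.from_dict
# an iterable of (name, type) pairs is handled by schema_from_pairs below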


@schema.register(collections.abc.Iterable)
def schema_from_pairs(lst):
Example #29
from multipledispatch import Dispatcher

import ibis.backends.pandas.core as core_dispatch
import ibis.backends.pandas.dispatch as pandas_dispatch
from ibis.backends.dask.trace import TraceTwoLevelDispatcher

execute_node = TraceTwoLevelDispatcher('execute_node')
for types, func in pandas_dispatch.execute_node.funcs.items():
    execute_node.register(*types)(func)

execute = Dispatcher('execute')
execute.funcs.update(core_dispatch.execute.funcs)

pre_execute = Dispatcher('pre_execute')
pre_execute.funcs.update(core_dispatch.pre_execute.funcs)

execute_literal = Dispatcher('execute_literal')
execute_literal.funcs.update(core_dispatch.execute_literal.funcs)

post_execute = Dispatcher('post_execute')
post_execute.funcs.update(core_dispatch.post_execute.funcs)
Example #30
    optional,
    tuple_of,
    validator,
)
from ...util import frozendict
from .. import types as ir

try:
    import shapely.geometry

    IS_SHAPELY_AVAILABLE = True
except ImportError:
    IS_SHAPELY_AVAILABLE = False


dtype = Dispatcher('dtype')

validate_type = dtype


@dtype.register(object)
def default(value, **kwargs) -> DataType:
    raise IbisTypeError(f'Value {value!r} is not a valid datatype')


@dtype.register(str)
def from_string(value: str) -> DataType:
    try:
        return parse_type(value)
    except SyntaxError:
        raise IbisTypeError(f'{value!r} cannot be parsed as a datatype')
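
Hedged usage of the two registrations shown:

dtype('array<int64>')   # parsed by parse_type into the corresponding ibis type
dtype(object())         # object fallback -> IbisTypeError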
Example #31
        if not callable(function):
            raise TypeError(
                'Object {} is not callable or a string'.format(function)
            )

        return grouped_data.apply(_apply(function, args, kwargs))


class Transform(AggregationContext):
    __slots__ = ()

    def agg(self, grouped_data, function, *args, **kwargs):
        return grouped_data.transform(function, *args, **kwargs)


compute_window_spec = Dispatcher('compute_window_spec')


@compute_window_spec.register(ir.Expr, dt.Interval)
def compute_window_spec_interval(expr, dtype):
    value = ibis.pandas.execute(expr)
    return pd.tseries.frequencies.to_offset(value)


@compute_window_spec.register(ir.Expr, dt.DataType)
def compute_window_spec_expr(expr, _):
    return ibis.pandas.execute(expr)


@compute_window_spec.register(object, type(None))
def compute_window_spec_default(obj, _):
Example #32
        return compiled


def _extract_field(sql_attr):
    def extract_field_formatter(translator, expr):
        op = expr.op()
        arg = translator.translate(op.args[0])
        if sql_attr == "epochseconds":
            return f"UNIX_SECONDS({arg})"
        else:
            return f"EXTRACT({sql_attr} from {arg})"

    return extract_field_formatter


bigquery_cast = Dispatcher("bigquery_cast")


@bigquery_cast.register(str, dt.Timestamp, dt.Integer)
def bigquery_cast_timestamp_to_integer(compiled_arg, from_, to):
    """Convert TIMESTAMP to INT64 (seconds since Unix epoch)."""
    return "UNIX_MICROS({})".format(compiled_arg)


@bigquery_cast.register(str, dt.DataType, dt.DataType)
def bigquery_cast_generate(compiled_arg, from_, to):
    """Cast to desired type."""
    sql_type = ibis_type_to_bigquery_type(to)
    return "CAST({} AS {})".format(compiled_arg, sql_type)

Example #33
        # synchronous by default
        with self.client._execute(
            self.compiled_sql,
            results=True,
            query_parameters=self.query_parameters,
        ) as cur:
            result = self._fetch(cur)

        return self._wrap_result(result)


class BigQueryDatabase(Database):
    """A BigQuery dataset."""


bigquery_param = Dispatcher("bigquery_param")


@bigquery_param.register(ir.StructScalar, OrderedDict)
def bq_param_struct(param, value):
    field_params = [bigquery_param(param[k], v) for k, v in value.items()]
    result = bq.StructQueryParameter(param.get_name(), *field_params)
    return result


@bigquery_param.register(ir.ArrayValue, list)
def bq_param_array(param, value):
    param_type = param.type()
    assert isinstance(param_type, dt.Array), str(param_type)

    try:
Example #34
    ]

#_BINARY_LOGICAL_OPERATORS = [
    #'and',
    #'or',
    #'xor',
    #]

_BINARY_OPERATORS = _BINARY_OPERATORS_WITH_REVERSE + _BINARY_OPERATORS_WITHOUT_REVERSE #+ _BINARY_LOGICAL_OPERATORS

import functools

# Binary operators using multiple dispatch
for op in _BINARY_OPERATORS:
    # Create a dispatcher for each operator
    D = Dispatcher(op)
    # And store the dispatcher on this module
    setattr(_thismodule, op, D)
    # Furthermore, we like to add (object, object) operations
    D.add((object, object), getattr(operator, op))


# Logical AND

@dispatch(object, object)
def logical_and(a, b):
    return a and b

@dispatch(object, np.ndarray)
def logical_and(a, b):
    return np.logical_and(a, b)
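
Hedged usage note: the `@dispatch` form registers into a shared namespace, so the two definitions above become one `logical_and` dispatcher:

logical_and(0, 5)                            # plain objects -> Python's `and` -> 0
logical_and(True, np.array([True, False]))   # ndarray second arg -> np.logical_and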