Example #1
def data_descriptor_from_cffi(ffi, cdata, writable):
    """
    Parameters
    ----------
    ffi : cffi.FFI
        The cffi namespace which contains the cdata.
    cdata : cffi.CData
        The cffi data object which owns the data.
    writable : bool
        Should be True if the data is writable, False
        if it is read-only.

    Returns
    -------
    DyNDDataDescriptor
        A data descriptor wrapping a dynd view of the cdata's memory.
    """
    if not isinstance(cdata, ffi.CData):
        raise TypeError('object is not a cffi.CData object, has type %s' %
                        type(cdata))
    owner = (ffi, cdata)
    # Get the raw pointer out of the cdata as an integer
    ptr = int(ffi.cast('uintptr_t', ffi.cast('char *', cdata)))
    ds = datashape.from_cffi(ffi, ffi.typeof(cdata))
    if (isinstance(ds, datashape.DataShape)
            and isinstance(ds[0], datashape.TypeVar)):
        # If the outermost dimension is an array without fixed
        # size, get its size from the data
        ds = datashape.DataShape(*(datashape.Fixed(len(cdata)), ) + ds[1:])
    access = "readwrite" if writable else "readonly"
    dyndarr = _lowlevel.array_from_ptr(ndt.type(str(ds)), ptr, owner, access)
    return DyNDDataDescriptor(dyndarr)
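A minimal usage sketch, assuming cffi is installed and that the datashape, ndt, _lowlevel, and DyNDDataDescriptor names used above are already imported in the surrounding module; the buffer type and size are invented for illustration:

import cffi

ffi = cffi.FFI()
cdata = ffi.new('int32_t[10]')   # a small writable buffer owned by cffi
dd = data_descriptor_from_cffi(ffi, cdata, writable=True)
# dd.dshape is expected to describe the buffer, e.g. something like 10 * int32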
Example #2
def test_array_subarray():
    assert (datashape.dshape('3 * int32').subarray(0) == datashape.dshape(
        '3 * int32'))
    assert (datashape.dshape('3 * int32').subarray(1) == datashape.DataShape(
        datashape.int32))
    assert (str(datashape.dshape('3 * var * M * int32').subarray(2)) == str(
        datashape.dshape('M * int32')))
    assert (str(datashape.dshape('3 * var * M * float64').subarray(3)) == str(
        datashape.float64))
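For reference, subarray(i) drops the first i dimensions of a dshape; a quick sketch of the behaviour the asserts above cover:

import datashape

ds = datashape.dshape('3 * var * M * int32')
print(ds.subarray(0))   # 3 * var * M * int32
print(ds.subarray(2))   # M * int32
print(ds.subarray(3))   # int32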
Example #3
def discover_sqlcontext(ctx):
    try:
        table_names = list(map(str, ctx.tableNames()))
    except AttributeError:
        java_names = ctx._ssql_ctx.catalog().tables().keySet()
        table_names = list(scala_set_to_set(ctx, java_names))

    table_names.sort()

    dshapes = zip(table_names, map(discover, map(ctx.table, table_names)))
    return datashape.DataShape(datashape.Record(dshapes))
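The zip/Record/DataShape combination can be exercised without a live SQLContext; the table names and shapes below are invented purely for illustration:

import datashape

table_names = ['accounts', 'cities']
table_shapes = [datashape.dshape('var * {name: string, amount: int64}'),
                datashape.dshape('var * {city: string, country: string}')]

dshapes = zip(table_names, table_shapes)
ds = datashape.DataShape(datashape.Record(dshapes))
print(ds)   # a record datashape mapping each table name to its discovered shape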
Example #4
def test_extend(tmpcsv, schema):
    dd = CSV(tmpcsv, 'w', schema=schema, delimiter=' ')
    dd.extend(data)
    with open(tmpcsv) as f:
        lines = f.readlines()
    expected_lines = ('Alice 100', 'Bob 200', 'Alice 50')
    for i, eline in enumerate(expected_lines):
        assert lines[i].strip() == eline

    expected_dshape = datashape.DataShape(datashape.Var(),
                                          datashape.dshape(schema))

    assert str(dd.dshape) == str(expected_dshape)
Example #5
    def test_array_subarray(self):
        self.assertEqual(
            datashape.dshape('3 * int32').subarray(0),
            datashape.dshape('3 * int32'))
        self.assertEqual(
            datashape.dshape('3 * int32').subarray(1),
            datashape.DataShape(datashape.int32))
        self.assertEqual(
            str(datashape.dshape('3 * var * M * int32').subarray(2)),
            str(datashape.dshape('M * int32')))
        self.assertEqual(
            str(datashape.dshape('3 * var * M * float64').subarray(3)),
            str(datashape.float64))
Example #6
    def test_extend(self):
        dd = CSV(self.filename, 'w', schema=self.schema, delimiter=' ')
        dd.extend(self.data)
        with open(self.filename) as f:
            lines = f.readlines()
            self.assertEqual(lines[0].strip(), 'Alice 100')
            self.assertEqual(lines[1].strip(), 'Bob 200')
            self.assertEqual(lines[2].strip(), 'Alice 50')

        expected_dshape = datashape.DataShape(datashape.Var(), self.schema)
        # TODO: datashape comparison is broken
        self.assertEqual(
            str(dd.dshape).replace(' ', ''),
            str(expected_dshape).replace(' ', ''))
Example #7
    def _expr(self):
        return symbol(self._name, datashape.DataShape(self.dshape.measure))
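Wrapping dshape.measure in a DataShape strips the dimensions and keeps only the element type, which is what gets handed to symbol (from blaze's expression system) here; a small sketch using only datashape, with an invented example dshape:

import datashape

ds = datashape.dshape('var * {name: string, amount: int64}')
print(datashape.DataShape(ds.measure))   # {name: string, amount: int64}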
Example #8
    def dshape(self):
        return datashape.DataShape(*(self._child.dshape.shape +
                                     tuple(self.schema)))
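Concatenating the child's shape with the new schema's parameters keeps the existing dimensions and swaps in a new measure; a sketch with invented shapes:

import datashape

child_ds = datashape.dshape('5 * var * {x: int32, y: string}')
schema = datashape.dshape('{x: int32}')
print(datashape.DataShape(*(child_ds.shape + tuple(schema))))   # 5 * var * {x: int32}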
Example #9
def discover_sqlcontext(ctx):
    table_names = sorted(map(str, ctx.tableNames()))
    dshapes = zip(table_names, map(discover, map(ctx.table, table_names)))
    return datashape.DataShape(datashape.Record(dshapes))
Example #10
    def dshape(self):
        return datashape.DataShape(datashape.Var(), self.schema)