Example 1: test_parse_array_datatype
def test_parse_array_datatype():
    d = parse_datatype('float64[10]')
    assert not d.is_scalar()
    assert not d.is_int_scalar()
    assert not d.is_float_scalar()
    assert d.is_array()
    assert not d.is_blob()
    assert d.base == 'float64'
    assert d.shape == (10,)

    d = parse_datatype('int8[10,10]')
    assert not d.is_scalar()
    assert not d.is_int_scalar()
    assert not d.is_float_scalar()
    assert d.is_array()
    assert not d.is_blob()
    assert d.base == 'int8'
    assert d.shape == (10, 10)

    # Check spaces
    d = parse_datatype('int16[ 10,  1  , 40]')
    assert not d.is_scalar()
    assert not d.is_int_scalar()
    assert not d.is_float_scalar()
    assert d.is_array()
    assert not d.is_blob()
    assert d.base == 'int16'
    assert d.shape == (10, 1, 40)
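
The assertions above pin down the interface of the object parse_datatype returns: a base type name, an optional shape, and the is_scalar/is_int_scalar/is_float_scalar/is_array/is_blob predicates. The library's own implementation is not included in these examples; purely as an illustration, the following is a minimal, self-contained sketch (the DataType class name, the regular expression, and the error messages are assumptions) written to satisfy the assertions in these tests:

import re

SCALAR_BASES = {'int8', 'int16', 'int32', 'float32', 'float64'}


class BadTypeError(Exception):
    """Raised when a datatype string cannot be parsed."""


class DataType:
    """Illustrative stand-in for whatever parse_datatype really returns."""

    def __init__(self, base, shape=None, mimetype=None):
        self.base = base
        self.shape = shape
        self.mimetype = mimetype

    def is_scalar(self):
        return self.base in SCALAR_BASES and self.shape is None

    def is_int_scalar(self):
        return self.is_scalar() and self.base.startswith('int')

    def is_float_scalar(self):
        return self.is_scalar() and self.base.startswith('float')

    def is_array(self):
        return self.shape is not None

    def is_blob(self):
        return self.base == 'blob'


def parse_datatype(spec):
    # blob:<mimetype>
    if ':' in spec:
        base, _, mimetype = spec.partition(':')
        if base != 'blob':
            raise BadTypeError('Bad blob type: %s' % spec)
        return DataType('blob', mimetype=mimetype)

    # <scalar base>[dim, dim, ...] -- spaces around dimensions are allowed
    match = re.fullmatch(r'(\w+)\[([^\]]+)\]', spec)
    if match:
        base, dims = match.groups()
        if base not in SCALAR_BASES:
            raise BadTypeError('Unknown base type: %s' % base)
        try:
            shape = tuple(int(d.strip()) for d in dims.split(','))
        except ValueError:
            raise BadTypeError('Bad array shape: %s' % spec)
        return DataType(base, shape=shape)

    # plain scalar
    if spec not in SCALAR_BASES:
        raise BadTypeError('Unknown type: %s' % spec)
    return DataType(spec)
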
Example 2: test_parse_bad_array
def test_parse_bad_array():
    with pytest.raises(BadTypeError):
        parse_datatype('foo[10]')
    with pytest.raises(BadTypeError):
        parse_datatype('int8[foo]')
    with pytest.raises(BadTypeError):
        parse_datatype('int8[10,foo]')
    with pytest.raises(BadTypeError):
        parse_datatype('int8[10,foo')
    with pytest.raises(BadTypeError):
        parse_datatype('int8[[10]')
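
Together with the previous test, this shows that parse_datatype doubles as a validator for user-supplied type strings. A small hedged example (is_valid_datatype is a hypothetical helper, and the imports of parse_datatype and BadTypeError are assumed, since the tests do not show them):

def is_valid_datatype(spec):
    # Hypothetical convenience wrapper: report failure instead of raising.
    try:
        parse_datatype(spec)
    except BadTypeError:
        return False
    return True


assert is_valid_datatype('int16[ 10,  1  , 40]')
assert not is_valid_datatype('int8[[10]')
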
Example 3: create_series
    def create_series(self, name, type, reduction, interpolation, unit,
            description, metadata):

        # Raises BadTypeError if the datatype string is invalid
        parse_datatype(type)

        with self.session_scope() as session:
            if session.query(Series).filter_by(name=name).count() != 0:
                raise SeriesCreationError('Series %s already exists.'
                    % name)

            series = Series(name=name, type=type, reduction=reduction,
                interpolation=interpolation, unit=unit, description=description,
                meta=metadata)
            session.add(series)
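
A call to create_series might look like the sketch below. The backend variable, the series name, and the reduction/interpolation strategy names are illustrative placeholders; only the parameter list itself comes from the method above.

# Illustrative only: `backend` stands for an instance of the class that
# defines create_series, and 'mean'/'linear' are placeholder strategy names.
backend.create_series(
    name='room_temperature',
    type='float32',          # validated up front by parse_datatype
    reduction='mean',
    interpolation='linear',
    unit='degC',
    description='Ambient temperature in the lab',
    metadata={},
)
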
Example 4: create_selector
def create_selector(series_config, reduction=None, interpolation=None):
    '''Create a Selector representing the given parts.

    :param series_config: dict of series properties
    :param reduction: String name of requested reduction strategy, or ``None``
        to select the default for this series.
    :param interpolation: String name of requested interpolation strategy, or
        ``None`` to select the default for this series.
    '''
    name = series_config['name']
    datatype = parse_datatype(series_config['type'])
    if reduction is None:
        reduction = series_config['reduction']
    if interpolation is None:
        interpolation = series_config['interpolation']

    try:
        reduction_func = REDUCTIONS[reduction]
    except KeyError:
        raise BadSelectorError('Unknown reduction "%s"' % reduction)

    try:
        interpolation_func = INTERPOLATIONS[interpolation]
    except KeyError:
        raise BadSelectorError('Unknown interpolation "%s"' % interpolation)

    return Selector(series_name=name, datatype=datatype, reduction=reduction,
        interpolation=interpolation, reduction_func=reduction_func,
        interpolation_func=interpolation_func)
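
Callers presumably pass the stored series configuration straight through and only override the parts they need. In the sketch below the configuration values, in particular the 'mean' and 'linear' strategy names, are placeholders, since the actual keys of REDUCTIONS and INTERPOLATIONS are not shown here.

# Illustrative only: strategy names are placeholders for whatever keys
# REDUCTIONS and INTERPOLATIONS actually contain.
series_config = {
    'name': 'room_temperature',
    'type': 'float32',
    'reduction': 'mean',
    'interpolation': 'linear',
}
selector = create_selector(series_config)                    # use the stored defaults
selector = create_selector(series_config, reduction='mean')  # override one part explicitly
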
Example 5: get_data
    def get_data(self, name, offset=None, limit=None):
        with self.session_scope() as session:
            config = session.query(Series.name, Series.type).filter_by(name=name).first()

            if config is None:
                raise SeriesDoesNotExistError('Series %s does not exist.' % name)

            datatype = parse_datatype(config.type)
            table = self._pick_table(datatype)

            # Select appropriate columns from table
            if datatype.is_blob():
                query = session.query(table.sequence, table.timestamp)\
                    .filter_by(name=name)
            else:
                query = session.query(table.sequence, table.timestamp, table.value)\
                    .filter_by(name=name)

            # Decide how many entries to fetch
            if offset is None:  # get last entry
                row = query.order_by(table.sequence.desc()).first()

                if row is None:
                    return [], [], None  # No entry to return
                elif datatype.is_blob():
                    value = SQLBlob(index=row.sequence, mimetype=datatype.mimetype,
                        series_name=name, backend=self)
                else:
                    value = datatype.convert_to_jsonable(row.value)

                return [row.timestamp], [value], None
            else:
                query = query.order_by(table.sequence)

                # Apply limits and decide what the next sequence number is, if any
                if limit is None:
                    rows = query[offset:]
                    next_offset = None
                else:
                    rows = query[offset:offset + limit + 1]
                    if len(rows) > limit:
                        next_offset = rows[-1].sequence
                    else:
                        next_offset = None
                    rows = rows[:limit]

                times = []
                values = []

                for row in rows:
                    times.append(row.timestamp)
                    if datatype.is_blob():
                        value = SQLBlob(index=row.sequence,
                            mimetype=datatype.mimetype,
                            series_name=name, backend=self)
                    else:
                        value = datatype.convert_to_jsonable(row.value)
                    values.append(value)

                return times, values, next_offset
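
The (times, values, next_offset) return contract lends itself to a simple pagination loop. The helper below is a sketch rather than part of the code above; it assumes next_offset can be passed back as the next offset argument, which the naming suggests and which holds while sequence numbers are assigned densely, as add_data in the next example does.

def fetch_all(backend, name, page_size=500):
    # Sketch of a pagination loop over get_data; `backend` is any object
    # exposing the get_data method shown above.
    times, values = [], []
    offset = 0
    while offset is not None:
        t, v, offset = backend.get_data(name, offset=offset, limit=page_size)
        times.extend(t)
        values.extend(v)
    return times, values
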
Example 6: add_data
    def add_data(self, name, time, value):
        with self.session_scope() as session:
            config = session.query(Series.name, Series.type).filter_by(name=name).first()

            if config is None:
                raise SeriesDoesNotExistError('Series %s does not exist.' % name)

            datatype = parse_datatype(config.type)
            value = datatype.coerce(value)
            table = self._pick_table(datatype)

            # get last entry for this series (if exists)
            last_entry = session.query(table.timestamp, table.sequence) \
                .filter_by(name=name).order_by(table.timestamp.desc()).first()

            # compute new sequence number
            if last_entry is None:
                sequence = 0
            else:
                if last_entry.timestamp > time:
                    raise SeriesTimeOrderError('New data point is chronologically before last point in series')
                sequence = last_entry.sequence + 1

            # create new entry
            entry = table(name=name, sequence=sequence, timestamp=time,
                value=value)
            session.add(entry)
            return sequence
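
An illustrative call, assuming timestamps are datetime objects (the timestamp column type is not shown here) and that the series already exists; the backend variable and the series name are placeholders.

from datetime import datetime, timezone

# Illustrative only: `backend` stands for an instance of the class that
# defines add_data, and 'room_temperature' for an existing scalar series.
seq = backend.add_data('room_temperature', datetime.now(timezone.utc), 21.5)
# Points must be appended in chronological order; a timestamp earlier than
# the last stored point raises SeriesTimeOrderError.
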
Example 7: test_parse_blob_datatype
def test_parse_blob_datatype():
    d = parse_datatype('blob:image/png')
    assert not d.is_scalar()
    assert not d.is_int_scalar()
    assert not d.is_float_scalar()
    assert not d.is_array()
    assert d.is_blob()
    assert d.base == 'blob'
    assert d.mimetype == 'image/png'
Example 8: test_parse_scalar_datatype
def test_parse_scalar_datatype():
    for scalar in ['int8', 'int16', 'int32', 'float32', 'float64']:
        d = parse_datatype(scalar)
        assert d.base == scalar
        assert d.is_scalar()
        if 'int' in scalar:
            assert d.is_int_scalar()
            assert not d.is_float_scalar()
        else:
            assert not d.is_int_scalar()
            assert d.is_float_scalar()
        assert not d.is_array()
        assert not d.is_blob()
Example 9: test_parse_bad_blob
def test_parse_bad_blob():
    with pytest.raises(BadTypeError):
        parse_datatype('foo:image/png')
Example 10: test_parse_bad_scalar
def test_parse_bad_scalar():
    with pytest.raises(BadTypeError):
        parse_datatype('foo')