Example #1
0
def test_broadcast_block_retry(fx_session: scoped_session, fx_user: User,
                               limit: int, blocks: int, expected: int):
    """Broadcasting a block retries after a 403 'not our next block' reply.

    The mocked node first rejects the block (403), then accepts it; the
    test asserts the number of HTTP calls matches ``expected`` and that
    the node's ``last_connected_at`` timestamp was refreshed.
    """
    for i in range(blocks):
        block = Block.create(fx_user, [])
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.flush()
    patch = unittest.mock.patch('nekoyume.broadcast.BROADCAST_LIMIT', limit)
    # Mocker() for consistency with the other broadcast tests; the bare
    # name `mock` was undefined in this module.
    with Mocker() as m, patch:
        m.register_uri('POST', 'http://test.neko/blocks', [{
            'json': {
                'result': 'failed',
                'block_id': 0,
                # key fixed from 'mesage': the server replies with
                # 'message' (see test_post_block_return_block_id).
                'message': "new block isn't our next block."
            },
            'status_code': 403
        }, {
            'json': {
                'result': 'success',
            },
            'status_code': 200
        }])
        broadcast_block(
            block.serialize(use_bencode=False,
                            include_suffix=True,
                            include_moves=True,
                            include_hash=True))
        assert m.call_count == expected
        assert node.last_connected_at > now
Example #2
0
def test_move_broadcast(fx_session: scoped_session, fx_user: User,
                        fx_novice_status: typing.Mapping[str, str]):
    """The move_broadcast task forwards a serialized move to multicast."""
    novice_move = fx_user.create_novice(fx_novice_status)
    fx_session.add(novice_move)
    fx_session.commit()
    with unittest.mock.patch('nekoyume.tasks.multicast') as multicast_mock:
        move_broadcast(novice_move.id,
                       'http://localhost:5000',
                       'http://localhost:5001',
                       session=fx_session)
        assert multicast_mock.called
        kwargs = multicast_mock.call_args[1]
        expected = novice_move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        assert kwargs['serialized'] == expected
        sender = kwargs['sent_node']
        assert isinstance(sender, Node)
        assert sender.url == 'http://localhost:5000'
        mine = kwargs['my_node']
        assert isinstance(mine, Node)
        assert mine.url == 'http://localhost:5001'
        func = kwargs['broadcast']
        assert isinstance(func, typing.Callable)
        assert func.__name__ == 'broadcast_move'
Example #3
0
def test_broadcast_node_same_url(fx_session: scoped_session):
    """Broadcasting a node back to itself performs no HTTP request."""
    target_url = 'http://test.neko'
    before = datetime.datetime.utcnow()
    same_node = Node(url=target_url, last_connected_at=before)
    fx_session.add(same_node)
    fx_session.commit()
    with Mocker() as http_mock:
        broadcast_node(serialized={'url': target_url}, sent_node=same_node)
        assert not http_mock.called
    # No request happened, so the timestamp must be untouched.
    assert same_node.last_connected_at == before
Example #4
0
def _update_or_add_metadata(
    hass: HomeAssistant,
    session: scoped_session,
    new_metadata: StatisticMetaData,
) -> int:
    """Get metadata_id for a statistic_id.

    If the statistic_id is previously unknown, add it. If it's already known,
    update metadata if needed.

    Updating metadata source is not possible.
    """
    statistic_id = new_metadata["statistic_id"]
    old_metadata_dict = get_metadata_with_session(hass, session,
                                                  [statistic_id], None)
    if not old_metadata_dict:
        # Unknown statistic_id: insert a fresh metadata row and flush so
        # the database assigns its id before we return it.
        meta = StatisticsMeta.from_meta(
            DOMAIN,
            statistic_id,
            new_metadata["unit_of_measurement"],
            new_metadata["has_mean"],
            new_metadata["has_sum"],
        )
        session.add(meta)
        session.flush()
        _LOGGER.debug(
            "Added new statistics metadata for %s, new_metadata: %s",
            statistic_id,
            new_metadata,
        )
        return meta.id  # type: ignore[no-any-return]

    metadata_id, old_metadata = old_metadata_dict[statistic_id]
    # Only touch the row when one of the mutable fields actually changed.
    changed = any(
        old_metadata[field] != new_metadata[field]
        for field in ("has_mean", "has_sum", "unit_of_measurement")
    )
    if changed:
        session.query(StatisticsMeta).filter_by(
            statistic_id=statistic_id).update(
                {
                    StatisticsMeta.has_mean: new_metadata["has_mean"],
                    StatisticsMeta.has_sum: new_metadata["has_sum"],
                    StatisticsMeta.unit_of_measurement:
                        new_metadata["unit_of_measurement"],
                },
                synchronize_session=False,
            )
        _LOGGER.debug(
            "Updated statistics metadata for %s, old_metadata: %s, new_metadata: %s",
            statistic_id,
            old_metadata,
            new_metadata,
        )

    return metadata_id
Example #5
0
def compile_hourly_statistics(instance: Recorder, session: scoped_session,
                              start: datetime) -> None:
    """Compile hourly statistics.

    Summarizes the short-term rows that fall inside the hour containing
    ``start``: mean/min/max come from an aggregate query, the sum-related
    columns from the row ranked first per metadata_id (rownum == 1).
    """
    # Hour window: [start of the hour, start of the next hour).
    start_time = start.replace(minute=0)
    end_time = start_time + timedelta(hours=1)
    # Get last hour's average, min, max
    summary = {}
    # Baked query: the lambdas below are appended in order and build the
    # final query lazily; keep their order intact.
    baked_query = instance.hass.data[STATISTICS_SHORT_TERM_BAKERY](
        lambda session: session.query(*QUERY_STATISTICS_SUMMARY_MEAN))

    baked_query += lambda q: q.filter(StatisticsShortTerm.start >= bindparam(
        "start_time"))
    baked_query += lambda q: q.filter(StatisticsShortTerm.start < bindparam(
        "end_time"))
    baked_query += lambda q: q.group_by(StatisticsShortTerm.metadata_id)
    baked_query += lambda q: q.order_by(StatisticsShortTerm.metadata_id)

    stats = execute(
        baked_query(session).params(start_time=start_time, end_time=end_time))

    if stats:
        for stat in stats:
            metadata_id, _mean, _min, _max = stat
            summary[metadata_id] = {
                "metadata_id": metadata_id,
                "mean": _mean,
                "min": _min,
                "max": _max,
            }

    # Get last hour's sum
    subquery = (session.query(*QUERY_STATISTICS_SUMMARY_SUM).filter(
        StatisticsShortTerm.start >= bindparam("start_time")).filter(
            StatisticsShortTerm.start < bindparam("end_time")).subquery())
    query = (session.query(subquery).filter(subquery.c.rownum == 1).order_by(
        subquery.c.metadata_id))
    stats = execute(query.params(start_time=start_time, end_time=end_time))

    if stats:
        for stat in stats:
            # NOTE(review): `start` shadows the function parameter here;
            # harmless because start_time was already computed above.
            metadata_id, start, last_reset, state, _sum, sum_increase, _ = stat
            # Merge into any mean/min/max entry collected above.
            summary[metadata_id] = {
                **summary.get(metadata_id, {}),
                **{
                    "metadata_id": metadata_id,
                    "last_reset": process_timestamp(last_reset),
                    "state": state,
                    "sum": _sum,
                    "sum_increase": sum_increase,
                },
            }

    # Insert one Statistics row per metadata_id for this hour.
    for stat in summary.values():
        session.add(
            Statistics.from_stats(stat.pop("metadata_id"), start_time, stat))
Example #6
0
def test_broadcast_block_same_node(fx_session: scoped_session, fx_user: User):
    """A block sent back to its own origin node is not re-broadcast."""
    new_block = Block.create(fx_user, [])
    before = datetime.datetime.utcnow()
    origin = Node(url='http://test.neko', last_connected_at=before)
    fx_session.add(origin)
    fx_session.flush()
    payload = new_block.serialize(use_bencode=False,
                                  include_suffix=True,
                                  include_moves=True,
                                  include_hash=True)
    broadcast_block(payload, sent_node=origin)
    # Nothing was sent, so the timestamp must be unchanged.
    assert origin.last_connected_at == before
Example #7
0
def insert_modification_record(method: str, resource_url: str,
                               session: scoped_session) -> int:
    """
    Insert a modification record into the database.
    :param method: HTTP method type of related operation.
    :param resource_url: URL of resource modified.
    :param session: sqlalchemy session.
    :return: ID of new modification record.
    """
    record = Modification(method=method, resource_url=resource_url)
    session.add(record)
    # Commit immediately so the auto-generated job_id is assigned.
    session.commit()
    return record.job_id
Example #8
0
def test_broadcast_my_node(fx_session: scoped_session):
    """broadcast_node posts the node URL and tags the payload with the sender."""
    my_url = 'http://test.neko'
    before = datetime.datetime.utcnow()
    my_node = Node(url=my_url, last_connected_at=before)
    fx_session.add(my_node)
    fx_session.commit()
    with Mocker() as http_mock:
        http_mock.post('http://test.neko/nodes', json={'result': 'success'})
        broadcast_node({'url': my_url}, my_node=my_node)
        # A successful POST refreshes the last-connected timestamp.
        assert my_node.last_connected_at > before
        # check request.json value
        sent_payload = http_mock.request_history[0].json()
        assert sent_payload == {
            'url': 'http://test.neko',
            'sent_node': 'http://test.neko'
        }
Example #9
0
def _insert_statistics(
    session: scoped_session,
    table: type[Statistics | StatisticsShortTerm],
    metadata_id: int,
    statistic: StatisticData,
) -> None:
    """Insert statistics in the database.

    Adds a single row built by ``table.from_stats``; an ``SQLAlchemyError``
    is logged and swallowed so one bad row does not abort the caller.
    """
    try:
        session.add(table.from_stats(metadata_id, statistic))
    except SQLAlchemyError:
        # Deliberate swallow: log the offending row and keep going.
        _LOGGER.exception(
            "Unexpected exception when inserting statistics %s:%s ",
            metadata_id,
            statistic,
        )
Example #10
0
def _update_or_add_metadata(
    hass: HomeAssistant,
    session: scoped_session,
    statistic_id: str,
    new_metadata: StatisticMetaData,
) -> str:
    """Get metadata_id for a statistic_id, add if it doesn't exist."""
    old_metadata_dict = _get_metadata(hass, session, [statistic_id], None)
    if not old_metadata_dict:
        # First sighting of this statistic_id: create the metadata row,
        # then look its id up again.
        session.add(
            StatisticsMeta.from_meta(
                DOMAIN,
                statistic_id,
                new_metadata["unit_of_measurement"],
                new_metadata["has_mean"],
                new_metadata["has_sum"],
            ))
        metadata_ids = _get_metadata_ids(hass, session, [statistic_id])
        _LOGGER.debug(
            "Added new statistics metadata for %s, new_metadata: %s",
            statistic_id,
            new_metadata,
        )
        return metadata_ids[0]

    metadata_id, old_metadata = next(iter(old_metadata_dict.items()))
    # Only issue an UPDATE when one of the mutable fields changed.
    needs_update = any(
        old_metadata[field] != new_metadata[field]
        for field in ("has_mean", "has_sum", "unit_of_measurement")
    )
    if needs_update:
        session.query(StatisticsMeta).filter_by(
            statistic_id=statistic_id).update(
                {
                    StatisticsMeta.has_mean: new_metadata["has_mean"],
                    StatisticsMeta.has_sum: new_metadata["has_sum"],
                    StatisticsMeta.unit_of_measurement:
                        new_metadata["unit_of_measurement"],
                },
                synchronize_session=False,
            )
        _LOGGER.debug(
            "Updated statistics metadata for %s, old_metadata: %s, new_metadata: %s",
            statistic_id,
            old_metadata,
            new_metadata,
        )

    return metadata_id
Example #11
0
def broadcast_node_failed(fx_session: scoped_session,
                          fx_other_session: Session, error):
    """A failed broadcast leaves sessions and timestamps untouched.

    The mocked POST raises ``error``; the other session's node must not
    leak into ``fx_session`` and ``last_connected_at`` must not advance.
    """
    now = datetime.datetime.utcnow()
    node = Node(url='http://test.neko', last_connected_at=now)
    node2 = Node(url='http://other.neko',
                 last_connected_at=datetime.datetime.utcnow())
    fx_session.add(node)
    fx_session.commit()
    fx_other_session.add(node2)
    fx_other_session.commit()
    assert not fx_session.query(Node).filter(Node.url == node2.url).first()
    with Mocker() as m:
        m.post('http://test.neko', exc=error)
        # Fixed: the original referenced the undefined name
        # `fx_other_server`; the URL being broadcast is node2's.
        broadcast_node(serialized={'url': node2.url})
    assert not fx_session.query(Node).filter(Node.url == node2.url).first()
    assert node.last_connected_at == now
Example #12
0
def _get_or_add_metadata_id(
    hass: HomeAssistant,
    session: scoped_session,
    statistic_id: str,
    metadata: StatisticMetaData,
) -> str:
    """Get metadata_id for a statistic_id, add if it doesn't exist."""
    metadata_id = _get_metadata_ids(hass, session, [statistic_id])
    if not metadata_id:
        # Not known yet: persist a metadata row, then look the id up again.
        session.add(
            StatisticsMeta.from_meta(
                DOMAIN,
                statistic_id,
                metadata["unit_of_measurement"],
                metadata["has_mean"],
                metadata["has_sum"],
            ))
        metadata_id = _get_metadata_ids(hass, session, [statistic_id])
    return metadata_id[0]
Example #13
0
def test_broadcast_block_raise_exception(fx_session: scoped_session,
                                         fx_user: User,
                                         error: typing.Union[ConnectionError,
                                                             Timeout]):
    """A network error during broadcast leaves last_connected_at alone."""
    fresh_block = Block.create(fx_user, [])
    before = datetime.datetime.utcnow()
    peer = Node(url='http://test.neko', last_connected_at=before)
    fx_session.add(peer)
    fx_session.flush()
    with Mocker() as http_mock:
        http_mock.post('http://test.neko/blocks', exc=error)
        payload = fresh_block.serialize(use_bencode=False,
                                        include_suffix=True,
                                        include_moves=True,
                                        include_hash=True)
        broadcast_block(payload)
        assert peer.last_connected_at == before
Example #14
0
def test_broadcast_node(
    fx_server: WSGIServer,
    fx_session: scoped_session,
    fx_other_server: WSGIServer,
    fx_other_session: Session,
):
    """Broadcasting a node URL makes the remote peer known locally."""
    before = datetime.datetime.utcnow()
    local_node = Node(url=fx_server.url, last_connected_at=before)
    remote_node = Node(url=fx_other_server.url,
                       last_connected_at=datetime.datetime.utcnow())
    fx_session.add(local_node)
    fx_session.commit()
    fx_other_session.add(remote_node)
    fx_other_session.commit()
    # The remote node is unknown locally until the broadcast happens.
    lookup = fx_session.query(Node).filter(Node.url == remote_node.url)
    assert not lookup.first()
    broadcast_node(serialized={'url': fx_other_server.url})
    assert lookup.first()
    assert local_node.last_connected_at > before
Example #15
0
def test_post_block_return_block_id(fx_test_client: FlaskClient, fx_user: User,
                                    fx_session: scoped_session):
    """POSTing a block that is not our next one is rejected with our id."""
    first_block = Block.create(fx_user, [])
    fx_session.add(first_block)
    fx_session.commit()
    second_block = Block.create(fx_user, [])
    payload = second_block.serialize(use_bencode=False,
                                     include_suffix=True,
                                     include_moves=True,
                                     include_hash=True)
    payload['id'] = 3  # skip ahead so it is not our next block
    response = fx_test_client.post('/blocks',
                                   data=json.dumps(payload),
                                   content_type='application/json')
    assert response.status_code == 403
    body = json.loads(response.get_data())
    assert body['result'] == 'failed'
    assert body['message'] == "new block isn't our next block."
    assert body['block_id'] == 2
Example #16
0
def test_broadcast_block_my_node(fx_session: scoped_session, fx_user: User):
    """Broadcasting with my_node tags the payload with the sender URL."""
    block = Block.create(fx_user, [])
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.flush()
    with Mocker() as m:
        m.post('http://test.neko/blocks', text='success')
        expected = serialized = block.serialize(use_bencode=False,
                                                include_suffix=True,
                                                include_moves=True,
                                                include_hash=True)
        broadcast_block(serialized, my_node=node)
        expected['sent_node'] = url
        # A successful POST refreshes the node's last-connected timestamp.
        # (Fixed: this assertion was accidentally duplicated.)
        assert node.last_connected_at > now
        # check request.json value
        assert m.request_history[0].json() == expected
Example #17
0
def test_block_broadcast(fx_session: scoped_session, fx_user: User):
    """The block_broadcast task hands the block to broadcast_block."""
    created = Block.create(fx_user, [])
    fx_session.add(created)
    fx_session.commit()
    with unittest.mock.patch('nekoyume.tasks.broadcast_block') as broadcast_mock:
        block_broadcast(created.id,
                        'http://localhost:5000',
                        'http://localhost:5001',
                        session=fx_session)
        assert broadcast_mock.called
        kwargs = broadcast_mock.call_args[1]
        expected = created.serialize(use_bencode=False,
                                     include_suffix=True,
                                     include_moves=True,
                                     include_hash=True)
        assert kwargs['serialized'] == expected
        sender = kwargs['sent_node']
        assert isinstance(sender, Node)
        assert sender.url == 'http://localhost:5000'
        mine = kwargs['my_node']
        assert isinstance(mine, Node)
        assert mine.url == 'http://localhost:5001'
Example #18
0
def insert(object_: Dict[str, Any],
           session: scoped_session,
           id_: Optional[int] = None) -> int:
    """Insert an object to database [POST] and returns the inserted object.

    :param object_: dict of properties describing the instance; must carry
        an ``@type`` naming a known RDFClass.
    :param session: sqlalchemy scoped session.
    :param id_: optional explicit id for the new instance.
    :return: id of the inserted instance.

    Raises ClassNotFound, InstanceExists, PropertyNotFound,
    NotInstanceProperty or NotAbstractProperty on invalid input.
    """
    rdf_class = None
    instance = None

    # Check for class at the beginning
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])

    if id_ is not None:
        if session.query(exists().where(Instance.id == id_)).scalar():
            raise InstanceExists(type_=rdf_class.name, id_=id_)
        else:
            instance = Instance(id=id_, type_=rdf_class.id)
    else:
        instance = Instance(type_=rdf_class.id)
    session.add(instance)
    session.flush()  # assigns instance.id without committing

    for prop_name in object_:
        if prop_name not in ["@type", "@context"]:
            try:
                property_ = session.query(properties).filter(
                    properties.name == prop_name).one()
            except NoResultFound:
                # Adds new Property
                session.close()
                raise PropertyNotFound(type_=prop_name)

            # For insertion in III (instance-instance triple): a nested
            # dict is inserted recursively and linked by id.
            # (Fixed: isinstance instead of `type(...) == dict`, matching
            # the sibling insert_multiple implementation.)
            if isinstance(object_[prop_name], dict):
                instance_id = insert(object_[prop_name], session=session)
                instance_object = session.query(Instance).filter(
                    Instance.id == instance_id).one()

                if property_.type_ in ("PROPERTY", "INSTANCE"):
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIII(subject=instance.id,
                                      predicate=property_.id,
                                      object_=instance_object.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

            # For insertion in IAC (instance-abstract-class triple)
            elif session.query(exists().where(
                    RDFClass.name == str(object_[prop_name]))).scalar():
                if property_.type_ in ("PROPERTY", "ABSTRACT"):
                    property_.type_ = "ABSTRACT"
                    session.add(property_)
                    class_ = session.query(RDFClass).filter(
                        RDFClass.name == object_[prop_name]).one()
                    triple = GraphIAC(subject=instance.id,
                                      predicate=property_.id,
                                      object_=class_.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotAbstractProperty(type_=prop_name)

            # For insertion in IIT (instance-terminal triple)
            else:
                terminal = Terminal(value=object_[prop_name])
                session.add(terminal)
                session.flush()  # Assigns ID without committing

                if property_.type_ in ("PROPERTY", "INSTANCE"):
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIIT(subject=instance.id,
                                      predicate=property_.id,
                                      object_=terminal.id)
                    # Add things directly to session; if anything fails the
                    # whole transaction is aborted
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

    session.commit()
    return instance.id
Example #19
0
def insert_multiple(objects_: List[Dict[str,
                                        Any]],
                    session: scoped_session,
                    id_: Optional[str] = "") -> List[int]:
    """
    Adds a list of object with given ids to the database
    :param objects_: List of dict's to be added to the database
    :param session: scoped session from getSession in utils
    :param id_: optional comma-separated string of ids for the objects
        that have to be inserted
    :return: Ids that have been inserted
    """
    # Accumulators flushed in bulk at the end of the function.
    triples_list = list()
    properties_list = list()
    instances = list()
    id_list = id_.split(',')

    # the number of objects would be the same as number of instances
    for index, object_ in enumerate(objects_):
        try:
            rdf_class = session.query(RDFClass).filter(
                RDFClass.name == object_["@type"]).one()
        except NoResultFound:
            raise ClassNotFound(type_=object_["@type"])
        if index < len(id_list) and id_list[index] != "":
            if session.query(
                    exists().where(
                        Instance.id == id_list[index])).scalar():
                # TODO handle where instance already exists, if instance is
                # fetched later anyways remove this
                # (Fixed: removed a leftover debug print of the query.)
                raise InstanceExists(type_=rdf_class.name, id_=id_list[index])
            instance = Instance(id=id_list[index], type_=rdf_class.id)
        else:
            instance = Instance(type_=rdf_class.id)
        instances.append(instance)

    session.add_all(instances)
    session.flush()  # assigns ids without committing
    instance_id_list = [instance.id for instance in instances]

    for index, object_ in enumerate(objects_):
        for prop_name in object_:
            if prop_name not in ["@type", "@context"]:
                try:
                    property_ = session.query(properties).filter(
                        properties.name == prop_name).one()
                except NoResultFound:
                    # Adds new Property
                    session.close()
                    raise PropertyNotFound(type_=prop_name)

                # For insertion in III (instance-instance triple)
                if isinstance(object_[prop_name], dict):
                    instance_id = insert(
                        object_[prop_name], session=session)
                    instance_object = session.query(Instance).filter(
                        Instance.id == instance_id).one()

                    if property_.type_ in ("PROPERTY", "INSTANCE"):
                        property_.type_ = "INSTANCE"
                        properties_list.append(property_)
                        triple = GraphIII(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=instance_object.id)
                        triples_list.append(triple)
                    else:
                        session.close()
                        raise NotInstanceProperty(type_=prop_name)

                # For insertion in IAC (instance-abstract-class triple)
                elif session.query(
                        exists().where(RDFClass.name == str(object_[prop_name]))).scalar():
                    if property_.type_ in ("PROPERTY", "ABSTRACT"):
                        property_.type_ = "ABSTRACT"
                        properties_list.append(property_)
                        class_ = session.query(RDFClass).filter(
                            RDFClass.name == object_[prop_name]).one()
                        triple = GraphIAC(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=class_.id)
                        triples_list.append(triple)

                    else:
                        session.close()
                        raise NotAbstractProperty(type_=prop_name)

                # For insertion in IIT (instance-terminal triple)
                else:
                    terminal = Terminal(value=object_[prop_name])
                    session.add(terminal)
                    session.flush()  # Assigns ID without committing

                    if property_.type_ in ("PROPERTY", "INSTANCE"):
                        property_.type_ = "INSTANCE"
                        properties_list.append(property_)
                        triple = GraphIIT(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=terminal.id)
                        # Add things directly to session, if anything fails
                        # whole transaction is aborted
                        triples_list.append(triple)
                    else:
                        session.close()
                        raise NotInstanceProperty(type_=prop_name)
    session.bulk_save_objects(properties_list)
    session.bulk_save_objects(triples_list)
    session.commit()
    return instance_id_list
Example #20
0
def insert_object(object_: Dict[str, Any],
                  session: scoped_session,
                  collection: bool = False) -> str:
    """
    Insert the object in the database
    :param object_: Dict containing object properties
    :param session: sqlalchemy session
    :param collection: when True, `object_` describes a collection and each
        entry of `object_['members']` is inserted under one collection_id
    :return: The ID of the inserted object
    """
    type_ = get_type(object_)
    database_class = get_database_class(type_)
    id_ = object_.get("id", None)
    if collection:
        # if type_ is of a collection class
        members = object_['members']
        # reuse the supplied id or mint a fresh UUID for the collection
        collection_id = id_ if id_ else str(uuid.uuid4())
        for member in members:
            # add all the members of that collection
            inserted_object = database_class(
                members=member['id_'],
                collection_id=collection_id,
                member_type=member['@type'],
            )
            try:
                session.add(inserted_object)
                session.commit()
            except InvalidRequestError:
                # NOTE(review): a failing member is rolled back and silently
                # skipped rather than reported — confirm this is intended.
                session.rollback()
        return collection_id
    else:
        # when type_ is of a non-collection class
        if (id_ is not None and session.query(
                exists().where(database_class.id == id_)).scalar()):
            raise InstanceExists(type_, id_)
        foreign_keys = database_class.__table__.foreign_keys
        for fk in foreign_keys:
            # the name of the column through which this foreign key relationship
            # is being established
            fk_column = fk.info["column_name"]
            try:
                fk_object = object_[fk_column]
            except KeyError as e:
                wrong_property = e.args[0]
                raise PropertyNotGiven(type_=wrong_property)
            # insert the foreign key object (recursive call)
            fk_object_id = insert_object(fk_object, session)
            # put the id of the foreign instance in this table's column
            object_[fk_column] = fk_object_id
        try:
            # remove the @type from object before using the object to make a
            # instance of it using sqlalchemy class
            object_.pop("@type")
            inserted_object = database_class(**object_)
        except TypeError as e:
            # extract the wrong property name from TypeError object
            wrong_property = e.args[0].split("'")[1]
            raise PropertyNotFound(type_=wrong_property)
        try:
            session.add(inserted_object)
            session.commit()
        except InvalidRequestError:
            # NOTE(review): failure is rolled back but the stale id of the
            # unsaved object is still returned below — confirm intended.
            session.rollback()

        return inserted_object.id
Example #21
0
def compile_hourly_statistics(instance: Recorder, session: scoped_session,
                              start: datetime) -> None:
    """Compile hourly statistics.

    This will summarize 5-minute statistics for one hour:
    - average, min max is computed by a database query
    - sum is taken from the last 5-minute entry during the hour
    """
    # Hour window: [start of the hour, start of the next hour).
    start_time = start.replace(minute=0)
    end_time = start_time + timedelta(hours=1)

    # Compute last hour's average, min, max
    summary: dict[str, StatisticData] = {}
    # Baked query: the lambdas below are appended in order and build the
    # final query lazily; keep their order intact.
    baked_query = instance.hass.data[STATISTICS_SHORT_TERM_BAKERY](
        lambda session: session.query(*QUERY_STATISTICS_SUMMARY_MEAN))

    baked_query += lambda q: q.filter(StatisticsShortTerm.start >= bindparam(
        "start_time"))
    baked_query += lambda q: q.filter(StatisticsShortTerm.start < bindparam(
        "end_time"))
    baked_query += lambda q: q.group_by(StatisticsShortTerm.metadata_id)
    baked_query += lambda q: q.order_by(StatisticsShortTerm.metadata_id)

    stats = execute(
        baked_query(session).params(start_time=start_time, end_time=end_time))

    if stats:
        for stat in stats:
            metadata_id, _mean, _min, _max = stat
            summary[metadata_id] = {
                "start": start_time,
                "mean": _mean,
                "min": _min,
                "max": _max,
            }

    # Get last hour's last sum
    if instance._db_supports_row_number:  # pylint: disable=[protected-access]
        # Window-function path: pick the row ranked first (rownum == 1)
        # per metadata_id inside the hour.
        subquery = (session.query(*QUERY_STATISTICS_SUMMARY_SUM).filter(
            StatisticsShortTerm.start >= bindparam("start_time")).filter(
                StatisticsShortTerm.start < bindparam("end_time")).subquery())
        query = (session.query(subquery).filter(
            subquery.c.rownum == 1).order_by(subquery.c.metadata_id))
        stats = execute(query.params(start_time=start_time, end_time=end_time))

        if stats:
            for stat in stats:
                # NOTE(review): `start` shadows the function parameter here;
                # harmless because start_time was already computed above.
                metadata_id, start, last_reset, state, _sum, _ = stat
                if metadata_id in summary:
                    summary[metadata_id].update({
                        "last_reset":
                        process_timestamp(last_reset),
                        "state":
                        state,
                        "sum":
                        _sum,
                    })
                else:
                    summary[metadata_id] = {
                        "start": start_time,
                        "last_reset": process_timestamp(last_reset),
                        "state": state,
                        "sum": _sum,
                    }
    else:
        # Legacy path (no ROW_NUMBER support): order rows newest-first per
        # metadata_id and take the first row of each group.
        baked_query = instance.hass.data[STATISTICS_SHORT_TERM_BAKERY](
            lambda session: session.query(*QUERY_STATISTICS_SUMMARY_SUM_LEGACY
                                          ))

        baked_query += lambda q: q.filter(StatisticsShortTerm.start >=
                                          bindparam("start_time"))
        baked_query += lambda q: q.filter(StatisticsShortTerm.start <
                                          bindparam("end_time"))
        baked_query += lambda q: q.order_by(StatisticsShortTerm.metadata_id,
                                            StatisticsShortTerm.start.desc())

        stats = execute(
            baked_query(session).params(start_time=start_time,
                                        end_time=end_time))

        if stats:
            for metadata_id, group in groupby(
                    stats, lambda stat: stat["metadata_id"]):  # type: ignore
                # First row of the group is the latest within the hour.
                (
                    metadata_id,
                    last_reset,
                    state,
                    _sum,
                ) = next(group)
                if metadata_id in summary:
                    summary[metadata_id].update({
                        "start":
                        start_time,
                        "last_reset":
                        process_timestamp(last_reset),
                        "state":
                        state,
                        "sum":
                        _sum,
                    })
                else:
                    summary[metadata_id] = {
                        "start": start_time,
                        "last_reset": process_timestamp(last_reset),
                        "state": state,
                        "sum": _sum,
                    }

    # Insert compiled hourly statistics in the database
    for metadata_id, stat in summary.items():
        session.add(Statistics.from_stats(metadata_id, stat))
Example #22
0
def insert(object_: Dict[str, Any], session: scoped_session,
           id_: Optional[str] = None) -> str:
    """Insert an object to database [POST] and returns the inserted object.
    :param object_: object to be inserted
    :param session: sqlalchemy scoped session
    :param id_: id of the object to be inserted (optional param)
    :return: ID of object inserted


    Raises:
        ClassNotFound: If `object_["@type"]` is not a valid/defined RDFClass.
        InstanceExists: If an Instance `id_` already exists.
        PropertyNotFound: If any property name of `object_` other than `@type` or `@context`
            is not a valid/defined property.
        NotInstanceProperty: If any property of `object_` is a dictionary but
            not an Instance property
        NotAbstractProperty: If any property of `object_` is a
            valid/defined RDFClass but is not a dictionary neither an Abstract Property

    """
    # Check for class at the beginning so we fail fast on unknown types.
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])

    if id_ is not None and session.query(
            exists().where(Instance.id == id_)).scalar():
        raise InstanceExists(type_=rdf_class.name, id_=id_)
    if id_ is not None:
        instance = Instance(id=id_, type_=rdf_class.id)
    else:
        instance = Instance(type_=rdf_class.id)
    session.add(instance)
    session.flush()  # assigns instance.id without committing

    for prop_name in object_:
        if prop_name in ("@type", "@context"):
            continue
        try:
            property_ = session.query(properties).filter(
                properties.name == prop_name).one()
        except NoResultFound:
            session.close()
            raise PropertyNotFound(type_=prop_name)

        # For insertion in III: a nested dict is inserted recursively as
        # its own instance and linked via an instance->instance triple.
        if isinstance(object_[prop_name], dict):
            instance_id = insert(object_[prop_name], session=session)
            instance_object = session.query(Instance).filter(
                Instance.id == instance_id).one()
            if property_.type_ in ("PROPERTY", "INSTANCE"):
                property_.type_ = "INSTANCE"
                session.add(property_)
                triple = GraphIII(
                    subject=instance.id,
                    predicate=property_.id,
                    object_=instance_object.id)
                session.add(triple)
            else:
                session.close()
                raise NotInstanceProperty(type_=prop_name)
            continue

        # Hoisted: the same existence check was previously issued twice.
        class_exists = session.query(exists().where(
            RDFClass.name == str(object_[prop_name]))).scalar()

        # For insertion in IAC (instance -> abstract-class triple).
        # BUGFIX: the original condition `a and b or c` grouped as
        # `(a and b) or c`, so any ABSTRACT property entered this branch
        # even when its value was not a known RDFClass, making the
        # `.one()` below raise an uncaught NoResultFound. The class
        # existence check must gate both property types.
        if class_exists and property_.type_ in ("PROPERTY", "ABSTRACT"):
            property_.type_ = "ABSTRACT"
            session.add(property_)
            class_ = session.query(RDFClass).filter(
                RDFClass.name == object_[prop_name]).one()
            triple = GraphIAC(
                subject=instance.id,
                predicate=property_.id,
                object_=class_.id)
            session.add(triple)
        elif class_exists:
            # Value names a known class but the property cannot be abstract.
            session.close()
            raise NotAbstractProperty(type_=prop_name)

        # For insertion in IIT: plain values become Terminal rows linked
        # via an instance->terminal triple.
        else:
            terminal = Terminal(value=object_[prop_name])
            session.add(terminal)
            session.flush()  # Assigns ID without committing

            if property_.type_ in ("PROPERTY", "INSTANCE"):
                property_.type_ = "INSTANCE"
                session.add(property_)
                triple = GraphIIT(
                    subject=instance.id,
                    predicate=property_.id,
                    object_=terminal.id)
                # Add things directly to session, if anything fails whole
                # transaction is aborted
                session.add(triple)
            else:
                session.close()
                raise NotInstanceProperty(type_=prop_name)

    session.commit()
    return instance.id