def _update_statistics(
    session: scoped_session,
    table: type[Statistics | StatisticsShortTerm],
    stat_id: int,
    statistic: StatisticData,
) -> None:
    """Update statistics in the database."""
    try:
        session.query(table).filter_by(id=stat_id).update(
            {
                table.mean: statistic.get("mean"),
                table.min: statistic.get("min"),
                table.max: statistic.get("max"),
                table.last_reset: statistic.get("last_reset"),
                table.state: statistic.get("state"),
                table.sum: statistic.get("sum"),
            },
            synchronize_session=False,
        )
    except SQLAlchemyError:
        _LOGGER.exception(
            "Unexpected exception when updating statistics %s:%s",
            stat_id,
            statistic,
        )

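# Illustrative only: a StatisticData mapping with the keys _update_statistics
# reads. The values are made up; any key left out simply becomes NULL via
# .get(). The stat_id is hypothetical.
statistic = {
    "mean": 2.5,
    "min": 1.0,
    "max": 4.0,
    "last_reset": None,
    "state": 4.0,
    "sum": 10.0,
}
# _update_statistics(session, StatisticsShortTerm, stat_id=42, statistic=statistic)
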
def test_move_broadcast(fx_session: scoped_session,
                        fx_user: User,
                        fx_novice_status: typing.Mapping[str, str]):
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add(move)
    fx_session.commit()
    with unittest.mock.patch('nekoyume.tasks.multicast') as m:
        move_broadcast(move.id,
                       'http://localhost:5000',
                       'http://localhost:5001',
                       session=fx_session)
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        assert m.called
        args = m.call_args[1]
        assert serialized == args['serialized']
        assert isinstance(args['sent_node'], Node)
        assert args['sent_node'].url == 'http://localhost:5000'
        assert isinstance(args['my_node'], Node)
        assert args['my_node'].url == 'http://localhost:5001'
        broadcast = args['broadcast']
        assert isinstance(broadcast, typing.Callable)
        assert broadcast.__name__ == 'broadcast_move'

def test_broadcast_block_retry(fx_session: scoped_session,
                               fx_user: User,
                               limit: int,
                               blocks: int,
                               expected: int):
    # limit, blocks and expected are supplied by pytest parametrization.
    for _ in range(blocks):
        block = Block.create(fx_user, [])
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.flush()
    patch = unittest.mock.patch('nekoyume.broadcast.BROADCAST_LIMIT', limit)
    with Mocker() as m, patch:
        m.register_uri('POST', 'http://test.neko/blocks', [{
            'json': {
                'result': 'failed',
                'block_id': 0,
                'message': "new block isn't our next block."
            },
            'status_code': 403
        }, {
            'json': {
                'result': 'success',
            },
            'status_code': 200
        }])
        broadcast_block(
            block.serialize(use_bencode=False,
                            include_suffix=True,
                            include_moves=True,
                            include_hash=True))
        assert m.call_count == expected
        assert node.last_connected_at > now

def get_single(type_: str, api_name: str, session: scoped_session,
               path: str = None) -> Dict[str, Any]:
    """Get instance of classes with single objects.

    :param type_: type of object to be retrieved
    :param api_name: api name specified while starting server
    :param session: sqlalchemy scoped session
    :param path: endpoint
    :return: response containing information about a single object
    """
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        raise ClassNotFound(type_=type_)
    try:
        instance = session.query(Instance).filter(
            Instance.type_ == rdf_class.id).all()[-1]
    except (NoResultFound, IndexError, ValueError):
        raise InstanceNotFound(type_=rdf_class.name)
    object_ = get(instance.id, rdf_class.name,
                  session=session, api_name=api_name, path=path)
    if path is not None:
        object_["@id"] = "/{}/{}".format(api_name, path)
    else:
        object_["@id"] = "/{}/{}".format(api_name, type_)
    return object_

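# A hypothetical usage sketch for get_single, assuming a configured scoped
# session and an API doc defining a "Status" class that supports single
# objects; the class name and api_name are illustrative, not from the source.
try:
    status = get_single("Status", api_name="api", session=session)
    print(status["@id"])  # e.g. "/api/Status"
except (ClassNotFound, InstanceNotFound) as e:
    print("Lookup failed:", e)
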
def _statistics_at_time(
    session: scoped_session,
    metadata_ids: set[int],
    table: type[Statistics | StatisticsShortTerm],
    start_time: datetime,
) -> list | None:
    """Return last known statistics, earlier than start_time, for the metadata_ids."""
    # Fetch the newest statistics row before start_time for each metadata_id
    if table == StatisticsShortTerm:
        base_query = QUERY_STATISTICS_SHORT_TERM
    else:
        base_query = QUERY_STATISTICS

    query = session.query(*base_query)

    most_recent_statistic_ids = (session.query(
        func.max(table.id).label("max_id"),
    ).filter(table.start < start_time).filter(
        table.metadata_id.in_(metadata_ids)))
    most_recent_statistic_ids = most_recent_statistic_ids.group_by(
        table.metadata_id)
    most_recent_statistic_ids = most_recent_statistic_ids.subquery()
    query = query.join(
        most_recent_statistic_ids,
        table.id == most_recent_statistic_ids.c.max_id,
    )
    return execute(query)

def get_collection(API_NAME: str, type_: str,
                   session: scoped_session) -> Dict[str, Any]:
    """Retrieve a type of collection from the database."""
    collection_template = {
        "@id": "/" + API_NAME + "/" + type_ + "Collection/",
        "@context": None,
        "@type": type_ + "Collection",
        "members": list()
    }  # type: Dict[str, Any]
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        raise ClassNotFound(type_=type_)
    try:
        instances = session.query(Instance).filter(
            Instance.type_ == rdf_class.id).all()
    except NoResultFound:
        instances = list()
    for instance_ in instances:
        object_template = {
            "@id": "/" + API_NAME + "/" + type_ + "Collection/" + str(
                instance_.id),
            "@type": type_
        }
        collection_template["members"].append(object_template)
    return collection_template

def apply_filter(object_id: str, search_props: Dict[str, Any],
                 triples: Graph, session: scoped_session) -> bool:
    """Check whether the object has properties with the queried values.

    :param object_id: Id of the instance.
    :param search_props: Dictionary of query parameters with property id and values.
    :param triples: All triples.
    :param session: sqlalchemy scoped session.
    :return: True if the instance has properties with given values, False otherwise.
    """
    for prop in search_props:
        # For nested properties
        if isinstance(search_props[prop], dict):
            data = session.query(triples.GraphIII).filter(
                triples.GraphIII.subject == object_id,
                triples.GraphIII.predicate == prop).one()
            if apply_filter(data.object_, search_props[prop],
                            triples, session) is False:
                return False
        else:
            data = session.query(triples.GraphIIT).filter(
                triples.GraphIIT.subject == object_id,
                triples.GraphIIT.predicate == prop).one()
            terminal = session.query(Terminal).filter(
                Terminal.id == data.object_).one()
            if terminal.value != search_props[prop]:
                return False
    return True

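# Illustrative only: a search_props dict in the shape apply_filter expects.
# The property names and values are made up; a flat value checks a Terminal
# through GraphIIT, while a nested dict recurses through GraphIII.
search_props = {
    "hasName": "John",                    # flat property -> GraphIIT lookup
    "hasAddress": {"hasCity": "Berlin"},  # nested property -> GraphIII lookup
}
# matches = apply_filter(object_id, search_props, triples, session)
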
def update_single(object_: Dict[str, Any], session: scoped_session,
                  api_name: str, path: str = None) -> int:
    """Update instance of classes with single objects.

    :param object_: new object
    :param session: sqlalchemy scoped session
    :param api_name: api name specified while starting server
    :param path: endpoint
    :return: id of the updated object
    """
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])
    try:
        instance = session.query(Instance).filter(
            Instance.type_ == rdf_class.id).all()[-1]
    except (NoResultFound, IndexError, ValueError):
        raise InstanceNotFound(type_=rdf_class.name)
    return update(
        id_=instance.id,
        type_=object_["@type"],
        object_=object_,
        session=session,
        api_name=api_name,
        path=path)

def get_object(query_info: Dict[str, str], session: scoped_session,
               collection: bool = False) -> Dict[str, str]:
    """Get the object from the database.

    :param query_info: Dict containing the id and @type of the object that
        has to be retrieved
    :param session: sqlalchemy session
    :param collection: True if the type_ is of a collection, False for any other class
    :return: dict of object with its properties
    """
    type_ = query_info["@type"]
    id_ = query_info["id_"]
    database_class = get_database_class(type_)
    if collection:
        objects = (
            session.query(database_class.members, database_class.member_type)
            .filter(database_class.collection_id == id_)
            .all())
        if len(objects) == 0:
            raise InstanceNotFound(type_=type_, id_=id_)
        object_template = {}
        object_template["@type"] = query_info["@type"]
        object_template["members"] = objects
        return object_template
    else:
        try:
            object_ = (
                session.query(database_class)
                .filter(database_class.id == id_)
                .one()).__dict__
        except NoResultFound:
            raise InstanceNotFound(type_=type_, id_=id_)
        # Remove the unnecessary keys from the object retrieved from database
        object_template = copy.deepcopy(object_)
        object_template.pop("_sa_instance_state")
        object_template.pop("id")
        object_template["@type"] = query_info["@type"]
        return object_template

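# Illustrative only: the query_info shapes get_object expects. The type names
# and ids below are made up for the sketch.
single = get_object({"@type": "Drone", "id_": "5"}, session)
coll = get_object({"@type": "DroneCollection", "id_": "2"}, session,
                  collection=True)
# coll -> {"@type": "DroneCollection", "members": [...]}
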
def get_modification_table_diff(
        session: scoped_session,
        agent_job_id: str = None) -> List[Dict[str, Any]]:
    """Get modification table difference.

    :param session: sqlalchemy session.
    :param agent_job_id: Job id from the client.
    :return: List of all modifications done after job with job_id = agent_job_id.
    """
    # If agent_job_id is not given then return all the elements.
    if agent_job_id is None:
        modifications = session.query(Modification).order_by(
            Modification.job_id.asc()).all()
    # If agent_job_id is given then return all records which are newer
    # than the record with agent_job_id.
    else:
        try:
            record_for_agent_job_id = session.query(Modification).filter(
                Modification.job_id == agent_job_id).one()
        except NoResultFound:
            return []
        modifications = session.query(Modification).filter(
            Modification.job_id > record_for_agent_job_id.job_id).order_by(
            Modification.job_id.asc()).all()
    # Create response body
    list_of_modification_records = []
    for modification in modifications:
        modification_record = {
            "job_id": modification.job_id,
            "method": modification.method,
            "resource_url": modification.resource_url
        }
        list_of_modification_records.append(modification_record)
    return list_of_modification_records

def insert_properties(properties: Set[str],
                      session: scoped_session) -> Optional[Any]:
    """Insert all the properties as defined in the APIDocumentation into DB."""
    prop_list = [
        BaseProperty(name=prop) for prop in properties
        if not session.query(
            exists().where(BaseProperty.name == prop)).scalar()
    ]
    session.add_all(prop_list)
    session.commit()
    return None


# if __name__ == "__main__":
#     Session = sessionmaker(bind=engine)
#     session = Session()
#
#     doc = doc_gen("test", "test")
#     # Extract all classes with supportedProperty from both
#     classes = get_classes(doc.generate())
#
#     # Extract all properties from both
#     properties = get_all_properties(classes)
#     # Add all the classes
#     insert_classes(classes, session)
#     print("Classes inserted successfully")
#     # Add all the properties
#     insert_properties(properties, session)
#     print("Properties inserted successfully")

def test_broadcast_move(
        fx_server: WSGIServer,
        fx_session: scoped_session,
        fx_other_server: WSGIServer,
        fx_other_session: Session,
        fx_user: User,
        fx_novice_status: typing.Mapping[str, str],
):
    now = datetime.datetime.utcnow()
    node = Node(url=fx_server.url, last_connected_at=now)
    node2 = Node(url=fx_other_server.url,
                 last_connected_at=datetime.datetime.utcnow())
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add_all([node, node2, move])
    fx_session.commit()
    assert not fx_other_session.query(Move).get(move.id)
    serialized = move.serialize(
        use_bencode=False,
        include_signature=True,
        include_id=True,
    )
    multicast(serialized=serialized, broadcast=broadcast_move)
    assert fx_other_session.query(Move).get(move.id)
    assert node.last_connected_at > now

def _find_duplicates(
    session: scoped_session, table: type[Statistics | StatisticsShortTerm]
) -> tuple[list[int], list[dict]]:
    """Find duplicated statistics."""
    subquery = (
        session.query(
            table.start,
            table.metadata_id,
            literal_column("1").label("is_duplicate"),
        )
        .group_by(table.metadata_id, table.start)
        .having(func.count() > 1)
        .subquery()
    )
    query = (
        session.query(table)
        .outerjoin(
            subquery,
            (subquery.c.metadata_id == table.metadata_id)
            & (subquery.c.start == table.start),
        )
        .filter(subquery.c.is_duplicate == 1)
        .order_by(table.metadata_id, table.start, table.id.desc())
        .limit(1000 * MAX_ROWS_TO_PURGE)
    )
    duplicates = execute(query)
    original_as_dict = {}
    start = None
    metadata_id = None
    duplicate_ids: list[int] = []
    non_identical_duplicates_as_dict: list[dict] = []

    if not duplicates:
        return (duplicate_ids, non_identical_duplicates_as_dict)

    def columns_to_dict(
            duplicate: type[Statistics | StatisticsShortTerm]) -> dict:
        """Convert a SQLAlchemy row to dict."""
        dict_ = {}
        for key in duplicate.__mapper__.c.keys():
            dict_[key] = getattr(duplicate, key)
        return dict_

    def compare_statistic_rows(row1: dict, row2: dict) -> bool:
        """Compare two statistics rows, ignoring id and created."""
        ignore_keys = ["id", "created"]
        keys1 = set(row1).difference(ignore_keys)
        keys2 = set(row2).difference(ignore_keys)
        return keys1 == keys2 and all(row1[k] == row2[k] for k in keys1)

    for duplicate in duplicates:
        # Rows within a (metadata_id, start) group arrive newest-first
        # (id desc); the first row is kept as the original, the rest are
        # collected as duplicates.
        if start != duplicate.start or metadata_id != duplicate.metadata_id:
            original_as_dict = columns_to_dict(duplicate)
            start = duplicate.start
            metadata_id = duplicate.metadata_id
            continue
        duplicate_as_dict = columns_to_dict(duplicate)
        duplicate_ids.append(duplicate.id)
        if not compare_statistic_rows(original_as_dict, duplicate_as_dict):
            non_identical_duplicates_as_dict.append({
                "duplicate": duplicate_as_dict,
                "original": original_as_dict
            })

    return (duplicate_ids, non_identical_duplicates_as_dict)

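# Illustrative only: what the nested compare_statistic_rows treats as an
# identical duplicate. Two rows that differ solely in "id" and "created"
# compare equal, so only the duplicate's id is collected for purging; the
# field values below are made up.
row_a = {"id": 1, "created": "2021-10-01", "metadata_id": 7, "sum": 5.0}
row_b = {"id": 2, "created": "2021-10-02", "metadata_id": 7, "sum": 5.0}
# compare_statistic_rows(row_a, row_b) -> True ("id" and "created" ignored)
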
def test_broadcast_node_same_url(fx_session: scoped_session):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.commit()
    with Mocker() as m:
        broadcast_node(serialized={'url': url}, sent_node=node)
        assert not m.called
        assert node.last_connected_at == now

def compile_hourly_statistics(instance: Recorder, session: scoped_session,
                              start: datetime) -> None:
    """Compile hourly statistics."""
    start_time = start.replace(minute=0, second=0, microsecond=0)
    end_time = start_time + timedelta(hours=1)

    # Get last hour's average, min, max
    summary = {}
    baked_query = instance.hass.data[STATISTICS_SHORT_TERM_BAKERY](
        lambda session: session.query(*QUERY_STATISTICS_SUMMARY_MEAN))

    baked_query += lambda q: q.filter(
        StatisticsShortTerm.start >= bindparam("start_time"))
    baked_query += lambda q: q.filter(
        StatisticsShortTerm.start < bindparam("end_time"))
    baked_query += lambda q: q.group_by(StatisticsShortTerm.metadata_id)
    baked_query += lambda q: q.order_by(StatisticsShortTerm.metadata_id)

    stats = execute(
        baked_query(session).params(start_time=start_time, end_time=end_time))

    if stats:
        for stat in stats:
            metadata_id, _mean, _min, _max = stat
            summary[metadata_id] = {
                "metadata_id": metadata_id,
                "mean": _mean,
                "min": _min,
                "max": _max,
            }

    # Get last hour's sum
    subquery = (
        session.query(*QUERY_STATISTICS_SUMMARY_SUM)
        .filter(StatisticsShortTerm.start >= bindparam("start_time"))
        .filter(StatisticsShortTerm.start < bindparam("end_time"))
        .subquery()
    )
    query = (
        session.query(subquery)
        .filter(subquery.c.rownum == 1)
        .order_by(subquery.c.metadata_id)
    )
    stats = execute(query.params(start_time=start_time, end_time=end_time))

    if stats:
        for stat in stats:
            metadata_id, start, last_reset, state, _sum, sum_increase, _ = stat
            summary[metadata_id] = {
                **summary.get(metadata_id, {}),
                **{
                    "metadata_id": metadata_id,
                    "last_reset": process_timestamp(last_reset),
                    "state": state,
                    "sum": _sum,
                    "sum_increase": sum_increase,
                },
            }

    for stat in summary.values():
        session.add(
            Statistics.from_stats(stat.pop("metadata_id"), start_time, stat))

def get_collection(API_NAME: str, type_: str, session: scoped_session,
                   path: str = None) -> Dict[str, Any]:
    """Retrieve a type of collection from the database.

    :param API_NAME: api name specified while starting server
    :param type_: type of object to be retrieved
    :param session: sqlalchemy scoped session
    :param path: endpoint
    :return: response containing all the objects of that particular type_

    Raises:
        ClassNotFound: If `type_` does not represent a valid/defined RDFClass.
    """
    if path is not None:
        collection_template = {
            "@id": "/{}/{}/".format(API_NAME, path),
            "@context": None,
            "@type": "{}Collection".format(type_),
            "members": list()
        }  # type: Dict[str, Any]
    else:
        collection_template = {
            "@id": "/{}/{}Collection/".format(API_NAME, type_),
            "@context": None,
            "@type": "{}Collection".format(type_),
            "members": list()
        }  # type: Dict[str, Any]
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        raise ClassNotFound(type_=type_)
    try:
        instances = session.query(Instance).filter(
            Instance.type_ == rdf_class.id).all()
    except NoResultFound:
        instances = list()
    for instance_ in instances:
        if path is not None:
            object_template = {
                "@id": "/{}/{}/{}".format(API_NAME, path, instance_.id),
                "@type": type_
            }
        else:
            object_template = {
                "@id": "/{}/{}Collection/{}".format(API_NAME, type_,
                                                    instance_.id),
                "@type": type_
            }
        collection_template["members"].append(object_template)
    return collection_template

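# Illustrative only: the response shape get_collection builds, for a made-up
# API_NAME "api" and type_ "Drone" with two stored instances and no path.
# {
#     "@id": "/api/DroneCollection/",
#     "@context": None,
#     "@type": "DroneCollection",
#     "members": [
#         {"@id": "/api/DroneCollection/1", "@type": "Drone"},
#         {"@id": "/api/DroneCollection/2", "@type": "Drone"},
#     ],
# }
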
def test_broadcast_block_same_node(fx_session: scoped_session, fx_user: User):
    block = Block.create(fx_user, [])
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.flush()
    broadcast_block(block.serialize(use_bencode=False,
                                    include_suffix=True,
                                    include_moves=True,
                                    include_hash=True),
                    sent_node=node)
    assert node.last_connected_at == now

def test_post_node(fx_test_client: FlaskClient, fx_session: scoped_session):
    url = 'http://test.neko'
    assert not fx_session.query(Node).first()
    with Mocker() as m:
        m.get(url + '/ping', text='pong')
        res = fx_test_client.post('/nodes',
                                  data=json.dumps({'url': url}),
                                  content_type='application/json')
        assert res.status_code == 200
        assert json.loads(res.get_data())['result'] == 'success'
        node = fx_session.query(Node).filter(Node.url == url).first()
        assert node
        assert node.last_connected_at

def insert_modification_record(method: str, resource_url: str,
                               session: scoped_session) -> int:
    """Insert a modification record into the database.

    :param method: HTTP method type of related operation.
    :param resource_url: URL of resource modified.
    :param session: sqlalchemy session.
    :return: ID of new modification record.
    """
    modification = Modification(method=method, resource_url=resource_url)
    session.add(modification)
    session.commit()
    return modification.job_id

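# Illustrative only: how insert_modification_record pairs with
# get_modification_table_diff for client synchronization; the resource URL
# below is made up.
job_id = insert_modification_record("PUT", "/api/Drone/1", session)
# Later, a client that last saw `job_id` asks only for newer changes:
changes = get_modification_table_diff(session, agent_job_id=job_id)
# -> [{"job_id": ..., "method": ..., "resource_url": ...}, ...]
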
def test_post_node_connection_error(fx_test_client: FlaskClient,
                                    fx_session: scoped_session):
    url = 'http://test.neko'
    assert not fx_session.query(Node).first()
    with Mocker() as m:
        m.get(url + '/ping', exc=ConnectionError)
        res = fx_test_client.post('/nodes',
                                  data=json.dumps({'url': url}),
                                  content_type='application/json')
        assert res.status_code == 403
        data = json.loads(res.get_data())
        assert data['result'] == 'failed'
        assert data['message'] == f'Connection to node {url} was failed.'
        assert not fx_session.query(Node).filter(Node.url == url).first()

def _insert_statistics(
    session: scoped_session,
    table: type[Statistics | StatisticsShortTerm],
    metadata_id: int,
    statistic: StatisticData,
) -> None:
    """Insert statistics in the database."""
    try:
        session.add(table.from_stats(metadata_id, statistic))
    except SQLAlchemyError:
        _LOGGER.exception(
            "Unexpected exception when inserting statistics %s:%s",
            metadata_id,
            statistic,
        )

def test_broadcast_my_node(fx_session: scoped_session):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.commit()
    with Mocker() as m:
        m.post('http://test.neko/nodes', json={'result': 'success'})
        broadcast_node({'url': url}, my_node=node)
        assert node.last_connected_at > now
        # check request.json value
        assert m.request_history[0].json() == {
            'url': 'http://test.neko',
            'sent_node': 'http://test.neko'
        }

def delete_single(type_: str, session: scoped_session) -> None:
    """Delete instance of classes with single objects."""
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        raise ClassNotFound(type_=type_)
    try:
        instance = session.query(Instance).filter(
            Instance.type_ == rdf_class.id).all()[-1]
    except (NoResultFound, IndexError, ValueError):
        raise InstanceNotFound(type_=rdf_class.name)
    return delete(instance.id, type_, session=session)

def insert_single(object_: Dict[str, Any], session: scoped_session) -> Any:
    """Insert instance of classes with single objects."""
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])
    try:
        session.query(Instance).filter(
            Instance.type_ == rdf_class.id).all()[-1]
    except (NoResultFound, IndexError, ValueError):
        return insert(object_, session=session)
    raise InstanceExists(type_=rdf_class.name)

def _update_or_add_metadata(
    hass: HomeAssistant,
    session: scoped_session,
    statistic_id: str,
    new_metadata: StatisticMetaData,
) -> str:
    """Get metadata_id for a statistic_id, add if it doesn't exist."""
    old_metadata_dict = _get_metadata(hass, session, [statistic_id], None)
    if not old_metadata_dict:
        unit = new_metadata["unit_of_measurement"]
        has_mean = new_metadata["has_mean"]
        has_sum = new_metadata["has_sum"]
        session.add(
            StatisticsMeta.from_meta(DOMAIN, statistic_id, unit,
                                     has_mean, has_sum))
        metadata_ids = _get_metadata_ids(hass, session, [statistic_id])
        _LOGGER.debug(
            "Added new statistics metadata for %s, new_metadata: %s",
            statistic_id,
            new_metadata,
        )
        return metadata_ids[0]

    metadata_id, old_metadata = next(iter(old_metadata_dict.items()))
    if (old_metadata["has_mean"] != new_metadata["has_mean"]
            or old_metadata["has_sum"] != new_metadata["has_sum"]
            or old_metadata["unit_of_measurement"]
            != new_metadata["unit_of_measurement"]):
        session.query(StatisticsMeta).filter_by(
            statistic_id=statistic_id).update(
            {
                StatisticsMeta.has_mean: new_metadata["has_mean"],
                StatisticsMeta.has_sum: new_metadata["has_sum"],
                StatisticsMeta.unit_of_measurement:
                    new_metadata["unit_of_measurement"],
            },
            synchronize_session=False,
        )
        _LOGGER.debug(
            "Updated statistics metadata for %s, old_metadata: %s, "
            "new_metadata: %s",
            statistic_id,
            old_metadata,
            new_metadata,
        )

    return metadata_id

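# Illustrative only: a new_metadata mapping with the keys this function reads.
# The statistic_id and unit below are made up for the sketch.
new_metadata = {
    "unit_of_measurement": "kWh",
    "has_mean": False,
    "has_sum": True,
}
# metadata_id = _update_or_add_metadata(hass, session, "sensor.energy", new_metadata)
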
def test_broadcast_move_failed(fx_session: scoped_session, fx_user: User,
                               fx_novice_status: typing.Mapping[str, str],
                               error):
    # error is supplied by pytest parametrization.
    now = datetime.datetime.utcnow()
    move = fx_user.create_novice(fx_novice_status)
    node = Node(url='http://test.neko', last_connected_at=now)
    fx_session.add_all([node, move])
    fx_session.commit()
    with Mocker() as m:
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        m.post('http://test.neko', exc=error)
        broadcast_move(serialized=serialized)
        assert node.last_connected_at == now

def test_get_new_novice_broadcasting(
        fx_test_client: FlaskClient,
        fx_user: User,
        fx_private_key: PrivateKey,
        fx_session: scoped_session,
):
    with unittest.mock.patch('nekoyume.game.multicast') as m:
        fx_test_client.post('/login', data={
            'private_key': fx_private_key.to_hex(),
            'name': 'test_user',
        }, follow_redirects=True)
        res = fx_test_client.get('/new')
        assert res.status_code == 200
        move = fx_session.query(Move).filter(
            Move.name == 'create_novice',
        ).first()
        assert move
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        assert m.called
        args = m.call_args[1]
        assert serialized == args['serialized']
        my_node = args['my_node']
        assert isinstance(my_node, Node)
        assert my_node.url == 'http://localhost'
        broadcast = args['broadcast']
        assert isinstance(broadcast, typing.Callable)
        assert broadcast.__name__ == 'broadcast_move'

def _get_or_add_metadata_id(
    hass: HomeAssistant,
    session: scoped_session,
    statistic_id: str,
    metadata: StatisticMetaData,
) -> str:
    """Get metadata_id for a statistic_id, add if it doesn't exist."""
    metadata_id = _get_metadata_ids(hass, session, [statistic_id])
    if not metadata_id:
        unit = metadata["unit_of_measurement"]
        has_mean = metadata["has_mean"]
        has_sum = metadata["has_sum"]
        session.add(
            StatisticsMeta.from_meta(DOMAIN, statistic_id, unit,
                                     has_mean, has_sum))
        metadata_id = _get_metadata_ids(hass, session, [statistic_id])
    return metadata_id[0]

def test_broadcast_move_same_url(fx_session: scoped_session, fx_user: User,
                                 fx_novice_status: typing.Mapping[str, str]):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add_all([node, move])
    fx_session.commit()
    with Mocker() as m:
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        broadcast_move(serialized=serialized, sent_node=node)
        assert not m.called
        assert node.last_connected_at == now

def get_all_filtered_instances(session: scoped_session,
                               search_params: Dict[str, Any],
                               type_: str,
                               collection: bool = False):
    """Get all the filtered instances from the database
    based on given query parameters.

    :param session: sqlalchemy scoped session
    :param search_params: Query parameters
    :param type_: @type of object to be filtered
    :param collection: True if the type_ is of a collection, False for any other class
    :return: filtered instances
    """
    database_class = get_database_class(type_)
    if collection:
        query = session.query(
            database_class.collection_id.label('id')).distinct()
    else:
        query = session.query(database_class)
    for param, param_value in search_params.items():
        # nested param
        if type(param_value) is dict:
            # Find the foreign-key column matching the nested param
            foreign_keys = database_class.__table__.foreign_keys
            for fk in foreign_keys:
                if fk.info['column_name'] == param:
                    fk_table_name = fk.column.table.name
                    break
            nested_param_db_class = get_database_class(fk_table_name)
            # build query
            for attr, value in param_value.items():
                query = query.join(nested_param_db_class)
                try:
                    query = query.filter(
                        getattr(nested_param_db_class, attr) == value)
                except AttributeError:
                    raise InvalidSearchParameter(f'{param}[{attr}]')
        else:
            value = search_params[param]
            try:
                query = query.filter(
                    getattr(database_class, param) == value)
            except AttributeError:
                raise InvalidSearchParameter(f'{param}')
    filtered_instances = query.all()
    return filtered_instances

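# Illustrative only: search_params as get_all_filtered_instances consumes
# them; the class and property names are made up. A plain value filters a
# column of the class directly, while a dict value joins and filters the
# foreign-keyed table named by that parameter.
search_params = {
    "name": "Drone 1",             # direct column filter
    "DroneState": {"speed": 100},  # nested: joins the FK'd table
}
# instances = get_all_filtered_instances(session, search_params, "Drone")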