def setUp(self) -> None:
    db_session.rollback()
    db_session.query(Keyword).delete()
    db_session.query(Collection).delete()
    self.collection_repo = CollectionRepository(db_session)
    self.keyword_repo = KeywordRepository(db_session)
    self.keywords = [
        Keyword(
            name="Test setup",
            doc="Prepare test environment, use teardown after this one",
        ),
        Keyword(name="Login keyword", doc="Perform some check"),
        Keyword(name="Teardown", doc="Clean up environment"),
    ]
    self.app_keyword = Keyword(name="Login to Application")
    self.collections = [
        Collection(name="First collection", type="robot", keywords=self.keywords),
        Collection(name="Second collection", type="Robot", keywords=[self.app_keyword]),
        Collection(name="Third", type="Library"),
    ]
    self.sorted_keywords = sorted(
        self.keywords + [self.app_keyword], key=lambda k: k.name
    )
    db_session.add_all(self.collections)
    db_session.commit()
    for item in self.collections:
        db_session.refresh(item)
@classmethod
def setUpClass(cls):
    cls.keyword = Keyword(
        name="My keyword",
        doc="Keyword description\n\nFurther description",
        args='["path", "arg1"]',
        collection_id=1,
    )
    cls.empty_keyword = Keyword(name="Empty keyword", collection_id=1)
def test_should_add_collection_with_keywords(self) -> None:
    name_to_add = "test_collection"
    collection = Collection(name=name_to_add)
    collection.keywords = [Keyword(name="Keyword1"), Keyword(name="Keyword2")]
    self.collection_repo.add(collection)
    results: List[Collection] = (
        db_session.query(Collection).filter_by(name=name_to_add).all()
    )
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].name, name_to_add)
    self.assertIsNotNone(results[0].id)
    self.assertEqual(len(results[0].keywords), 2)
    self.assertEqual(
        [k.name for k in results[0].keywords], ["Keyword1", "Keyword2"]
    )
def recreate_data(session: Session) -> None:
    session.query(Keyword).delete()
    session.query(Collection).delete()
    keywords = [
        Keyword(name="Test setup", doc="Prepare test environment, use teardown after this one"),
        Keyword(name="Some keyword", doc="Perform some check"),
        Keyword(name="Teardown", doc="Clean up environment"),
    ]
    collections = [
        Collection(name="First collection", type="robot", keywords=keywords),
        Collection(name="Second collection", type="Robot"),
        Collection(name="Third", type="Library"),
    ]
    session.add_all(collections)
    session.commit()
def test_should_add_keyword_with_collection_id(self) -> None:
    name_to_add = "test_keyword"
    keyword = Keyword(name=name_to_add, collection_id=self.collections[-1].id)
    self.keyword_repo.add(keyword)
    results: List[Keyword] = (
        db_session.query(Keyword).filter_by(name=name_to_add).all()
    )
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].name, name_to_add)
    self.assertIsNotNone(results[0].id)
    self.assertEqual(results[0].collection, self.collections[-1])
def from_stats_row(row: Tuple[Keyword, int, float]) -> KeywordWithStats:
    keyword = row[0]
    return KeywordWithStats(
        id=keyword.id,
        name=keyword.name,
        doc=keyword.doc,
        args=keyword.args,
        tags=Keyword.from_json_list(keyword.tags),
        arg_string=keyword.arg_string,
        html_doc=keyword.html_doc,
        synopsis=keyword.synopsis,
        collection=keyword.collection.to_nested_model(),
        times_used=row[1],
        avg_elapsed=row[2],
    )
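# Hedged usage sketch (not part of the original source): from_stats_row expects the
# tuple shape produced by the keyword/statistics aggregation query, i.e.
# (Keyword ORM instance, times_used, avg_elapsed). The variable names below are
# illustrative assumptions, not identifiers from this codebase.
#
# row = (some_keyword, 10, 250.0)
# keyword_with_stats = from_stats_row(row)
# assert keyword_with_stats.times_used == 10
# assert keyword_with_stats.avg_elapsed == 250.0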
def create_keyword(
    *,
    _: bool = Depends(is_authenticated),
    repository: KeywordRepository = Depends(get_keyword_repository),
    collection_repository: CollectionRepository = Depends(get_collection_repository),
    keyword: KeywordCreate,
):
    # Reject keywords that reference a collection that does not exist.
    collection: Optional[DBCollection] = collection_repository.get(keyword.collection_id)
    if not collection:
        raise HTTPException(status_code=400, detail="Collection does not exist")
    db_keyword: DBKeyword = repository.add(DBKeyword.create(keyword))
    return db_keyword.to_model()
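# Hedged usage sketch (assumed, not from the original source): exercising this
# endpoint through FastAPI's TestClient. The route path "/api/v1/keywords/", the
# `app` object, and the request payload fields are assumptions about how the
# router is wired up, not facts taken from this code.
#
# from fastapi.testclient import TestClient
#
# client = TestClient(app)
# response = client.post(
#     "/api/v1/keywords/",
#     json={"name": "New keyword", "collection_id": 1},
# )
# assert response.status_code in (200, 201)  # 400 if the collection does not exist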
def get_all_with_stats(
    self,
    *,
    pattern: Optional[str] = None,
    collection_name: Optional[str] = None,
    collection_id: Optional[int] = None,
    use_doc: bool = True,
    use_tags: bool = False,
    skip: int = 0,
    limit: int = 100,
    ordering: List[OrderingItem] = None,
) -> List[KeywordWithStats]:
    return [
        self.from_stats_row(row)
        for row in (
            self._items_with_stats.filter(
                *self.filter_criteria(
                    pattern, collection_name, collection_id, use_doc, use_tags
                )
            )
            .order_by(*Keyword.ordering_criteria(ordering))
            .offset(skip)
            .limit(limit)
            .all()
        )
    ]
def get_all(
    self,
    *,
    pattern: Optional[str] = None,
    collection_name: Optional[str] = None,
    collection_id: Optional[int] = None,
    use_doc: bool = True,
    use_tags: bool = False,
    skip: int = 0,
    limit: int = 100,
    ordering: List[OrderingItem] = None,
) -> List[ModelKeyword]:
    return [
        keyword.to_model()
        for keyword in (
            self.session.query(Keyword)
            .join(Keyword.collection)
            .filter(
                *self.filter_criteria(
                    pattern, collection_name, collection_id, use_doc, use_tags
                )
            )
            .order_by(*Keyword.ordering_criteria(ordering))
            .offset(skip)
            .limit(limit)
            .all()
        )
    ]
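# Hedged usage sketch (assumed, not from the original source): typical filtering
# and pagination through the repository. `keyword_repo` stands in for an instance
# of this repository class constructed with a SQLAlchemy session; the filter
# values are illustrative only.
#
# first_page = keyword_repo.get_all(pattern="login", use_doc=False, skip=0, limit=20)
# second_page = keyword_repo.get_all(pattern="login", use_doc=False, skip=20, limit=20)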
def update(self, item: Keyword, update_data: dict) -> Keyword:
    if "tags" in update_data:
        update_data["tags"] = Keyword.from_json_list(update_data["tags"])
    return super().update(item, update_data)
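# Hedged usage sketch (assumed, not from the original source): `keyword_repo` and
# the field value below are illustrative. A "tags" entry in update_data, if present,
# is passed through Keyword.from_json_list before the generic update is applied.
#
# keyword = keyword_repo.get(1)
# updated = keyword_repo.update(keyword, {"doc": "Refreshed documentation"})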
def test_should_not_add_keyword_without_collection_id(self) -> None:
    name_to_add = "test_keyword"
    keyword = Keyword(name=name_to_add)
    self.assertRaises(IntegrityError, lambda: self.keyword_repo.add(keyword))
def recreate_data(session: Session) -> None:
    session.query(Keyword).delete()
    session.query(Collection).delete()
    session.query(KeywordStatistics).delete()
    keywords = [
        Keyword(
            name="Test setup",
            doc="Prepare test environment, use teardown after this one",
        ),
        Keyword(name="Some keyword", doc="Perform some check"),
        Keyword(name="Teardown", doc="Clean up environment"),
    ]
    keywords_2 = [Keyword(name="zzz", doc="zzzzzz")]
    collections = [
        Collection(name="First collection", type="robot", keywords=keywords),
        Collection(name="Second collection", type="Robot", keywords=keywords_2),
        Collection(name="Third", type="Library"),
    ]
    statistics = [
        KeywordStatistics(
            collection="First collection",
            keyword="Test setup",
            execution_time=datetime(2019, 12, 21, 2, 30, 0, tzinfo=timezone.utc),
            times_used=10,
            total_elapsed=1000,
            min_elapsed=10,
            max_elapsed=100,
        ),
        KeywordStatistics(
            collection="First collection",
            keyword="Some keyword",
            execution_time=datetime(2019, 12, 21, 2, 30, 0, tzinfo=timezone.utc),
            times_used=5,
            total_elapsed=3000,
            min_elapsed=300,
            max_elapsed=1500,
        ),
        KeywordStatistics(
            collection="First collection",
            keyword="Some keyword",
            execution_time=datetime(2019, 12, 20, 1, 30, 0, tzinfo=timezone.utc),
            times_used=5,
            total_elapsed=2000,
            min_elapsed=200,
            max_elapsed=1000,
        ),
        KeywordStatistics(
            collection="Second collection",
            keyword="Old keyword",
            execution_time=datetime(2019, 12, 21, 1, 30, 0, tzinfo=timezone.utc),
            times_used=5,
            total_elapsed=2500,
            min_elapsed=200,
            max_elapsed=1000,
        ),
        KeywordStatistics(
            collection="Second collection",
            keyword="Old keyword",
            execution_time=datetime(2019, 12, 21, 2, 30, 0, tzinfo=timezone.utc),
            times_used=5,
            total_elapsed=2500,
            min_elapsed=100,
            max_elapsed=1000,
        ),
        KeywordStatistics(
            collection="Second collection",
            keyword="Old keyword",
            execution_time=datetime(2019, 12, 21, 3, 30, 0, tzinfo=timezone.utc),
            times_used=5,
            total_elapsed=2500,
            min_elapsed=200,
            max_elapsed=1100,
        ),
    ]
    session.add_all(collections)
    session.add_all(statistics)
    session.commit()