def test_serialize(self, session):
    """Round-trip two ImportError rows through the collection schema and
    verify the serialized payload matches the expected dictionary."""
    errors = [
        ImportError(
            filename="Lorem_ipsum.py",
            stacktrace="Lorem ipsum",
            timestamp=timezone.parse(self.timestamp, timezone="UTC"),
        )
        for _ in range(2)
    ]
    session.add_all(errors)
    session.commit()
    fetched = session.query(ImportError).all()
    serialized_data = import_error_collection_schema.dump(
        ImportErrorCollection(import_errors=fetched, total_entries=2)
    )
    # Normalize the auto-generated primary keys so the expected payload is
    # stable regardless of what ids the test database assigned.
    for position, record in enumerate(serialized_data["import_errors"], start=1):
        record["import_error_id"] = position
    expected = {
        "import_errors": [
            {
                "filename": "Lorem_ipsum.py",
                "import_error_id": 1,
                "stack_trace": "Lorem ipsum",
                "timestamp": "2020-06-10T12:02:44+00:00",
            },
            {
                "filename": "Lorem_ipsum.py",
                "import_error_id": 2,
                "stack_trace": "Lorem ipsum",
                "timestamp": "2020-06-10T12:02:44+00:00",
            },
        ],
        "total_entries": 2,
    }
    assert serialized_data == expected
def get_import_errors(session, limit, offset=None):
    """Get all import errors"""
    # Total count is taken over the whole table, independent of pagination.
    total_entries = session.query(func.count(ImportError.id)).scalar()
    query = session.query(ImportError).order_by(ImportError.id)
    paginated = query.offset(offset).limit(limit)
    collection = ImportErrorCollection(
        import_errors=paginated.all(), total_entries=total_entries
    )
    return import_error_collection_schema.dump(collection)
def get_import_errors(session, limit, offset=None, order_by='import_error_id'):
    """Get all import errors"""
    # API-facing attribute names are mapped onto model columns before sorting.
    to_replace = {"import_error_id": 'id'}
    allowed_filter_attrs = ['import_error_id', "timestamp", "filename"]
    total_entries = session.query(func.count(ImportErrorModel.id)).scalar()
    sorted_query = apply_sorting(
        session.query(ImportErrorModel), order_by, to_replace, allowed_filter_attrs
    )
    page = sorted_query.offset(offset).limit(limit).all()
    collection = ImportErrorCollection(import_errors=page, total_entries=total_entries)
    return import_error_collection_schema.dump(collection)
def get_import_errors(session):
    """
    Get all import errors

    Pagination is driven by the request query string; ``limit`` is capped
    at 100 regardless of what the client asks for.
    """
    # Fix: convert offset to int as well — request.args values are strings,
    # and only limit was previously coerced, leaving offset as a raw str
    # when the query parameter was present.
    offset = int(request.args.get(parameters.page_offset, 0))
    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
    total_entries = session.query(func.count(ImportError.id)).scalar()
    import_errors = (
        session.query(ImportError)
        .order_by(ImportError.id)
        .offset(offset)
        .limit(limit)
        .all()
    )
    return import_error_collection_schema.dump(
        ImportErrorCollection(import_errors=import_errors, total_entries=total_entries)
    ).data
def get_import_errors(session):
    """
    Get all import errors

    Reads ``offset``/``limit`` pagination parameters from the request
    query string, capping ``limit`` at 100.
    """
    # Fix: coerce offset to int — request.args values arrive as strings,
    # and previously only limit was converted, so a supplied offset was
    # passed to Query.offset() as a str.
    offset = int(request.args.get(parameters.page_offset, 0))
    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
    query = session.query(ImportError)
    total_entries = query.count()
    query_list = query.offset(offset).limit(limit).all()
    return import_error_collection_schema.dump(
        ImportErrorCollection(import_errors=query_list, total_entries=total_entries)
    ).data
def get_import_errors(
    *,
    limit: int,
    offset: Optional[int] = None,
    order_by: str = "import_error_id",
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get all import errors"""
    # Translate the public sort key onto the underlying model column and
    # restrict sorting to the whitelisted attributes.
    to_replace = {"import_error_id": 'id'}
    allowed_filter_attrs = ['import_error_id', "timestamp", "filename"]
    total_entries = session.query(func.count(ImportErrorModel.id)).scalar()
    sorted_query = apply_sorting(
        session.query(ImportErrorModel), order_by, to_replace, allowed_filter_attrs
    )
    page = sorted_query.offset(offset).limit(limit).all()
    collection = ImportErrorCollection(import_errors=page, total_entries=total_entries)
    return import_error_collection_schema.dump(collection)