def test_import_logs_schema_error(self):
    """Importing a record that violates the schema must log a failed detail row.

    Serves a user payload with a bogus field ("pickes") so deserialization
    fails, then verifies the DataTransferLog records one failure and zero
    successes for the User class.
    """
    info = [ExportInfo('star_user', 'User', size=1, url="/api/export/user")]
    info_json = ExportInfoSchema(many=True).jsonify(info).data
    # "pickes" is deliberately not a valid User field, so the schema load fails.
    user_json = json.dumps([{"id": "55", "pickes": "42"}])
    httpretty.register_uri(httpretty.GET, "http://na.edu/api/export",
                           body=info_json, status=200)
    httpretty.register_uri(httpretty.GET, "http://na.edu/api/export/user",
                           body=user_json, status=200)
    data_importer = self.get_data_importer_setup_auth()
    date = datetime.datetime.now()
    export_list = data_importer.get_export_list()
    log = data_importer.log_for_export(export_list, date)
    data = data_importer.request_data(export_list)
    try:
        data_importer.load_all_data(data, log)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; the schema error itself is expected here.
        pass  # Totally should happen.
    self.assertEqual("/api/export/user", httpretty.last_request().path)
    logs = db.session.query(DataTransferLog).all()
    self.assertTrue(len(logs) > 0)
    log = logs[-1]
    self.assertIsNotNone(log.date_started)
    self.assertIsNotNone(log.last_updated)
    self.assertEqual("User", log.details[0].class_name)
    self.assertFalse(log.details[0].successful)
    self.assertEqual(0, log.details[0].success_count)
    self.assertEqual(1, log.details[0].failure_count)
def request_user_setup(self):
    """Register mock export endpoints for a single User record.

    Serves an export listing plus matching user and admin payloads built
    from one fabricated User, so import code under test has consistent
    fixtures to fetch.
    """
    export_infos = [ExportInfo('star_user', 'User', size=1, url="/api/export/user")]
    mock_user = User(id=4, last_updated=datetime.datetime.now(),
                     email="*****@*****.**", role=Role.user,
                     email_verified=True, _password="******")
    info_body = ExportInfoSchema(many=True).jsonify(export_infos).data
    user_body = json.dumps(UserExportSchema(many=True).dump([mock_user]).data)
    admin_body = json.dumps(AdminExportSchema(many=True).dump([mock_user]).data)
    # Same record served through three endpoints: listing, user view, admin view.
    for endpoint, payload in (("http://na.edu/api/export", info_body),
                              ("http://na.edu/api/export/user", user_body),
                              ("http://na.edu/api/export/admin", admin_body)):
        httpretty.register_uri(httpretty.GET, endpoint, body=payload, status=200)
def load_database(self, all_data):
    """Push previously exported data back into the database via ImportService.

    Fetches the current export listing, then feeds each table's records
    from `all_data` (keyed by class name) through the importer.
    """
    raw = self.app.get('/api/export', headers=self.logged_in_headers())
    parsed = json.loads(raw.get_data(as_text=True))
    export_infos = ExportInfoSchema(many=True).load(parsed).data
    importer = ImportService(app, db)
    transfer_log = importer.log_for_export(export_infos, datetime.datetime.utcnow())
    for info in export_infos:
        info.json_data = all_data[info.class_name]
        importer.load_data(info, transfer_log)
def get_export_list(self, full_backup=False):
    """Fetch the list of exportable tables from the master server.

    If the most recent 'import' DataTransferLog was successful and
    `full_backup` is False, only changes after that log's start date are
    requested (incremental sync via the `after` query parameter).

    :param full_backup: when True, skip the incremental `after` filter and
        request the complete listing.
    :returns: result of ExportInfoSchema(many=True).load on the response JSON.
    """
    url = self.master_url + self.EXPORT_ENDPOINT
    # Most recent import log decides whether we can do an incremental pull.
    last_log = self.db.session.query(DataTransferLog).filter(DataTransferLog.type == 'import')\
        .order_by(desc(DataTransferLog.last_updated)).limit(1).first()
    if last_log and last_log.successful() and not full_backup:
        date_string = last_log.date_started.strftime(ExportService.DATE_FORMAT)
        url += "?after=" + date_string
    response = requests.get(url, headers=self.get_headers())
    # NOTE(review): sibling code calls `.load(...).data` (marshmallow 2 style);
    # this returns the load result directly — confirm which marshmallow
    # version is in play and that callers expect this shape.
    exportables = ExportInfoSchema(many=True).load(response.json())
    return exportables
def get_export(self):
    """Grabs everything exportable via the API, and returns it fully serialized as json."""
    collected = {}
    listing_rv = self.app.get('/api/export', headers=self.logged_in_headers())
    listing = json.loads(listing_rv.get_data(as_text=True))
    export_infos = ExportInfoSchema(many=True).load(listing).data
    # Hit each table's export endpoint and keep its raw JSON keyed by class name.
    for info in export_infos:
        table_rv = self.app.get(info.url, follow_redirects=True,
                                content_type="application/json",
                                headers=self.logged_in_headers())
        collected[info.class_name] = json.loads(table_rv.get_data(as_text=True))
    return collected
def test_get_export_list(self):
    """A single advertised table should yield exactly one exportable entry."""
    listing = [ExportInfo('my_table', 'my_class', size=0,
                          url="http://na.edu/api/export/my_class")]
    listing_body = ExportInfoSchema(many=True).jsonify(listing).data
    httpretty.register_uri(httpretty.GET, "http://na.edu/api/export",
                           body=listing_body, status=200)
    importer = self.get_data_importer_setup_auth()
    self.assertEqual(1, len(importer.get_export_list()))
def test_no_subsequent_requests_when_size_is_0(self):
    """Tables reporting size 0 must not trigger per-table data requests."""
    importer = self.get_data_importer_setup_auth()
    listing = [ExportInfo('my_table', 'my_class', size=0,
                          url="http://na.edu/api/export/my_class")]
    listing_body = ExportInfoSchema(many=True).jsonify(listing).data
    httpretty.register_uri(httpretty.GET, "http://na.edu/api/export",
                           body=listing_body, status=200)
    importer.request_data(importer.get_export_list())
    # One request for the auth check plus one for the listing; the size-0
    # table must not add a third.
    self.assertEqual(2, len(httpretty.httpretty.latest_requests))
    self.assertEqual("/api/export", httpretty.last_request().path)
def get(self):
    """
    Lists available questionnaires. Used for data export to get meta without specifying flow and relationship.

    Returns:
        list[ExportInfoSchema] - A list of dict objects, including the following info for each questionnaire:
            table_name (str): Snake-case database table name. E.g., "chain_session_questionnaire",
            class_name (str): Pascal-case class name for Model class. E.g., "ChainSession",
            display_name (str): Questionnaire title. E.g., "Chain Session Assessment",
            size (int): Number of questionnaire records in the database,
            url (str): Export endpoint. E.g., "/api/export/chain_session_questionnaire",
            question_type (str): 'sensitive' | 'identifying' | 'unrestricted' | 'sub-table'
            sub_tables (list[ExportInfoSchema]): A list of sub-tables within this table, if applicable.
    """
    # Keep only entries that carry a question_type, ordered by table name.
    questionnaires = [entry for entry in ExportService.get_table_info()
                      if entry.question_type]
    questionnaires.sort(key=lambda entry: entry.table_name)
    return ExportInfoSchema(many=True).dump(questionnaires)
class ExportListEndpoint(flask_restful.Resource):
    """Admin-only listing of exportable tables, with export logging."""

    schema = ExportInfoSchema(many=True)

    @auth.login_required
    @requires_roles(Role.admin)
    def get(self):
        """Return exportable, non-identifying table info and log the export.

        When nothing is available to export, the most recent export log is
        touched instead of creating a new one (creating one only if none
        exists yet).
        """
        date_started = datetime.datetime.utcnow()
        # Drop non-exportable and identifying tables in one pass.
        info_list = [
            item for item in ExportService.get_table_info(get_date_arg())
            if item.exportable
            and item.question_type != ExportService.TYPE_IDENTIFYING
        ]

        total_records_for_export = sum(item.size for item in info_list)
        log = DataTransferLog(type="export")
        for item in info_list:
            if item.size > 0:
                log.details.append(DataTransferLogDetail(
                    date_started=date_started,
                    class_name=item.class_name,
                    successful=True,
                    success_count=item.size))
        log.total_records = total_records_for_export

        if total_records_for_export == 0:
            # Nothing new: reuse the last export log rather than adding rows.
            log = db.session.query(DataTransferLog).filter(DataTransferLog.type == 'export')\
                .order_by(desc(DataTransferLog.last_updated)).limit(1).first()
            if log is None:
                log = DataTransferLog(type="export", total_records=0)

        log.last_updated = datetime.datetime.utcnow()
        db.session.add(log)
        db.session.commit()
        return self.schema.dump(info_list)
def get(self):
    """List questionnaire table info for export.

    Filters table info down to entries that have a question_type set,
    sorts them by table_name, and returns the serialized list.

    Returns:
        The ExportInfoSchema(many=True).dump of the filtered, sorted entries.
    """
    info_list = ExportService.get_table_info()
    # Entries without a question_type are not questionnaires; skip them.
    info_list = [item for item in info_list if item.question_type]
    info_list = sorted(info_list, key=lambda item: item.table_name)
    return ExportInfoSchema(many=True).dump(info_list)