def handle(self, *args, **options):
    # type: (*Any, **Any) -> None
    models_to_import = [Realm, Stream, UserProfile, Recipient, Subscription,
                        Client, Message, UserMessage, Huddle, DefaultStream,
                        RealmAlias, RealmFilter]

    if len(args) == 0:
        print("Please provide at least one realm dump to import.")
        exit(1)

    if options["destroy_rebuild_database"]:
        print("Rebuilding the database!")
        db_name = settings.DATABASES['default']['NAME']
        self.do_destroy_and_rebuild_database(db_name)
    elif not options["import_into_nonempty"]:
        for model in models_to_import:
            self.new_instance_check(model)

    for path in args:
        if not os.path.exists(path):
            print("Directory not found: '%s'" % (path,))
            exit(1)
        print("Processing dump: %s ..." % (path,))
        do_import_realm(path)
def handle(self, *args: Any, **options: Any) -> None:
    models_to_import = [Realm, Stream, UserProfile, Recipient, Subscription,
                        Client, Message, UserMessage, Huddle, DefaultStream,
                        RealmDomain, RealmFilter]

    if options["destroy_rebuild_database"]:
        print("Rebuilding the database!")
        db_name = settings.DATABASES['default']['NAME']
        self.do_destroy_and_rebuild_database(db_name)
    elif not options["import_into_nonempty"]:
        for model in models_to_import:
            self.new_instance_check(model)

    for path in options['export_files']:
        if not os.path.exists(path):
            print("Directory not found: '%s'" % (path,))
            exit(1)
        print("Processing dump: %s ..." % (path,))
        do_import_realm(path)

    if options["destroy_rebuild_database"]:
        print("Resetting auto-increment sequence for Postgres......")
        subprocess.check_call([os.path.join(settings.DEPLOY_ROOT,
                                            "scripts/setup/postgres-reset-sequences")])
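The options read in handle() above have to be declared on the command's argument parser. A minimal sketch of a matching add_arguments follows, assuming the flag spellings and help strings; only the dest names ('destroy_rebuild_database', 'import_into_nonempty', 'export_files') are pinned down by the options[...] lookups in handle().

from argparse import ArgumentParser

def add_arguments(self, parser: ArgumentParser) -> None:
    # Hypothetical flag spellings; the dest names are the only parts
    # fixed by the options[...] lookups in handle() above.
    parser.add_argument('--destroy-rebuild-database',
                        dest='destroy_rebuild_database',
                        action='store_true',
                        help='Destroy and rebuild the database before importing.')
    parser.add_argument('--import-into-nonempty',
                        dest='import_into_nonempty',
                        action='store_true',
                        help='Import into an existing, nonempty database.')
    parser.add_argument('export_files', nargs='+',
                        metavar='<export path>',
                        help='Export directories to import.')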
def test_slack_import_to_existing_database(self, mock_get_user_data: mock.Mock,
                                           mock_build_avatar_url: mock.Mock,
                                           mock_build_avatar: mock.Mock) -> None:
    test_slack_zip_file = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                       "slack_fixtures", "test_slack_importer.zip")
    test_realm_subdomain = 'test-slack-import'
    output_dir = '/tmp/test-slack-importer-data'
    token = 'valid-token'

    user_data_fixture = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                     "slack_fixtures", "user_data.json")
    mock_get_user_data.return_value = ujson.load(open(user_data_fixture))['members']

    do_convert_data(test_slack_zip_file, test_realm_subdomain, output_dir, token)
    self.assertTrue(os.path.exists(output_dir))
    self.assertTrue(os.path.exists(output_dir + '/realm.json'))

    # Test import of the converted Slack data into an existing database.
    do_import_realm(output_dir)
    self.assertEqual(get_realm(test_realm_subdomain).name, test_realm_subdomain)
    Realm.objects.filter(name=test_realm_subdomain).delete()

    remove_folder(output_dir)
    # Remove the tar file created in 'do_convert_data'.
    os.remove(output_dir + '.tar.gz')
    self.assertFalse(os.path.exists(output_dir))
def handle(self, *args: Any, **options: Any) -> None:
    models_to_import = [Realm, Stream, UserProfile, Recipient, Subscription,
                        Client, Message, UserMessage, Huddle, DefaultStream,
                        RealmDomain, RealmFilter]

    subdomain = options['subdomain']
    if subdomain is None:
        print("Enter subdomain!")
        exit(1)

    if options["destroy_rebuild_database"]:
        print("Rebuilding the database!")
        db_name = settings.DATABASES['default']['NAME']
        self.do_destroy_and_rebuild_database(db_name)
    elif not options["import_into_nonempty"]:
        for model in models_to_import:
            self.new_instance_check(model)
    check_subdomain_available(subdomain)

    for path in options['export_files']:
        if not os.path.exists(path):
            print("Directory not found: '%s'" % (path,))
            exit(1)
        print("Processing dump: %s ..." % (path,))
        realm = do_import_realm(path, subdomain)
        print("Checking the system bots.")
        do_import_system_bots(realm)
def handle(self, *args: Any, **options: Any) -> None:
    models_to_import = [Realm, Stream, UserProfile, Recipient, Subscription,
                        Client, Message, UserMessage, Huddle, DefaultStream,
                        RealmDomain, RealmFilter]

    if options["destroy_rebuild_database"]:
        print("Rebuilding the database!")
        db_name = settings.DATABASES['default']['NAME']
        self.do_destroy_and_rebuild_database(db_name)
    elif not options["import_into_nonempty"]:
        for model in models_to_import:
            self.new_instance_check(model)

    for path in options['export_files']:
        if not os.path.exists(path):
            print("Directory not found: '%s'" % (path,))
            exit(1)
        print("Processing dump: %s ..." % (path,))
        realm = do_import_realm(path)
        print("Resetting auto-increment sequence for Postgres......")
        subprocess.check_call([os.path.join(settings.DEPLOY_ROOT,
                                            "scripts/setup/postgres-reset-sequences")])
        print("Checking the system bots.")
        do_import_system_bots(realm)
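For reference, one way to drive this handler from a test or a shell session is Django's call_command. A hedged usage sketch, where the command name 'import' and the export path are assumptions; keyword arguments map directly onto the dest names handle() reads.

from django.core.management import call_command

# 'import' is the assumed command name and the path is hypothetical;
# positional arguments are parsed into export_files by the command's
# own parser, while keyword arguments become entries in options.
call_command('import', '/tmp/zulip-export', destroy_rebuild_database=True)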
def test_slack_import_to_existing_database(self, mock_get_slack_api_data: mock.Mock,
                                           mock_build_avatar_url: mock.Mock,
                                           mock_build_avatar: mock.Mock,
                                           mock_process_uploads: mock.Mock,
                                           mock_attachment: mock.Mock) -> None:
    test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                  "slack_fixtures")
    test_slack_zip_file = os.path.join(test_slack_dir, "test_slack_importer.zip")
    test_slack_unzipped_file = os.path.join(test_slack_dir, "test_slack_importer")

    test_realm_subdomain = 'test-slack-import'
    output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-slack-importer-data")
    token = 'valid-token'

    # If a previous run of this test failed, 'output_dir' was never
    # deleted, and a leftover directory makes 'do_convert_data' fail,
    # since it expects an empty 'output_dir'; remove it up front.
    rm_tree(output_dir)
    # Likewise remove the unzipped data left behind if an earlier run
    # failed inside 'do_convert_data'.
    rm_tree(test_slack_unzipped_file)

    user_data_fixture = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                     "slack_fixtures", "user_data.json")
    mock_get_slack_api_data.side_effect = [
        ujson.load(open(user_data_fixture))['members'], {}]

    do_convert_data(test_slack_zip_file, test_realm_subdomain, output_dir, token)
    self.assertTrue(os.path.exists(output_dir))
    self.assertTrue(os.path.exists(output_dir + '/realm.json'))

    # Test import of the converted Slack data into an existing database.
    do_import_realm(output_dir)
    self.assertEqual(get_realm(test_realm_subdomain).name, test_realm_subdomain)
    Realm.objects.filter(name=test_realm_subdomain).delete()

    remove_folder(output_dir)
    # Remove the tar file created in 'do_convert_data'.
    os.remove(output_dir + '.tar.gz')
    self.assertFalse(os.path.exists(output_dir))
def handle(self, *args: Any, **options: Any) -> None:
    models_to_import = [Realm, Stream, UserProfile, Recipient, Subscription,
                        Client, Message, UserMessage, Huddle, DefaultStream,
                        RealmDomain, RealmFilter]

    if options["destroy_rebuild_database"]:
        print("Rebuilding the database!")
        db_name = settings.DATABASES['default']['NAME']
        self.do_destroy_and_rebuild_database(db_name)
    elif not options["import_into_nonempty"]:
        for model in models_to_import:
            self.new_instance_check(model)

    for path in options['export_files']:
        if not os.path.exists(path):
            print("Directory not found: '%s'" % (path,))
            exit(1)
        print("Processing dump: %s ..." % (path,))
        do_import_realm(path)
def test_slack_import_to_existing_database(self, mock_get_user_data: mock.Mock,
                                           mock_build_avatar_url: mock.Mock,
                                           mock_build_avatar: mock.Mock,
                                           mock_process_uploads: mock.Mock) -> None:
    test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                  "slack_fixtures")
    test_slack_zip_file = os.path.join(test_slack_dir, "test_slack_importer.zip")
    test_slack_unzipped_file = os.path.join(test_slack_dir, "test_slack_importer")

    test_realm_subdomain = 'test-slack-import'
    output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-slack-importer-data")
    token = 'valid-token'

    # If a previous run of this test failed, 'output_dir' was never
    # deleted, and a leftover directory makes 'do_convert_data' fail,
    # since it expects an empty 'output_dir'; remove it up front.
    rm_tree(output_dir)
    # Likewise remove the unzipped data left behind if an earlier run
    # failed inside 'do_convert_data'.
    rm_tree(test_slack_unzipped_file)

    user_data_fixture = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                     "slack_fixtures", "user_data.json")
    mock_get_user_data.return_value = ujson.load(open(user_data_fixture))['members']

    do_convert_data(test_slack_zip_file, test_realm_subdomain, output_dir, token)
    self.assertTrue(os.path.exists(output_dir))
    self.assertTrue(os.path.exists(output_dir + '/realm.json'))

    # Test import of the converted Slack data into an existing database.
    do_import_realm(output_dir)
    self.assertEqual(get_realm(test_realm_subdomain).name, test_realm_subdomain)
    Realm.objects.filter(name=test_realm_subdomain).delete()

    remove_folder(output_dir)
    # Remove the tar file created in 'do_convert_data'.
    os.remove(output_dir + '.tar.gz')
    self.assertFalse(os.path.exists(output_dir))
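The mock parameters in the test signatures above are injected by a stack of mock.patch decorators; patches apply bottom-up, so the decorator nearest the function supplies the first mock argument after self. A sketch matching this final signature, with hypothetical patch targets:

# The dotted targets are hypothetical placeholders; only the ordering
# rule is fixed: the bottom-most @mock.patch feeds the first mock
# parameter after self.
@mock.patch("zerver.data_import.slack.process_uploads", return_value=[])
@mock.patch("zerver.data_import.slack.build_avatar")
@mock.patch("zerver.data_import.slack.build_avatar_url")
@mock.patch("zerver.data_import.slack.get_user_data")
def test_slack_import_to_existing_database(self, mock_get_user_data: mock.Mock,
                                           mock_build_avatar_url: mock.Mock,
                                           mock_build_avatar: mock.Mock,
                                           mock_process_uploads: mock.Mock) -> None:
    ...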