Example 1
    def test_slack_import_to_existing_database(
            self, mock_get_slack_api_data: mock.Mock,
            mock_build_avatar_url: mock.Mock, mock_build_avatar: mock.Mock,
            mock_process_uploads: mock.Mock,
            mock_attachment: mock.Mock) -> None:
        test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "tests",
                                      "fixtures", "slack_fixtures")
        test_slack_zip_file = os.path.join(test_slack_dir,
                                           "test_slack_importer.zip")
        test_slack_unzipped_file = os.path.join(test_slack_dir,
                                                "test_slack_importer")

        test_realm_subdomain = 'test-slack-import'
        output_dir = os.path.join(settings.DEPLOY_ROOT, "var",
                                  "test-slack-importer-data")
        token = 'valid-token'

        # If a previous run of this test failed, 'output_dir' would not have been
        # deleted, and the next run would error out because 'do_convert_data'
        # expects an empty 'output_dir'; remove it before calling 'do_convert_data'.
        self.rm_tree(output_dir)
        # Likewise, remove the unzipped fixture directory left behind if a
        # previous run failed inside 'do_convert_data'.
        self.rm_tree(test_slack_unzipped_file)

        user_data_fixture = ujson.loads(
            self.fixture_data('user_data.json', type='slack_fixtures'))
        mock_get_slack_api_data.side_effect = [
            user_data_fixture['members'], {}
        ]

        do_convert_data(test_slack_zip_file, output_dir, token)
        self.assertTrue(os.path.exists(output_dir))
        self.assertTrue(os.path.exists(output_dir + '/realm.json'))

        # test import of the converted slack data into an existing database
        with self.settings(BILLING_ENABLED=False):
            do_import_realm(output_dir, test_realm_subdomain)
        realm = get_realm(test_realm_subdomain)
        self.assertEqual(realm.name, test_realm_subdomain)

        # test RealmAuditLog
        realmauditlog = RealmAuditLog.objects.filter(realm=realm)
        realmauditlog_event_type = {log.event_type for log in realmauditlog}
        self.assertEqual(
            realmauditlog_event_type, {
                RealmAuditLog.SUBSCRIPTION_CREATED,
                RealmAuditLog.REALM_PLAN_TYPE_CHANGED
            })

        Realm.objects.filter(name=test_realm_subdomain).delete()

        remove_folder(output_dir)
        # remove tar file created in 'do_convert_data' function
        os.remove(output_dir + '.tar.gz')
        self.assertFalse(os.path.exists(output_dir))
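The `side_effect` assignment above relies on standard `unittest.mock` behavior: when `side_effect` is a list, successive calls to the mock return the list's items in order, which lets the test queue one return value per call that `do_convert_data` makes through the mocked `get_slack_api_data`. A minimal sketch of that behavior (illustrative only, not taken from the test):

from unittest import mock

api = mock.Mock()
# With a list as side_effect, each call returns the next item in order.
api.side_effect = [{"members": []}, {}]
assert api() == {"members": []}  # first call: stand-in for the member-list fixture
assert api() == {}               # second call: the empty dict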
Example 2
    def test_slack_import_to_existing_database(self, mock_get_slack_api_data: mock.Mock,
                                               mock_build_avatar_url: mock.Mock,
                                               mock_build_avatar: mock.Mock,
                                               mock_process_uploads: mock.Mock,
                                               mock_attachment: mock.Mock) -> None:
        test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "tests", "fixtures",
                                      "slack_fixtures")
        test_slack_zip_file = os.path.join(test_slack_dir, "test_slack_importer.zip")
        test_slack_unzipped_file = os.path.join(test_slack_dir, "test_slack_importer")

        test_realm_subdomain = 'test-slack-import'
        output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-slack-importer-data")
        token = 'valid-token'

        # If a previous run of this test failed, 'output_dir' would not have been
        # deleted, and the next run would error out because 'do_convert_data'
        # expects an empty 'output_dir'; remove it before calling 'do_convert_data'.
        rm_tree(output_dir)
        # Likewise, remove the unzipped fixture directory left behind if a
        # previous run failed inside 'do_convert_data'.
        rm_tree(test_slack_unzipped_file)

        user_data_fixture = ujson.loads(self.fixture_data('user_data.json', type='slack_fixtures'))
        mock_get_slack_api_data.side_effect = [user_data_fixture['members'], {}]

        do_convert_data(test_slack_zip_file, output_dir, token)
        self.assertTrue(os.path.exists(output_dir))
        self.assertTrue(os.path.exists(output_dir + '/realm.json'))

        # test import of the converted slack data into an existing database
        with self.settings(BILLING_ENABLED=False):
            do_import_realm(output_dir, test_realm_subdomain)
        realm = get_realm(test_realm_subdomain)
        self.assertEqual(realm.name, test_realm_subdomain)

        # test RealmAuditLog
        realmauditlog = RealmAuditLog.objects.filter(realm=realm)
        realmauditlog_event_type = {log.event_type for log in realmauditlog}
        self.assertEqual(realmauditlog_event_type, {'subscription_created',
                                                    'realm_plan_type_changed'})

        Realm.objects.filter(name=test_realm_subdomain).delete()

        remove_folder(output_dir)
        # remove tar file created in 'do_convert_data' function
        os.remove(output_dir + '.tar.gz')
        self.assertFalse(os.path.exists(output_dir))
Example 3
    def handle(self, *args: Any, **options: Any) -> None:
        output_dir = options["output_dir"]
        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="converted-slack-data-")
        else:
            output_dir = os.path.realpath(output_dir)

        token = options["token"]
        if token is None:
            raise CommandError("Enter Slack legacy token!")

        num_threads = int(options["threads"])
        if num_threads < 1:
            raise CommandError("You must have at least one thread.")

        for path in options["slack_data_path"]:
            if not os.path.exists(path):
                raise CommandError(f"Slack data directory not found: '{path}'")

            print("Converting data ...")
            do_convert_data(path, output_dir, token, threads=num_threads)
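For reference, `handle()` reads its options from the `options` dict, whose keys are defined by the command's `add_arguments` method (not shown in this excerpt). A hedged sketch of what a matching argument definition could look like; the option names simply mirror the keys read above and are an assumption, not the project's actual implementation:

from argparse import ArgumentParser

from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Convert a Slack data export for import (illustrative sketch)."

    def add_arguments(self, parser: ArgumentParser) -> None:
        # Option names chosen to match the keys used in handle() above.
        parser.add_argument("slack_data_path", nargs="+",
                            help="Path(s) to the Slack export zip file or directory")
        parser.add_argument("--token", help="Slack legacy API token")
        parser.add_argument("--output-dir", dest="output_dir",
                            help="Directory to write the converted data to")
        parser.add_argument("--threads", default=6,
                            help="Number of threads to use while converting")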
Example 4
    def handle(self, *args: Any, **options: Any) -> None:
        output_dir = options["output_dir"]
        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="converted-slack-data-")
        else:
            output_dir = os.path.realpath(output_dir)

        token = options['token']
        if token is None:
            print("Enter slack legacy token!")
            exit(1)

        num_threads = int(options['threads'])
        if num_threads < 1:
            raise CommandError('You must have at least one thread.')

        for path in options['slack_data_zip']:
            if not os.path.exists(path):
                print("Slack data directory not found: '%s'" % (path,))
                exit(1)

            print("Converting Data ...")
            do_convert_data(path, output_dir, token, threads=num_threads)
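Compared with Example 3, this older version aborts with `print()` followed by `exit(1)`. In a Django management command, raising `CommandError` is the more idiomatic way to stop with an error message and a non-zero exit status, since the framework prints the message to stderr for you. A minimal sketch of that pattern (illustrative, not the original code):

from typing import Any

from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    def handle(self, *args: Any, **options: Any) -> None:
        token = options.get("token")
        if token is None:
            # Raising CommandError prints the message and exits non-zero,
            # replacing the print()/exit(1) pair used above.
            raise CommandError("Enter Slack legacy token!")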
Example 5
    def handle(self, *args: Any, **options: Any) -> None:
        output_dir = options["output_dir"]
        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="/tmp/converted-slack-data-")
        else:
            output_dir = os.path.realpath(output_dir)

        token = options['token']
        if token is None:
            print("Enter slack legacy token!")
            exit(1)

        num_threads = int(options['threads'])
        if num_threads < 1:
            raise CommandError('You must have at least one thread.')

        for path in options['slack_data_zip']:
            if not os.path.exists(path):
                print("Slack data directory not found: '%s'" % (path, ))
                exit(1)

            print("Converting Data ...")
            do_convert_data(path, output_dir, token, threads=num_threads)
Example 6
    def test_slack_import_to_existing_database(
        self,
        mock_get_slack_api_data: mock.Mock,
        mock_build_avatar_url: mock.Mock,
        mock_build_avatar: mock.Mock,
        mock_process_uploads: mock.Mock,
        mock_attachment: mock.Mock,
        mock_requests_get: mock.Mock,
    ) -> None:
        test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "tests",
                                      "fixtures", "slack_fixtures")
        test_slack_zip_file = os.path.join(test_slack_dir,
                                           "test_slack_importer.zip")
        test_slack_unzipped_file = os.path.join(test_slack_dir,
                                                "test_slack_importer")

        test_realm_subdomain = "test-slack-import"
        output_dir = os.path.join(settings.DEPLOY_ROOT, "var",
                                  "test-slack-importer-data")
        token = "xoxp-valid-token"

        # If a previous run of this test failed, 'output_dir' would not have been
        # deleted, and the next run would error out because 'do_convert_data'
        # expects an empty 'output_dir'; remove it before calling 'do_convert_data'.
        self.rm_tree(output_dir)
        # Likewise, remove the unzipped fixture directory left behind if a
        # previous run failed inside 'do_convert_data'.
        self.rm_tree(test_slack_unzipped_file)

        user_data_fixture = orjson.loads(
            self.fixture_data("user_data.json", type="slack_fixtures"))
        team_info_fixture = orjson.loads(
            self.fixture_data("team_info.json", type="slack_fixtures"))
        mock_get_slack_api_data.side_effect = [
            user_data_fixture["members"],
            {},
            team_info_fixture["team"],
        ]
        with get_test_image_file("img.png") as f:
            mock_requests_get.return_value.raw = BytesIO(f.read())

        with self.assertLogs(level="INFO"):
            do_convert_data(test_slack_zip_file, output_dir, token)

        self.assertTrue(os.path.exists(output_dir))
        self.assertTrue(os.path.exists(output_dir + "/realm.json"))

        realm_icons_path = os.path.join(output_dir, "realm_icons")
        realm_icon_records_path = os.path.join(realm_icons_path,
                                               "records.json")

        self.assertTrue(os.path.exists(realm_icon_records_path))
        with open(realm_icon_records_path, "rb") as f:
            records = orjson.loads(f.read())
            self.assert_length(records, 2)
            self.assertEqual(records[0]["path"], "0/icon.original")
            self.assertTrue(
                os.path.exists(
                    os.path.join(realm_icons_path, records[0]["path"])))

            self.assertEqual(records[1]["path"], "0/icon.png")
            self.assertTrue(
                os.path.exists(
                    os.path.join(realm_icons_path, records[1]["path"])))

        # test import of the converted slack data into an existing database
        with self.settings(BILLING_ENABLED=False), self.assertLogs(
                level="INFO"):
            do_import_realm(output_dir, test_realm_subdomain)
        realm = get_realm(test_realm_subdomain)
        self.assertEqual(realm.name, test_realm_subdomain)
        self.assertEqual(realm.icon_source, Realm.ICON_UPLOADED)

        # test RealmAuditLog
        realmauditlog = RealmAuditLog.objects.filter(realm=realm)
        realmauditlog_event_type = {log.event_type for log in realmauditlog}
        self.assertEqual(
            realmauditlog_event_type,
            {
                RealmAuditLog.SUBSCRIPTION_CREATED,
                RealmAuditLog.REALM_PLAN_TYPE_CHANGED,
                RealmAuditLog.REALM_CREATED,
            },
        )

        Realm.objects.filter(name=test_realm_subdomain).delete()

        remove_folder(output_dir)
        # remove tar file created in 'do_convert_data' function
        os.remove(output_dir + ".tar.gz")
        self.assertFalse(os.path.exists(output_dir))
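The long list of mock parameters in these tests is determined by the `mock.patch` decorators stacked on the method (not shown in the excerpts): decorators are applied bottom-up, so the bottom-most patch is bound to the first mock parameter after `self`. A small, self-contained illustration of that ordering rule; the patch targets here are stand-ins, not the importer's real module paths:

import os
import unittest
from unittest import mock


class DecoratorOrderExample(unittest.TestCase):
    @mock.patch("os.path.islink")   # outermost decorator -> last mock argument
    @mock.patch("os.path.exists")   # innermost decorator -> first mock argument
    def test_order(self, mock_exists: mock.Mock, mock_islink: mock.Mock) -> None:
        mock_exists.return_value = True
        mock_islink.return_value = False
        # The bottom-most decorator's mock controls os.path.exists.
        self.assertTrue(os.path.exists("anything"))
        self.assertFalse(os.path.islink("anything"))


if __name__ == "__main__":
    unittest.main()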