Example #1
0
    def handle(self, *args: Any, **options: Any) -> None:
        """Import one or more realm export dumps into this server.

        Optionally destroys and rebuilds the database first.  Every
        export path is normalized and validated up front, so a bad
        argument fails before any dump has been imported.
        """
        subdomain = options['subdomain']

        if options["destroy_rebuild_database"]:
            print("Rebuilding the database!")
            db_name = settings.DATABASES['default']['NAME']
            self.do_destroy_and_rebuild_database(db_name)
        elif options["import_into_nonempty"]:
            print(
                "NOTE: The argument 'import_into_nonempty' is now the default behavior."
            )

        check_subdomain_available(subdomain, from_management_command=True)

        # Normalize (expand ~, resolve symlinks) and validate every path
        # before importing anything, so a multi-dump import never aborts
        # halfway through; the original re-read the raw, unnormalized
        # options in the second loop.
        paths = []
        for path in options['export_paths']:
            path = os.path.realpath(os.path.expanduser(path))
            if not os.path.exists(path):
                print("Directory not found: '%s'" % (path, ))
                exit(1)
            if not os.path.isdir(path):
                print(
                    "Export file should be folder; if it's a tarball, please unpack it first."
                )
                exit(1)
            paths.append(path)

        for path in paths:
            print("Processing dump: %s ..." % (path, ))
            realm = do_import_realm(path, subdomain)
            print("Checking the system bots.")
            do_import_system_bots(realm)
Example #2
0
    def handle(self, *args: Any, **options: Any) -> None:
        """Import realm export dumps into this server under SUBDOMAIN."""
        subdomain = options['subdomain']

        if options["destroy_rebuild_database"]:
            print("Rebuilding the database!")
            self.do_destroy_and_rebuild_database(settings.DATABASES['default']['NAME'])
        elif options["import_into_nonempty"]:
            print("NOTE: The argument 'import_into_nonempty' is now the default behavior.")

        check_subdomain_available(subdomain, from_management_command=True)

        # Resolve and validate every dump directory before importing any
        # of them, so a bad argument fails fast.
        validated_paths = []
        for raw_path in options['export_paths']:
            dump_dir = os.path.realpath(os.path.expanduser(raw_path))
            if not os.path.exists(dump_dir):
                print("Directory not found: '%s'" % (dump_dir,))
                exit(1)
            if not os.path.isdir(dump_dir):
                print("Export file should be folder; if it's a tarball, please unpack it first.")
                exit(1)
            validated_paths.append(dump_dir)

        for dump_dir in validated_paths:
            print("Processing dump: %s ..." % (dump_dir,))
            realm = do_import_realm(dump_dir, subdomain)
            print("Checking the system bots.")
            do_import_system_bots(realm)
Example #3
0
    def handle(self, *args: Any, **options: Any) -> None:
        """Import realm export dumps, optionally requiring empty tables."""
        models_to_import = [
            Realm, Stream, UserProfile, Recipient, Subscription,
            Client, Message, UserMessage, Huddle, DefaultStream, RealmDomain,
            RealmFilter,
        ]

        subdomain = options['subdomain']
        if subdomain is None:
            print("Enter subdomain!")
            exit(1)

        if options["destroy_rebuild_database"]:
            # Wipe and recreate the database before importing.
            print("Rebuilding the database!")
            self.do_destroy_and_rebuild_database(settings.DATABASES['default']['NAME'])
        elif not options["import_into_nonempty"]:
            # Unless importing into a populated database was explicitly
            # requested, insist that every target table is empty.
            for model in models_to_import:
                self.new_instance_check(model)

        check_subdomain_available(subdomain, from_management_command=True)

        for dump_path in options['export_files']:
            if not os.path.exists(dump_path):
                print("Directory not found: '%s'" % (dump_path,))
                exit(1)

            print("Processing dump: %s ..." % (dump_path,))
            realm = do_import_realm(dump_path, subdomain)
            print("Checking the system bots.")
            do_import_system_bots(realm)
Example #4
0
    def handle(self, *args: Any, **options: Any) -> None:
        """Import realm export dumps using a pool of worker processes."""
        num_processes = int(options['processes'])
        if num_processes < 1:
            raise CommandError('You must have at least one process.')

        subdomain = options['subdomain']

        if options["destroy_rebuild_database"]:
            print("Rebuilding the database!")
            self.do_destroy_and_rebuild_database(settings.DATABASES['default']['NAME'])
        elif options["import_into_nonempty"]:
            print("NOTE: The argument 'import_into_nonempty' is now the default behavior.")

        check_subdomain_available(subdomain, from_management_command=True)

        # Fail fast: resolve and validate every path before importing
        # any dump at all.
        paths = []
        for raw_path in options['export_paths']:
            resolved = os.path.realpath(os.path.expanduser(raw_path))
            if not os.path.exists(resolved):
                raise CommandError(f"Directory not found: '{resolved}'")
            if not os.path.isdir(resolved):
                raise CommandError(
                    "Export file should be folder; if it's a tarball, please unpack it first.")
            paths.append(resolved)

        for path in paths:
            print(f"Processing dump: {path} ...")
            realm = do_import_realm(path, subdomain, num_processes)
            print("Checking the system bots.")
            do_import_system_bots(realm)
Example #5
0
def do_convert_data(slack_zip_file: str, realm_subdomain: str, output_dir: str, token: str) -> None:
    """Convert a Slack export zip into a Zulip-importable data dump.

    Unzips ``slack_zip_file``, converts the workspace (realm, users,
    channels, messages, avatars, uploads) into Zulip's import format,
    writes the JSON and media record files under ``output_dir``, and
    packs the result as ``<output_dir>.tar.gz``.

    Raises:
        Exception: if ``output_dir`` is not empty.
    """
    check_subdomain_available(realm_subdomain)

    domain_name = settings.EXTERNAL_HOST

    slack_data_dir = slack_zip_file.replace('.zip', '')
    # exist_ok=True avoids the check-then-create race of the old
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(slack_data_dir, exist_ok=True)

    os.makedirs(output_dir, exist_ok=True)
    # output directory should be empty initially
    if os.listdir(output_dir):
        raise Exception('Output directory should be empty!')

    subprocess.check_call(['unzip', '-q', slack_zip_file, '-d', slack_data_dir])
    # with zipfile.ZipFile(slack_zip_file, 'r') as zip_ref:
    #     zip_ref.extractall(slack_data_dir)

    script_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    fixtures_path = script_path + '../fixtures/'

    realm_id = allocate_ids(Realm, 1)[0]

    user_list = get_user_data(token)
    realm, added_users, added_recipient, added_channels, avatar_list = slack_workspace_to_realm(
        domain_name, realm_id, user_list, realm_subdomain, fixtures_path, slack_data_dir)

    message_json, uploads_list, zerver_attachment = convert_slack_workspace_messages(
        slack_data_dir, user_list, realm_id, added_users, added_recipient, added_channels,
        realm, domain_name)

    avatar_folder = os.path.join(output_dir, 'avatars')
    avatar_realm_folder = os.path.join(avatar_folder, str(realm_id))

    os.makedirs(avatar_realm_folder, exist_ok=True)
    avatar_records = process_avatars(avatar_list, avatar_folder, realm_id)

    uploads_folder = os.path.join(output_dir, 'uploads')
    os.makedirs(os.path.join(uploads_folder, str(realm_id)), exist_ok=True)
    uploads_records = process_uploads(uploads_list, uploads_folder)
    attachment = {"zerver_attachment": zerver_attachment}

    # IO realm.json
    create_converted_data_files(realm, output_dir, '/realm.json')
    # IO message.json
    create_converted_data_files(message_json, output_dir, '/messages-000001.json')
    # IO avatar records
    create_converted_data_files(avatar_records, output_dir, '/avatars/records.json')
    # IO uploads TODO
    create_converted_data_files(uploads_records, output_dir, '/uploads/records.json')
    # IO attachments
    create_converted_data_files(attachment, output_dir, '/attachment.json')

    # remove slack dir
    rm_tree(slack_data_dir)
    subprocess.check_call(["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    logging.info('######### DATA CONVERSION FINISHED #########\n')
    # Lazy %-style args: logging interpolates only if the record is emitted.
    logging.info("Zulip data dump created at %s", output_dir)
def do_convert_data(slack_zip_file: str, realm_subdomain: str, output_dir: str,
                    token: str) -> None:
    """Convert a Slack export zip into a Zulip-importable data dump.

    Unzips ``slack_zip_file``, converts the workspace data into Zulip's
    import format, writes realm.json, messages-000001.json, empty media
    record files and attachment.json under ``output_dir``, then packs
    the result as ``<output_dir>.tar.gz``.

    Raises:
        Exception: if ``output_dir`` is not empty.
    """
    check_subdomain_available(realm_subdomain)
    slack_data_dir = slack_zip_file.replace('.zip', '')
    # exist_ok=True avoids the check-then-create race of the old
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(slack_data_dir, exist_ok=True)

    os.makedirs(output_dir, exist_ok=True)
    # output directory should be empty initially
    if os.listdir(output_dir):
        raise Exception('Output directory should be empty!')

    subprocess.check_call(
        ['unzip', '-q', slack_zip_file, '-d', slack_data_dir])
    # with zipfile.ZipFile(slack_zip_file, 'r') as zip_ref:
    #     zip_ref.extractall(slack_data_dir)

    script_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    fixtures_path = script_path + '../fixtures/'

    REALM_ID = allocate_ids(Realm, 1)[0]

    user_list = get_user_data(token)
    realm, added_users, added_recipient, added_channels = slack_workspace_to_realm(
        REALM_ID, user_list, realm_subdomain, fixtures_path, slack_data_dir)
    message_json = convert_slack_workspace_messages(slack_data_dir, user_list,
                                                    REALM_ID, added_users,
                                                    added_recipient,
                                                    added_channels, realm)

    # Attachments are not converted yet; write an empty table.
    zerver_attachment = []  # type: List[ZerverFieldsT]
    attachment = {"zerver_attachment": zerver_attachment}

    # IO realm.json
    create_converted_data_files(realm, output_dir, '/realm.json', False)
    # IO message.json
    create_converted_data_files(message_json, output_dir,
                                '/messages-000001.json', False)
    # IO avatar records
    create_converted_data_files([], output_dir, '/avatars/records.json', True)
    # IO uploads TODO
    create_converted_data_files([], output_dir, '/uploads/records.json', True)
    # IO attachments
    create_converted_data_files(attachment, output_dir, '/attachment.json',
                                False)

    # remove slack dir
    rm_tree(slack_data_dir)
    subprocess.check_call(
        ["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    logging.info('######### DATA CONVERSION FINISHED #########\n')
    # Lazy %-style args: logging interpolates only if the record is emitted.
    logging.info("Zulip data dump created at %s", output_dir)
Example #7
0
def do_convert_data(slack_zip_file: str, realm_subdomain: str,
                    output_dir: str) -> None:
    """Convert a Slack export zip into a Zulip-importable data dump.

    Unzips ``slack_zip_file``, converts the workspace into Zulip's
    import format, writes the converted JSON files under ``output_dir``,
    packs the result as ``<output_dir>.tar.gz``, then exits the process.

    NOTE(review): the trailing ``sys.exit(0)`` terminates the whole
    process, so no caller code after this function ever runs — confirm
    that is intentional before reusing this as a library function.
    """
    check_subdomain_available(realm_subdomain)
    slack_data_dir = slack_zip_file.replace('.zip', '')
    # exist_ok=True avoids the check-then-create race of the old
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(slack_data_dir, exist_ok=True)
    subprocess.check_call(
        ['unzip', '-q', slack_zip_file, '-d', slack_data_dir])
    # with zipfile.ZipFile(slack_zip_file, 'r') as zip_ref:
    #     zip_ref.extractall(slack_data_dir)

    script_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    fixtures_path = script_path + '../fixtures/'

    REALM_ID = get_model_id(Realm, 'zerver_realm', 1)
    realm, added_users, added_recipient, added_channels = slack_workspace_to_realm(
        REALM_ID, realm_subdomain, fixtures_path, slack_data_dir)
    message_json = convert_slack_workspace_messages(slack_data_dir, REALM_ID,
                                                    added_users,
                                                    added_recipient,
                                                    added_channels, realm)

    # Attachments are not converted yet; write an empty table.
    zerver_attachment = []  # type: List[ZerverFieldsT]
    attachment = {"zerver_attachment": zerver_attachment}

    # IO realm.json
    create_converted_data_files(realm, output_dir, '/realm.json', False)
    # IO message.json
    create_converted_data_files(message_json, output_dir,
                                '/messages-000001.json', False)
    # IO avatar records
    create_converted_data_files([], output_dir, '/avatars/records.json', True)
    # IO uploads TODO
    create_converted_data_files([], output_dir, '/uploads/records.json', True)
    # IO attachments
    create_converted_data_files(attachment, output_dir, '/attachment.json',
                                False)

    # remove slack dir
    rm_tree(slack_data_dir)
    subprocess.check_call(
        ["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    print('######### DATA CONVERSION FINISHED #########\n')
    print("Zulip data dump created at %s" % (output_dir))
    sys.exit(0)
Example #8
0
    def handle(self, *args: Any, **options: Any) -> None:
        """Import realm dumps, optionally into a reserved subdomain."""
        num_processes = int(options["processes"])
        if num_processes < 1:
            raise CommandError("You must have at least one process.")

        subdomain = options["subdomain"]

        if options["destroy_rebuild_database"]:
            print("Rebuilding the database!")
            self.do_destroy_and_rebuild_database(settings.DATABASES["default"]["NAME"])
        elif options["import_into_nonempty"]:
            print("NOTE: The argument 'import_into_nonempty' is now the default behavior.")

        allow_reserved_subdomain = bool(options["allow_reserved_subdomain"])

        try:
            check_subdomain_available(subdomain, allow_reserved_subdomain)
        except ValidationError:
            raise CommandError(
                "Subdomain reserved: pass --allow-reserved-subdomain to use.")

        # Resolve and validate every dump directory before importing any.
        paths = []
        for raw in options["export_paths"]:
            resolved = os.path.realpath(os.path.expanduser(raw))
            if not os.path.exists(resolved):
                raise CommandError(f"Directory not found: '{resolved}'")
            if not os.path.isdir(resolved):
                raise CommandError(
                    "Export file should be folder; if it's a tarball, please unpack it first."
                )
            paths.append(resolved)

        for path in paths:
            print(f"Processing dump: {path} ...")
            realm = do_import_realm(path, subdomain, num_processes)
            print("Checking the system bots.")
            do_import_system_bots(realm)
Example #9
0
def support(
    request: HttpRequest,
    realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    plan_type: Optional[int] = REQ(default=None,
                                   converter=to_non_negative_int),
    discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
    new_subdomain: Optional[str] = REQ(default=None),
    status: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
    billing_method: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_BILLING_METHODS)),
    sponsorship_pending: Optional[bool] = REQ(default=None,
                                              json_validator=check_bool),
    approve_sponsorship: Optional[bool] = REQ(default=None,
                                              json_validator=check_bool),
    downgrade_method: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_DOWNGRADE_METHODS)),
    scrub_realm: Optional[bool] = REQ(default=None, json_validator=check_bool),
    query: Optional[str] = REQ("q", default=None),
    org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
) -> HttpResponse:
    """Staff-facing /support page: search realms/users and apply actions.

    A request with ``q`` renders search results (users, realms and
    their pending confirmations).  When billing is enabled, a POST must
    carry ``realm_id`` plus exactly one action parameter (plan_type,
    org_type, discount, new_subdomain, status, billing_method,
    sponsorship_pending, approve_sponsorship, downgrade_method or
    scrub_realm); that action is applied to the realm before rendering.

    Raises JsonableError if the POST does not contain exactly the
    realm_id and one action field.
    """
    context: Dict[str, Any] = {}

    # Surface a message stashed by a previous redirecting POST (e.g. a
    # subdomain change), then clear it so it only shows once.
    if "success_message" in request.session:
        context["success_message"] = request.session["success_message"]
        del request.session["success_message"]

    if settings.BILLING_ENABLED and request.method == "POST":
        # We check that request.POST only has two keys in it: The
        # realm_id and a field to change.
        keys = set(request.POST.keys())
        if "csrfmiddlewaretoken" in keys:
            keys.remove("csrfmiddlewaretoken")
        if len(keys) != 2:
            raise JsonableError(_("Invalid parameters"))

        realm = Realm.objects.get(id=realm_id)

        acting_user = request.user
        assert isinstance(acting_user, UserProfile)
        # Exactly one of the branches below runs per POST, guaranteed
        # by the two-key check above.
        if plan_type is not None:
            current_plan_type = realm.plan_type
            do_change_plan_type(realm, plan_type, acting_user=acting_user)
            msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
            context["success_message"] = msg
        elif org_type is not None:
            current_realm_type = realm.org_type
            do_change_realm_org_type(realm, org_type, acting_user=acting_user)
            msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
            context["success_message"] = msg
        elif discount is not None:
            current_discount = get_discount_for_realm(realm) or 0
            attach_discount_to_realm(realm, discount, acting_user=acting_user)
            context[
                "success_message"] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
        elif new_subdomain is not None:
            old_subdomain = realm.string_id
            try:
                check_subdomain_available(new_subdomain)
            except ValidationError as error:
                context["error_message"] = error.message
            else:
                do_change_realm_subdomain(realm,
                                          new_subdomain,
                                          acting_user=acting_user)
                # Stash the message in the session and redirect, since
                # the old URL's query no longer matches the realm.
                request.session[
                    "success_message"] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
                return HttpResponseRedirect(
                    reverse("support") + "?" + urlencode({"q": new_subdomain}))
        elif status is not None:
            if status == "active":
                do_send_realm_reactivation_email(realm,
                                                 acting_user=acting_user)
                context[
                    "success_message"] = f"Realm reactivation email sent to admins of {realm.string_id}."
            elif status == "deactivated":
                do_deactivate_realm(realm, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} deactivated."
        elif billing_method is not None:
            if billing_method == "send_invoice":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=False, acting_user=acting_user)
                context[
                    "success_message"] = f"Billing method of {realm.string_id} updated to pay by invoice."
            elif billing_method == "charge_automatically":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=True, acting_user=acting_user)
                context[
                    "success_message"] = f"Billing method of {realm.string_id} updated to charge automatically."
        elif sponsorship_pending is not None:
            if sponsorship_pending:
                update_sponsorship_status(realm, True, acting_user=acting_user)
                context[
                    "success_message"] = f"{realm.string_id} marked as pending sponsorship."
            else:
                update_sponsorship_status(realm,
                                          False,
                                          acting_user=acting_user)
                context[
                    "success_message"] = f"{realm.string_id} is no longer pending sponsorship."
        elif approve_sponsorship:
            do_approve_sponsorship(realm, acting_user=acting_user)
            context[
                "success_message"] = f"Sponsorship approved for {realm.string_id}"
        elif downgrade_method is not None:
            if downgrade_method == "downgrade_at_billing_cycle_end":
                downgrade_at_the_end_of_billing_cycle(realm)
                context[
                    "success_message"] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
            elif downgrade_method == "downgrade_now_without_additional_licenses":
                downgrade_now_without_creating_additional_invoices(realm)
                context[
                    "success_message"] = f"{realm.string_id} downgraded without creating additional invoices"
            elif downgrade_method == "downgrade_now_void_open_invoices":
                downgrade_now_without_creating_additional_invoices(realm)
                voided_invoices_count = void_all_open_invoices(realm)
                context[
                    "success_message"] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
        elif scrub_realm:
            do_scrub_realm(realm, acting_user=acting_user)
            context["success_message"] = f"{realm.string_id} scrubbed."

    # Search mode: resolve each query term into matching users/realms.
    if query:
        key_words = get_invitee_emails_set(query)

        users = set(UserProfile.objects.filter(delivery_email__in=key_words))
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                # If the term parses as a URL, look up the realm by its
                # subdomain; otherwise fall back to a full-name search.
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                try:
                    realms.add(get_realm(subdomain))
                except Realm.DoesNotExist:
                    pass
            except ValidationError:
                users.update(
                    UserProfile.objects.filter(full_name__iexact=key_word))

        # Annotate each matched realm with its billing state so the
        # template can display customer/plan details.
        for realm in realms:
            realm.customer = get_customer_by_realm(realm)

            current_plan = get_current_plan_by_realm(realm)
            if current_plan is not None:
                new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
                    current_plan, timezone_now())
                if last_ledger_entry is not None:
                    if new_plan is not None:
                        realm.current_plan = new_plan
                    else:
                        realm.current_plan = current_plan
                    realm.current_plan.licenses = last_ledger_entry.licenses
                    realm.current_plan.licenses_used = get_latest_seat_count(
                        realm)

        # full_names can have , in them
        users.update(UserProfile.objects.filter(full_name__iexact=query))

        context["users"] = users
        context["realms"] = realms

        # Gather outstanding confirmation links related to the matches.
        confirmations: List[Dict[str, Any]] = []

        preregistration_users = PreregistrationUser.objects.filter(
            email__in=key_words)
        confirmations += get_confirmations(
            [
                Confirmation.USER_REGISTRATION, Confirmation.INVITATION,
                Confirmation.REALM_CREATION
            ],
            preregistration_users,
            hostname=request.get_host(),
        )

        multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE],
                                           multiuse_invites)

        confirmations += get_confirmations([Confirmation.REALM_REACTIVATION],
                                           [realm.id for realm in realms])

        context["confirmations"] = confirmations

    def get_realm_owner_emails_as_string(realm: Realm) -> str:
        # Comma-joined owner emails, sorted for stable display.
        return ", ".join(realm.get_human_owner_users().order_by(
            "delivery_email").values_list("delivery_email", flat=True))

    def get_realm_admin_emails_as_string(realm: Realm) -> str:
        # Comma-joined admin (non-owner) emails, sorted for stable display.
        return ", ".join(
            realm.get_human_admin_users(include_realm_owners=False).order_by(
                "delivery_email").values_list("delivery_email", flat=True))

    # Helpers and lookup data the template calls directly.
    context[
        "get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
    context[
        "get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
    context["get_discount_for_realm"] = get_discount_for_realm
    context["get_org_type_display_name"] = get_org_type_display_name
    context["realm_icon_url"] = realm_icon_url
    context["Confirmation"] = Confirmation
    context["sorted_realm_types"] = sorted(Realm.ORG_TYPES.values(),
                                           key=lambda d: d["display_order"])

    return render(request, "analytics/support.html", context=context)
def do_convert_data(slack_zip_file: str, realm_subdomain: str,
                    output_dir: str) -> None:
    """Convert a Slack export zip into a Zulip data dump tarball.

    Builds the realm tables from the fixture skeleton and the Slack
    export, converts users, channels and all channel messages, writes
    realm.json, messages-000001.json and empty media record files into
    ``output_dir``, tars the directory, and exits the process.

    NOTE(review): the trailing ``sys.exit(0)`` terminates the whole
    process — confirm that is intentional before reusing this as a
    library function.
    """
    check_subdomain_available(realm_subdomain)
    slack_data_dir = slack_zip_file.replace('.zip', '')
    # exist_ok=True avoids the check-then-create race of the old
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(slack_data_dir, exist_ok=True)
    subprocess.check_call(
        ['unzip', '-q', slack_zip_file, '-d', slack_data_dir])
    # with zipfile.ZipFile(slack_zip_file, 'r') as zip_ref:
    #     zip_ref.extractall(slack_data_dir)

    # TODO fetch realm config from zulip config
    DOMAIN_NAME = "zulipchat.com"

    REALM_ID = get_model_id(Realm, 'zerver_realm', 1)
    NOW = float(timezone_now().timestamp())

    script_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    fixtures_path = script_path + '../fixtures/'
    # Context managers close every file handle deterministically; the
    # old json.load(open(...)) / json.dump(x, open(...)) calls leaked
    # them (and on non-CPython the writes might never be flushed).
    with open(fixtures_path + 'zerver_realm_skeleton.json') as fixture_file:
        zerver_realm_skeleton = json.load(fixture_file)
    zerver_realm_skeleton[0]['id'] = REALM_ID

    zerver_realm_skeleton[0][
        'string_id'] = realm_subdomain  # subdomain / short_name of realm
    zerver_realm_skeleton[0]['name'] = realm_subdomain
    zerver_realm_skeleton[0]['date_created'] = NOW

    realm = dict(
        zerver_client=[{
            "name": "populate_db",
            "id": 1
        }, {
            "name": "website",
            "id": 2
        }, {
            "name": "API",
            "id": 3
        }],
        zerver_userpresence=
        [],  # shows last logged in data, which is not available in slack
        zerver_userprofile_mirrordummy=[],
        zerver_realmdomain=[{
            "realm": REALM_ID,
            "allow_subdomains": False,
            "domain": DOMAIN_NAME,
            "id": REALM_ID
        }],
        zerver_useractivity=[],
        zerver_realm=zerver_realm_skeleton,
        zerver_huddle=[],
        zerver_userprofile_crossrealm=[],
        zerver_useractivityinterval=[],
        zerver_realmfilter=[],
        zerver_realmemoji=[])

    zerver_userprofile, added_users = users_to_zerver_userprofile(
        slack_data_dir, REALM_ID, int(NOW), DOMAIN_NAME)
    realm['zerver_userprofile'] = zerver_userprofile

    channels_to_zerver_stream_fields = channels_to_zerver_stream(
        slack_data_dir, REALM_ID, added_users, zerver_userprofile)
    # See https://zulipchat.com/help/set-default-streams-for-new-users
    # for documentation on zerver_defaultstream
    realm['zerver_defaultstream'] = channels_to_zerver_stream_fields[0]
    realm['zerver_stream'] = channels_to_zerver_stream_fields[1]
    realm['zerver_subscription'] = channels_to_zerver_stream_fields[3]
    realm['zerver_recipient'] = channels_to_zerver_stream_fields[4]
    added_channels = channels_to_zerver_stream_fields[2]
    added_recipient = channels_to_zerver_stream_fields[5]

    # IO realm.json
    realm_file = output_dir + '/realm.json'
    with open(realm_file, 'w') as realm_fp:
        json.dump(realm, realm_fp)

    # now for message.json
    message_json = {}
    zerver_message = []  # type: List[ZerverFieldsT]
    zerver_usermessage = []  # type: List[ZerverFieldsT]
    zerver_attachment = []  # type: List[ZerverFieldsT]

    print('######### IMPORTING MESSAGES STARTED #########\n')
    # To pre-compute the total number of messages and usermessages
    total_messages = 0
    total_usermessages = 0
    for channel in added_channels.keys():
        tm, tum = get_total_messages_and_usermessages(
            slack_data_dir, channel, zerver_userprofile,
            realm['zerver_subscription'], added_recipient)
        total_messages += tm
        total_usermessages += tum
    message_id_count = get_model_id(Message, 'zerver_message', total_messages)
    usermessage_id_count = get_model_id(UserMessage, 'zerver_usermessage',
                                        total_usermessages)

    constants = [slack_data_dir, REALM_ID]
    for channel in added_channels.keys():
        message_id = len(
            zerver_message) + message_id_count  # For the id of the messages
        usermessage_id = len(zerver_usermessage) + usermessage_id_count
        id_list = [message_id, usermessage_id]
        zm, zum = channel_message_to_zerver_message(
            constants, channel, added_users, added_recipient,
            zerver_userprofile, realm['zerver_subscription'], id_list)
        zerver_message += zm
        zerver_usermessage += zum
    print('######### IMPORTING MESSAGES FINISHED #########\n')

    message_json['zerver_message'] = zerver_message
    message_json['zerver_usermessage'] = zerver_usermessage
    # IO message.json
    message_file = output_dir + '/messages-000001.json'
    with open(message_file, 'w') as message_fp:
        json.dump(message_json, message_fp)

    # IO avatar records
    avatar_records_file = output_dir + '/avatars/records.json'
    os.makedirs(output_dir + '/avatars', exist_ok=True)
    with open(avatar_records_file, 'w') as avatar_fp:
        json.dump([], avatar_fp)

    # IO uploads TODO
    uploads_records_file = output_dir + '/uploads/records.json'
    os.makedirs(output_dir + '/uploads', exist_ok=True)
    with open(uploads_records_file, 'w') as uploads_fp:
        json.dump([], uploads_fp)

    # IO attachments
    attachment_file = output_dir + '/attachment.json'
    attachment = {"zerver_attachment": zerver_attachment}
    with open(attachment_file, 'w') as attachment_fp:
        json.dump(attachment, attachment_fp)

    # remove slack dir
    rm_tree(slack_data_dir)
    subprocess.check_call(
        ["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    print('######### DATA CONVERSION FINISHED #########\n')
    print("Zulip data dump created at %s" % (output_dir))
    sys.exit(0)
def do_convert_data(slack_zip_file: str, realm_subdomain: str, output_dir: str) -> None:
    """Convert a Slack export zip into a Zulip import tarball.

    Unzips ``slack_zip_file`` next to itself, builds the Zulip import
    files (realm.json, messages-000001.json, avatar/upload records and
    attachment.json) under ``output_dir``, packs ``output_dir`` into
    ``output_dir + '.tar.gz'`` and deletes the unpacked Slack directory.

    NOTE: exits the process with status 0 on success (``sys.exit``), so
    this function never returns to its caller.
    """
    check_subdomain_available(realm_subdomain)

    # The export is expected to be named '<something>.zip'; unzipping
    # into the zip's parent directory yields the '<something>' data dir.
    slack_data_dir = slack_zip_file.replace('.zip', '')
    zip_file_dir = os.path.dirname(slack_data_dir)
    subprocess.check_call(['unzip', '-q', slack_zip_file, '-d', zip_file_dir])

    # TODO fetch realm config from zulip config
    DOMAIN_NAME = "zulipchat.com"

    # Allocate the realm id after any existing realms and timestamp the
    # new realm with the current time.
    REALM_ID = get_model_id(Realm)
    NOW = float(timezone_now().timestamp())

    script_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    fixtures_path = script_path + '../fixtures/'
    # Start from the fixture skeleton and fill in realm-specific fields.
    # Use a context manager so the handle is closed even if json.load
    # raises (the original leaked the file handle).
    with open(fixtures_path + 'zerver_realm_skeleton.json') as skeleton_fp:
        zerver_realm_skeleton = json.load(skeleton_fp)
    zerver_realm_skeleton[0]['id'] = REALM_ID
    zerver_realm_skeleton[0]['string_id'] = realm_subdomain  # subdomain / short_name of realm
    zerver_realm_skeleton[0]['name'] = realm_subdomain
    zerver_realm_skeleton[0]['date_created'] = NOW

    realm = dict(zerver_client=[{"name": "populate_db", "id": 1},
                                {"name": "website", "id": 2},
                                {"name": "API", "id": 3}],
                 zerver_userpresence=[],  # shows last logged in data, which is not available in slack
                 zerver_userprofile_mirrordummy=[],
                 zerver_realmdomain=[{"realm": REALM_ID,
                                      "allow_subdomains": False,
                                      "domain": DOMAIN_NAME,
                                      "id": REALM_ID}],
                 zerver_useractivity=[],
                 zerver_realm=zerver_realm_skeleton,
                 zerver_huddle=[],
                 zerver_userprofile_crossrealm=[],
                 zerver_useractivityinterval=[],
                 zerver_realmfilter=[],
                 zerver_realmemoji=[])

    zerver_userprofile, added_users = users_to_zerver_userprofile(slack_data_dir,
                                                                  REALM_ID,
                                                                  int(NOW),
                                                                  DOMAIN_NAME)
    realm['zerver_userprofile'] = zerver_userprofile

    channels_to_zerver_stream_fields = channels_to_zerver_stream(slack_data_dir,
                                                                 REALM_ID,
                                                                 added_users,
                                                                 zerver_userprofile)
    # See https://zulipchat.com/help/set-default-streams-for-new-users
    # for documentation on zerver_defaultstream
    realm['zerver_defaultstream'] = channels_to_zerver_stream_fields[0]
    realm['zerver_stream'] = channels_to_zerver_stream_fields[1]
    realm['zerver_subscription'] = channels_to_zerver_stream_fields[3]
    realm['zerver_recipient'] = channels_to_zerver_stream_fields[4]
    added_channels = channels_to_zerver_stream_fields[2]
    added_recipient = channels_to_zerver_stream_fields[5]

    # IO realm.json
    realm_file = output_dir + '/realm.json'
    with open(realm_file, 'w') as realm_fp:
        json.dump(realm, realm_fp)

    # now for message.json
    message_json = {}
    zerver_message = []  # type: List[ZerverFieldsT]
    zerver_usermessage = []  # type: List[ZerverFieldsT]
    zerver_attachment = []  # type: List[ZerverFieldsT]

    constants = [slack_data_dir, REALM_ID]
    # Start message/usermessage ids after any existing database rows.
    message_id_count = get_model_id(Message)
    usermessage_id_count = get_model_id(UserMessage)

    print('######### IMPORTING MESSAGES STARTED #########\n')
    for channel in added_channels.keys():
        message_id = len(zerver_message) + message_id_count  # For the id of the messages
        usermessage_id = len(zerver_usermessage) + usermessage_id_count
        id_list = [message_id, usermessage_id]
        zm, zum = channel_message_to_zerver_message(constants, channel,
                                                    added_users, added_recipient,
                                                    zerver_userprofile,
                                                    realm['zerver_subscription'],
                                                    id_list)
        zerver_message += zm
        zerver_usermessage += zum
    print('######### IMPORTING MESSAGES FINISHED #########\n')

    message_json['zerver_message'] = zerver_message
    message_json['zerver_usermessage'] = zerver_usermessage
    # IO message.json
    message_file = output_dir + '/messages-000001.json'
    with open(message_file, 'w') as message_fp:
        json.dump(message_json, message_fp)

    # IO avatar records (no avatars are converted yet, hence the empty list)
    avatar_records_file = output_dir + '/avatars/records.json'
    os.makedirs(output_dir + '/avatars', exist_ok=True)
    with open(avatar_records_file, 'w') as avatar_fp:
        json.dump([], avatar_fp)

    # IO uploads TODO
    uploads_records_file = output_dir + '/uploads/records.json'
    os.makedirs(output_dir + '/uploads', exist_ok=True)
    with open(uploads_records_file, 'w') as uploads_fp:
        json.dump([], uploads_fp)

    # IO attachments
    attachment_file = output_dir + '/attachment.json'
    attachment = {"zerver_attachment": zerver_attachment}
    with open(attachment_file, 'w') as attachment_fp:
        json.dump(attachment, attachment_fp)

    # remove slack dir
    rm_tree(slack_data_dir)
    # '-P' keeps the absolute path components inside the archive.
    subprocess.check_call(["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    print('######### DATA CONVERSION FINISHED #########\n')
    print("Zulip data dump created at %s" % (output_dir,))
    print("Import Command: ./manage.py import --destroy-rebuild-database %s\n" % (output_dir,))
    sys.exit(0)
def do_convert_data(slack_zip_file: str, realm_subdomain: str, output_dir: str,
                    token: str) -> None:
    """Convert a Slack export zip into a Zulip import tarball.

    Fetches user and custom-emoji data from the Slack API (using the
    legacy ``token``), converts the workspace, its messages, avatars,
    emoji and uploads into the Zulip import format under ``output_dir``,
    then packs the result into ``output_dir + '.tar.gz'`` and removes
    the unpacked Slack data directory.

    Raises an Exception if ``output_dir`` is not empty.
    """
    check_subdomain_available(realm_subdomain)

    domain_name = settings.EXTERNAL_HOST

    # The export is expected to be named '<something>.zip'; unzip it
    # into a sibling directory of the same base name.
    slack_data_dir = slack_zip_file.replace('.zip', '')
    if not os.path.exists(slack_data_dir):
        os.makedirs(slack_data_dir)

    os.makedirs(output_dir, exist_ok=True)
    # output directory should be empty initially
    if os.listdir(output_dir):
        raise Exception('Output directory should be empty!')

    subprocess.check_call(
        ['unzip', '-q', slack_zip_file, '-d', slack_data_dir])

    script_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    fixtures_path = script_path + '../fixtures/'

    realm_id = 0

    # We get the user data from the legacy token method of the Slack
    # API, which is deprecated, but we use it as the user email data is
    # provided only in this method.
    user_list = get_slack_api_data(token, "https://slack.com/api/users.list",
                                   "members")
    # Get custom emoji from slack api
    custom_emoji_list = get_slack_api_data(token,
                                           "https://slack.com/api/emoji.list",
                                           "emoji")

    realm, added_users, added_recipient, added_channels, avatar_list, \
        emoji_url_map = slack_workspace_to_realm(domain_name, realm_id, user_list,
                                                 realm_subdomain, fixtures_path,
                                                 slack_data_dir, custom_emoji_list)

    message_json, uploads_list, zerver_attachment = convert_slack_workspace_messages(
        slack_data_dir, user_list, realm_id, added_users, added_recipient,
        added_channels, realm, realm['zerver_realmemoji'], domain_name)

    # Custom emoji are written under <output_dir>/emoji.
    emoji_folder = os.path.join(output_dir, 'emoji')
    os.makedirs(emoji_folder, exist_ok=True)
    emoji_records = process_emojis(realm['zerver_realmemoji'], emoji_folder,
                                   emoji_url_map)

    # Avatars are written under <output_dir>/avatars/<realm_id>/.
    avatar_folder = os.path.join(output_dir, 'avatars')
    avatar_realm_folder = os.path.join(avatar_folder, str(realm_id))
    os.makedirs(avatar_realm_folder, exist_ok=True)
    avatar_records = process_avatars(avatar_list, avatar_folder, realm_id)

    uploads_folder = os.path.join(output_dir, 'uploads')
    os.makedirs(os.path.join(uploads_folder, str(realm_id)), exist_ok=True)
    uploads_records = process_uploads(uploads_list, uploads_folder)
    attachment = {"zerver_attachment": zerver_attachment}

    # Write out each converted data set as JSON.
    # IO realm.json
    create_converted_data_files(realm, output_dir, '/realm.json')
    # IO message.json
    create_converted_data_files(message_json, output_dir,
                                '/messages-000001.json')
    # IO emoji records
    create_converted_data_files(emoji_records, output_dir,
                                '/emoji/records.json')
    # IO avatar records
    create_converted_data_files(avatar_records, output_dir,
                                '/avatars/records.json')
    # IO uploads TODO
    create_converted_data_files(uploads_records, output_dir,
                                '/uploads/records.json')
    # IO attachments
    create_converted_data_files(attachment, output_dir, '/attachment.json')

    # remove slack dir
    rm_tree(slack_data_dir)
    # '-P' keeps the absolute path components inside the archive.
    subprocess.check_call(
        ["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    logging.info('######### DATA CONVERSION FINISHED #########\n')
    # Lazy %-style arguments: formatting only happens if the record is
    # actually emitted (the original formatted the string eagerly).
    logging.info("Zulip data dump created at %s", output_dir)