Example #1
def main():
    logger = get_logger('get_users_from_zulip',
                        get_users_from_zulip['logging_level'])

    data_path = path_data_directory / 'get_users_from_zulip'
    data_path.mkdir(parents=True, exist_ok=True)

    try:
        # client = get_client()
        #
        # users = get_all_users(client)['members']
        # users.sort(key=lambda x: x['user_id'])
        #
        # with open(data_path / 'users.pickle', 'wb') as file:
        #     pickle.dump(users, file)
        with open(data_path / 'users.pickle', 'rb') as file:
            users = pickle.load(file)

        for x in users:
            logger.debug(x)

        with open(data_path / 'users.csv', 'w', newline='') as csvfile:
            fieldnames = [
                'avatar_url', 'is_admin', 'full_name', 'is_guest', 'bot_type',
                'is_bot', 'email', 'is_active', 'bot_owner', 'user_id'
            ]
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()

            for u in users:
                writer.writerow(u)

    except Exception as exception:
        logger.error(exception, exc_info=True)
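Example #1 relies on helpers that are not defined in the snippet (get_logger, get_client, get_all_users and the config dictionary named after the script). A minimal sketch of what they might look like, assuming the official zulip Python bindings; the zuliprc path, log format and default level are assumptions.

import logging

import zulip


def get_logger(name, level=logging.DEBUG):
    # Plain console logger; the examples pass the level from a config dict.
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if not logger.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
        logger.addHandler(handler)
    return logger


def get_client():
    # Assumption: credentials come from a standard zuliprc file.
    return zulip.Client(config_file='~/.zuliprc')


def get_all_users(client):
    # client.get_members() returns a dict with a 'members' list,
    # which is what the example indexes as ['members'].
    return client.get_members()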
Example #2
def connection_manager(event, context):
    log = get_logger('socket_manager_logger')
    connection_id = event["requestContext"].get("connectionId")

    if event["requestContext"]["eventType"] == CONNECT:
        log.info(f'Connect requested with id: {connection_id}')
        return ok_response()

    elif event["requestContext"]["eventType"] == DISCONNECT:
        log.info(f'Disconnect requested from {connection_id}')
        return ok_response()
    else:
        log.error(
            f'Connection manager received unrecognized eventType: {event}')
        return generate_error_response("unrecognized socket action",
                                       HTTPStatus.BAD_REQUEST)
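The ok_response and generate_error_response helpers come from api.responses, which is not shown. A hypothetical version that returns the statusCode/body structure API Gateway expects from a Lambda proxy integration; the body shape is an assumption.

import json
from http import HTTPStatus


def generate_response(status_code, body):
    # Lambda proxy integration response; API Gateway needs statusCode and a string body.
    return {'statusCode': int(status_code), 'body': json.dumps(body)}


def ok_response():
    return generate_response(HTTPStatus.OK, {'message': 'ok'})


def generate_error_response(message, status_code=HTTPStatus.BAD_REQUEST):
    # Assumption: errors are reported under an 'error' key.
    return generate_response(status_code, {'error': message})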
Example #3
def main():
    logger = get_logger('get_users_from_google',
                        get_users_from_google['logging_level'])

    data_path = path_data_directory / 'get_users_from_google'
    data_path.mkdir(parents=True, exist_ok=True)

    try:
        service = get_directory_service()

        # Get all students in domain filtered by query
        users = get_users_for_domain(
            service, get_users_from_google['google_domain'],
            get_users_from_google['user_filter_query'])

        with open(data_path / 'users.pickle', 'wb') as file:
            pickle.dump(users, file)
        with open(data_path / 'users.pickle', 'rb') as file:
            users = pickle.load(file)

        for x in users:
            logger.debug(x)

        # Get unique orgUnitPaths from users
        groups = set()
        for user in users:
            groups.add(user['orgUnitPath'])

        with open(data_path / 'groups.pickle', 'wb') as file:
            pickle.dump(groups, file)
        with open(data_path / 'groups.pickle', 'rb') as file:
            groups = pickle.load(file)

        groups = sorted(groups)
        for x in groups:
            logger.debug(x)

        with open(data_path / 'users.txt', 'w') as file:
            for group in groups:
                print(group + ':', file=file)
                for user in [x for x in users if x['orgUnitPath'] == group]:
                    print('    ' + json.dumps(user['primaryEmail']), file=file)
    except Exception as exception:
        logger.error(exception, exc_info=True)
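get_directory_service and get_users_for_domain are assumed by Example #3. A sketch against the Admin SDK Directory API: users().list with domain, query and pageToken is the documented call, while the service-account file and the delegated admin address are assumptions.

from google.oauth2 import service_account
from googleapiclient.discovery import build

SCOPES = ['https://www.googleapis.com/auth/admin.directory.user.readonly']


def get_directory_service():
    # Assumption: a service account with domain-wide delegation impersonating an admin.
    credentials = service_account.Credentials.from_service_account_file(
        'service_account.json', scopes=SCOPES).with_subject('admin@example.com')
    return build('admin', 'directory_v1', credentials=credentials)


def get_users_for_domain(service, domain, query):
    # Pages through users().list until nextPageToken runs out.
    users, page_token = [], None
    while True:
        response = service.users().list(
            domain=domain, query=query, pageToken=page_token).execute()
        users.extend(response.get('users', []))
        page_token = response.get('nextPageToken')
        if not page_token:
            return users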
Example #4
def main():
    logger = get_logger('sync_groups_and_zulip', sync_groups_and_zulip['logging_level'])

    data_path = path_data_directory / 'sync_groups_and_zulip'
    data_path.mkdir(parents=True, exist_ok=True)

    synced_users_path = data_path / 'synced_users.pickle'

    while True:
        number_of_registered_users = 0
        synced_users_dictionary_creation = False

        # Getting a list of users who have already been synced
        if synced_users_path.exists():
            logger.debug('Reading synced users from: %s', synced_users_path)
            with open(synced_users_path, 'rb') as f:
                synced_users = pickle.load(f)
        else:
            logger.info('Creating synced users dictionary')
            synced_users = dict()
            synced_users_dictionary_creation = True

        try:
            service = get_directory_service()
            client = get_client()
            client_user = get_client_user()

            # Get all Google groups of a domain
            groups = get_groups_for_domain(service, sync_groups_and_zulip['google_domain'])

            # Get all current Zulip users
            zulip_users = get_all_users(client)['members']
            zulip_user_emails = set(
                [member['email'] for member in zulip_users]
            )

            zulip_stream_names = sorted(
                [stream['name'] for stream in get_all_streams(client)['streams']]
            )

            # Get members of Google Groups and remove those who aren't registered in Zulip,
            # then create streams and invite remaining users.
            for group in groups:
                logger.debug('Group: %s', group)

                name = get_current_stream_name(logger, zulip_stream_names, group['name'])

                # Create a set for the group if it doesn't exist yet
                if group['email'] not in synced_users:
                    synced_users[group['email']] = set()

                members = get_members_for_group(service, group['id'])
                member_emails = set([member['email'] for member in members])

                # Get emails only of those who are registered in Zulip
                # plus mandatory members'
                # minus users' who have already been subscribed
                member_emails &= zulip_user_emails
                member_emails |= set(sync_groups_and_zulip['mandatory_members'])
                member_emails -= synced_users[group['email']]

                # Update synced users set
                synced_users[group['email']] |= member_emails

                member_emails = list(member_emails)

                logger.debug('Emails to register: %s', member_emails)

                if member_emails and not synced_users_dictionary_creation:
                    # Creating or updating a stream
                    result = create_stream(
                        client,
                        name,
                        group['description'],
                        member_emails,
                        False,
                        sync_groups_and_zulip['mandatory_streams']
                    )

                    # Update a user group
                    update_user_group(logger, client_user, zulip_users, group['name'],
                                      group['description'], member_emails)

                    number_of_registered_users += len(member_emails)

                    logger.debug('Result: %s', result)
        except Exception as exception:
            logger.error(exception, exc_info=True)

        logger.debug('Writing synced users to: %s', synced_users_path)
        with open(synced_users_path, 'wb') as f:
            pickle.dump(synced_users, f)

        logger.info('Update finished. Registered %s users. Sleeping for %s seconds.',
                    number_of_registered_users, sync_groups_and_zulip['sleep_time'])
        sleep(sync_groups_and_zulip['sleep_time'])
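create_stream is not shown, and neither is the meaning of its last two arguments. A sketch that treats the boolean as invite_only and the final list as extra stream names to subscribe the same principals to; that interpretation is an assumption, while add_subscriptions itself is the standard Zulip binding.

def create_stream(client, name, description, member_emails,
                  invite_only=False, extra_streams=()):
    # Assumption: extra_streams are additional stream names the same users join.
    streams = [{'name': name, 'description': description}]
    streams += [{'name': stream, 'description': ''} for stream in extra_streams]
    return client.add_subscriptions(streams=streams,
                                    principals=member_emails,
                                    invite_only=invite_only)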
Example #5
def main():
    logger = get_logger('sync_sheets_and_zulip',
                        sync_sheets_and_zulip['logging_level'])

    data_path = path_data_directory / 'sync_sheets_and_zulip'
    data_path.mkdir(parents=True, exist_ok=True)

    synced_users_path = data_path / 'synced_users.pickle'

    while True:
        number_of_registered_users = 0
        synced_users_dictionary_creation = False

        # Getting a list of users who have already been synced
        if synced_users_path.exists():
            logger.debug('Reading synced users from: %s', synced_users_path)
            with open(synced_users_path, 'rb') as f:
                synced_users = pickle.load(f)
        else:
            logger.info('Creating synced users dictionary')
            synced_users = dict()
            synced_users_dictionary_creation = True

        try:
            service = get_sheets_service()
            client = get_client()

            ranges = get_multiple_ranges(
                service, sync_sheets_and_zulip['spreadsheet_id'],
                sync_sheets_and_zulip['range_names'])

            # with open(data_path / 'ranges.pickle', 'wb') as file:
            #     pickle.dump(ranges, file)
            # with open(data_path / 'ranges.pickle', 'rb') as file:
            #     ranges = pickle.load(file)

            for x in ranges:
                logger.debug(x)

            students = dict(
                zip([i[0] if i else "" for i in ranges[1]['values']],
                    [i[0] if i else "" for i in ranges[2]['values']]))

            leaders = dict(
                zip([i[0] if i else "" for i in ranges[3]['values']],
                    [i[0] if i else "" for i in ranges[4]['values']]))

            zulip_user_emails = set([
                member['email'] for member in get_all_users(client)['members']
            ])

            logger.debug(zulip_user_emails)

            zulip_stream_names = sorted([
                stream['name'] for stream in get_all_streams(client)['streams']
            ])

            logger.debug(zulip_stream_names)

            groups = []
            for group in ranges[0]['values']:
                id = group[0].split(" ", 1)[0]
                description = group[0].split(" ", 1)[1]

                if id not in synced_users:
                    synced_users[id] = set()

                try:
                    name = get_current_stream_name(logger, zulip_stream_names,
                                                   id)
                except ValueError:
                    continue

                member_emails = set()

                # Leader email
                if leaders[group[1]] in zulip_user_emails:
                    member_emails.add(leaders[group[1]])

                # Member emails
                for i in range(2, len(group)):
                    if students[group[i]] in zulip_user_emails:
                        member_emails.add(students[group[i]])

                # Mandatory emails
                member_emails |= set(
                    sync_sheets_and_zulip['mandatory_members'])

                # Synced users
                member_emails -= synced_users[id]
                synced_users[id] |= member_emails

                member_emails = list(member_emails)

                logger.debug('Name: %s - Description: %s - Users: %s', name,
                             description, member_emails)

                if not synced_users_dictionary_creation:
                    result = create_stream(client, name, description,
                                           member_emails, True)
                    number_of_registered_users += len(member_emails)

                    logger.debug('Result: %s', result)
        except Exception as exception:
            logger.error(exception, exc_info=True)

        logger.debug('Writing synced users to: %s', synced_users_path)
        with open(synced_users_path, 'wb') as f:
            pickle.dump(synced_users, f)

        logger.info(
            'Update finished. Registered %s users. Sleeping for %s seconds.',
            number_of_registered_users, sync_sheets_and_zulip['sleep_time'])
        sleep(sync_sheets_and_zulip['sleep_time'])
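get_sheets_service and get_multiple_ranges are assumed here. A sketch using the Sheets v4 batchGet call, whose valueRanges list matches the ranges[n]['values'] indexing above; credential handling is an assumption.

from google.oauth2 import service_account
from googleapiclient.discovery import build

SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']


def get_sheets_service():
    # Assumption: a service account with read access to the spreadsheet.
    credentials = service_account.Credentials.from_service_account_file(
        'service_account.json', scopes=SCOPES)
    return build('sheets', 'v4', credentials=credentials)


def get_multiple_ranges(service, spreadsheet_id, range_names):
    # Each element of valueRanges carries 'range' and 'values' keys.
    result = service.spreadsheets().values().batchGet(
        spreadsheetId=spreadsheet_id, ranges=range_names).execute()
    return result.get('valueRanges', [])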
Example #6
def main():
    logger = get_logger('sync_trello_and_zulip',
                        sync_trello_and_zulip['logging_level'])

    data_path = path_data_directory / 'sync_trello_and_zulip'
    data_path.mkdir(parents=True, exist_ok=True)

    synced_users_path = data_path / 'synced_users.pickle'

    while True:
        number_of_registered_users = 0
        synced_users_dictionary_creation = False

        # Getting a list of users who have already been synced
        if synced_users_path.exists():
            logger.debug('Reading synced users from: %s', synced_users_path)
            with open(synced_users_path, 'rb') as f:
                synced_users = pickle.load(f)
        else:
            logger.info('Creating synced users dictionary')
            synced_users = dict()
            synced_users_dictionary_creation = True

        try:
            zulip_client = zulip_api.get_client()
            trello_client = trello_api.get_client()

            trello_api.get_boards_and_users(trello_client)

            # # Get all current Zulip users
            # zulip_user_emails = set(
            #     [member['email'] for member in zulip_api.get_all_users(zulip_client)['members']]
            # )
            #
            # # Get members of Google Groups and remove those who aren't registered in Zulip,
            # # then create streams and invite remaining users.
            # for group in groups:
            #     logger.debug('Group: %s', group)
            #
            #     # Create a set for the group if it doesn't exist yet
            #     if group['email'] not in synced_users:
            #         synced_users[group['email']] = set()
            #
            #     members = get_members_for_group(service, group['id'])
            #     member_emails = set([member['email'] for member in members])
            #
            #     logger.debug('Group members\' emails: %s', member_emails)
            #
            #     # Get emails only of those who are registered in Zulip
            #     # plus mandatory members'
            #     # minus users' who have already been subscribed
            #     member_emails &= zulip_user_emails
            #     member_emails |= set(sync_trello_and_zulip['mandatory_members'])
            #     member_emails -= synced_users[group['email']]
            #
            #     # Update synced users set
            #     synced_users[group['email']] |= member_emails
            #
            #     member_emails = list(member_emails)
            #
            #     logger.debug('Emails to register: %s', member_emails)
            #
            #     if not synced_users_dictionary_creation:
            #         result = zulip_api.create_stream(
            #             zulip_client,
            #             group['name'],
            #             group['description'],
            #             member_emails,
            #             True
            #         )
            #         number_of_registered_users += len(member_emails)
            #
            #         logger.debug('Result: %s', result)
        except Exception as exception:
            logger.error(exception, exc_info=True)

        logger.debug('Writing synced users to: %s', synced_users_path)
        with open(synced_users_path, 'wb') as f:
            pickle.dump(synced_users, f)

        logger.info(
            'Update finished. Registered %s users. Sleeping for %s seconds.',
            number_of_registered_users, sync_trello_and_zulip['sleep_time'])
        sleep(sync_trello_and_zulip['sleep_time'])
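trello_api.get_client and get_boards_and_users are not shown, and their result is not used yet in this example. A sketch against Trello's public REST endpoints using requests; representing the client as a key/token dict and the board-to-members return shape are assumptions.

import requests

TRELLO_API = 'https://api.trello.com/1'


def get_client():
    # Hypothetical: just the query parameters every Trello request needs.
    return {'key': 'YOUR_TRELLO_KEY', 'token': 'YOUR_TRELLO_TOKEN'}


def get_boards_and_users(auth):
    # /1/members/me/boards and /1/boards/{id}/members are documented endpoints.
    boards = requests.get(f'{TRELLO_API}/members/me/boards', params=auth).json()
    return {
        board['name']: requests.get(
            f'{TRELLO_API}/boards/{board["id"]}/members', params=auth).json()
        for board in boards
    }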
Example #7
def main():
    logger = get_logger('create_google_groups',
                        create_google_groups['logging_level'])

    data_path = path_data_directory / 'create_google_groups'
    data_path.mkdir(parents=True, exist_ok=True)

    try:
        service = get_directory_service()

        # Get all students in domain filtered by query
        users = get_users_for_domain(service,
                                     create_google_groups['google_domain'],
                                     create_google_groups['user_filter_query'])

        with open(data_path / 'users.pickle', 'wb') as file:
            pickle.dump(users, file)
        with open(data_path / 'users.pickle', 'rb') as file:
            users = pickle.load(file)

        # Get unique orgUnitPaths from users
        groups = set()
        for user in users:
            groups.add(user['orgUnitPath'])

        with open(data_path / 'groups.pickle', 'wb') as file:
            pickle.dump(groups, file)
        with open(data_path / 'groups.pickle', 'rb') as file:
            groups = pickle.load(file)

        for x in groups:
            logger.debug(x)

        # Get group names from orgUnitPaths
        group_names = []
        for group in groups:
            group_names.append(group.split("/")[-1])

        # Filter out existing groups
        existing_groups = get_groups_for_domain(
            service, create_google_groups['google_domain'])

        for group in existing_groups:
            if group['name'] in group_names:
                group_names.remove(group['name'])

        with open(data_path / 'new_groups.pickle', 'wb') as file:
            pickle.dump(group_names, file)
        with open(data_path / 'new_groups.pickle', 'rb') as file:
            group_names = pickle.load(file)

        for x in group_names:
            logger.debug(x)

        # Create groups
        group_results = []
        for group_name in group_names:
            email = (translit(group_name, "ru", reversed=True)).lower() \
                    + "@" \
                    + create_google_groups['google_domain']
            group_results.append(create_group(service, email, group_name, ""))

        group_results.sort(key=lambda x: x['name'])
        users.sort(key=lambda x: x['orgUnitPath'])

        # A client should wait 1 minute before adding users or sending messages to a new group
        sleep(60)

        with open(data_path / 'group_results.pickle', 'wb') as file:
            pickle.dump(group_results, file)
        with open(data_path / 'group_results.pickle', 'rb') as file:
            group_results = pickle.load(file)

        for x in group_results:
            logger.debug(x)

        group_users = {}
        for group_result in group_results:
            group_users[group_result['email']] = []

            for user in users:
                if group_result['name'] in user['orgUnitPath']:
                    group_users[group_result['email']].append(
                        [user['primaryEmail'], 'MEMBER'])

            # Mandatory user
            group_users[group_result['email']] += create_google_groups[
                'mandatory_members']

        with open(data_path / 'group_users.pickle', 'wb') as file:
            pickle.dump(group_users, file)
        with open(data_path / 'group_users.pickle', 'rb') as file:
            group_users = pickle.load(file)

        for x in group_users:
            logger.debug(x)

        # Add users to groups
        user_results = []
        for group in group_users:
            for group_user in group_users[group]:
                user_results.append(
                    add_user_to_group(service, group, group_user[0],
                                      group_user[1]))

        with open(data_path / 'user_results.pickle', 'wb') as file:
            pickle.dump(user_results, file)
        with open(data_path / 'user_results.pickle', 'rb') as file:
            user_results = pickle.load(file)

        for x in user_results:
            logger.debug(x)

        # # -----
        # # Might need rework
        # # -----
        #
        # service = get_groupsettings_service()
        #
        # group_emails = []
        # for group_name in group_names:
        #     group_emails.append(
        #         (translit(group_name, "ru", reversed=True)).lower() \
        #                     + "@" \
        #                     + create_google_groups['google_domain']
        #     )
        #
        # with open(data_path / 'group_emails.pickle', 'wb') as file:
        #     pickle.dump(group_emails, file)
        # with open(data_path / 'group_emails.pickle', 'rb') as file:
        #     group_emails = pickle.load(file)
        #
        # [logger.debug(x) for x in group_emails]
        #
        # settings_results = []
        # for group_email in group_emails:
        #     settings_results.append(
        #         update_group_settings(
        #             service,
        #             group_email,
        #             {
        #                 "whoCanJoin": "INVITED_CAN_JOIN",
        #                 "whoCanViewMembership": "ALL_IN_DOMAIN_CAN_VIEW",
        #                 "whoCanViewGroup": "ALL_IN_DOMAIN_CAN_VIEW",
        #                 "whoCanPostMessage": "ALL_IN_DOMAIN_CAN_POST",
        #                 "isArchived": "true"
        #             }
        #         )
        #     )
        #
        # with open(data_path / 'settings_results.pickle', 'wb') as file:
        #     pickle.dump(settings_results, file)
        # with open(data_path / 'settings_results.pickle', 'rb') as file:
        #     settings_results = pickle.load(file)
        #
        # [logger.debug(x) for x in settings_results]

    except Exception as exception:
        logger.error(exception, exc_info=True)
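create_group and add_user_to_group are assumed by Example #7. A sketch built on the Directory API groups().insert and members().insert calls; scopes, retries and error handling are left out, and the argument order mirrors how the example calls them.

def create_group(service, email, name, description):
    # groups().insert creates the group; the caller waits a minute before adding members.
    body = {'email': email, 'name': name, 'description': description}
    return service.groups().insert(body=body).execute()


def add_user_to_group(service, group_email, user_email, role='MEMBER'):
    # members().insert adds one member with the given role (MEMBER, MANAGER or OWNER).
    body = {'email': user_email, 'role': role}
    return service.members().insert(groupKey=group_email, body=body).execute()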
Example #8
from api.responses import generate_error_response, ok_response, generate_response
from http import HTTPStatus
from logs.logging import get_logger
import boto3
import json

log = get_logger('socket_manager_logger')

CONNECT = 'CONNECT'
DISCONNECT = 'DISCONNECT'


def connection_manager(event, context):
    connection_id = event["requestContext"].get("connectionId")

    if event["requestContext"]["eventType"] == CONNECT:
        log.info(f'Connect requested with id: {connection_id}')
        return ok_response()

    elif event["requestContext"]["eventType"] == DISCONNECT:
        log.info(f'Disconnect requested from {connection_id}')
        return ok_response()
    else:
        log.error(
            f'Connection manager received unrecognized eventType: {event}')
        return generate_error_response("unrecognized socket action",
                                       HTTPStatus.BAD_REQUEST)


def default_message(event, context):
    return generate_error_response("unrecognized socket action",
                                   HTTPStatus.BAD_REQUEST)
Example #9
def main():
    logger = get_logger('sync_sheets_and_groups',
                        sync_sheets_and_groups['logging_level'])

    data_path = path_data_directory / 'sync_sheets_and_groups'
    data_path.mkdir(parents=True, exist_ok=True)

    synced_users_path = data_path / 'synced_users.pickle'

    while True:
        # number_of_registered_users = 0
        # synced_users_dictionary_creation = False
        #
        # # Getting a list of users who have already been synced
        # if synced_users_path.exists():
        #     logger.debug('Reading synced users from: %s', synced_users_path)
        #     with open(synced_users_path, 'rb') as f:
        #         synced_users = pickle.load(f)
        # else:
        #     logger.info('Creating synced users dictionary')
        #     synced_users = dict()
        #     synced_users_dictionary_creation = True

        try:
            service_directory = get_directory_service()
            service_sheets = get_sheets_service()

            # ranges = get_multiple_ranges(
            #     service_sheets,
            #     sync_sheets_and_groups['spreadsheet_id'],
            #     sync_sheets_and_groups['range_names']
            # )
            #
            # with open(data_path / 'ranges.pickle', 'wb') as file:
            #     pickle.dump(ranges, file)
            with open(data_path / 'ranges.pickle', 'rb') as file:
                ranges = pickle.load(file)
            #
            # [logger.debug(x) for x in ranges]

            # group_results = []
            # for group in ranges[0]['values']:
            #     group_name = group[0].split(" ", 1)[0]
            #
            #     email = (translit(group_name, "ru", reversed=True)).lower() \
            #             + "@" \
            #             + sync_sheets_and_groups['google_domain']
            #
            #     try:
            #         group_results.append(create_group(service_directory, email, group_name, ""))
            #     except googleapiclient.errors.HttpError as exception:
            #         # If group already exists among other things
            #         logger.error(exception, exc_info=False)
            #
            #     logger.debug(group_name, email)
            #
            # group_results.sort(key=lambda x: x['name'])

            # with open(data_path / 'group_results.pickle', 'wb') as file:
            #     pickle.dump(group_results, file)
            with open(data_path / 'group_results.pickle', 'rb') as file:
                group_results = pickle.load(file)
            #
            # [logger.debug(x) for x in group_results]

            created_group_names = [x['name'] for x in group_results]

            for x in created_group_names:
                logger.debug(x)

            # # A client should wait 1 minute before adding users or sending messages to a new group
            # sleep(60)

            students = dict(
                zip([i[0] if i else "" for i in ranges[1]['values']],
                    [i[0] if i else "" for i in ranges[2]['values']]))

            logger.debug(students.items())

            leaders = dict(
                zip([i[0] if i else "" for i in ranges[3]['values']],
                    [i[0] if i else "" for i in ranges[4]['values']]))

            logger.debug(leaders.items())

            group_users = {}
            for group in ranges[0]['values']:
                id = group[0].split(" ", 1)[0]

                if id not in created_group_names:
                    logger.debug("Skipping group: ", id)
                    continue
                else:
                    logger.debug("Adding users to group: ", id)

                group_users[id] = []

                # Leader email
                group_users[id].append([leaders[group[1]], 'MEMBER'])

                # Member emails
                for i in range(2, len(group)):
                    group_users[id].append([students[group[i]], 'MEMBER'])

                # Mandatory user
                group_users[id] += sync_sheets_and_groups['mandatory_members']

            with open(data_path / 'group_users.pickle', 'wb') as file:
                pickle.dump(group_users, file)
            with open(data_path / 'group_users.pickle', 'rb') as file:
                group_users = pickle.load(file)

            for x in group_users:
                logger.debug(x)

            # # Add users to groups
            # user_results = []
            # for group in group_users:
            #     for group_user in group_users[group]:
            #         user_results.append(
            #             add_user_to_group(service, group, group_user[0], group_user[1])
            #         )
            #
            # with open(data_path / 'user_results.pickle', 'wb') as file:
            #     pickle.dump(user_results, file)
            # with open(data_path / 'user_results.pickle', 'rb') as file:
            #     user_results = pickle.load(file)
            #
            # [logger.debug(x) for x in user_results]

            # students = dict(zip(
            #     [i[0] if i else "" for i in ranges[1]['values']],
            #     [i[0] if i else "" for i in ranges[2]['values']]
            # ))
            #
            # leaders = dict(zip(
            #     [i[0] if i else "" for i in ranges[3]['values']],
            #     [i[0] if i else "" for i in ranges[4]['values']]
            # ))

            # if id not in synced_users:
            #     synced_users[id] = set()
            #
            # member_emails = set()
            #
            # # Leader email
            # member_emails.add(
            #     leaders[group[1]]
            # )
            #
            # # Member emails
            # for i in range(2, len(group)):
            #     member_emails.add(
            #         students[group[i]]
            #     )
            #
            # # Mandatory emails
            # member_emails |= set(sync_sheets_and_groups['mandatory_members'])
            #
            # # Synced users
            # member_emails -= synced_users[id]
            # synced_users[id] |= member_emails
            #
            # member_emails = list(member_emails)
            #
            # logger.debug('Name: %s - Description: %s - Users: %s',
            #              name, description, member_emails)
            #
            # if not synced_users_dictionary_creation:
            #     # TODO
            #     number_of_registered_users += len(member_emails)
            #
            #     logger.debug('Result: %s', result)

            # # -----
            # # Might need rework
            # # -----
            #
            # service = get_groupsettings_service()
            #
            # group_emails = []
            # for group_name in group_names:
            #     group_emails.append(
            #         (translit(group_name, "ru", reversed=True)).lower() \
            #                     + "@" \
            #                     + create_google_groups['google_domain']
            #     )
            #
            # with open(data_path / 'group_emails.pickle', 'wb') as file:
            #     pickle.dump(group_emails, file)
            # with open(data_path / 'group_emails.pickle', 'rb') as file:
            #     group_emails = pickle.load(file)
            #
            # [logger.debug(x) for x in group_emails]
            #
            # settings_results = []
            # for group_email in group_emails:
            #     settings_results.append(
            #         update_group_settings(
            #             service,
            #             group_email,
            #             {
            #                 "whoCanJoin": "INVITED_CAN_JOIN",
            #                 "whoCanViewMembership": "ALL_IN_DOMAIN_CAN_VIEW",
            #                 "whoCanViewGroup": "ALL_IN_DOMAIN_CAN_VIEW",
            #                 "whoCanPostMessage": "ALL_IN_DOMAIN_CAN_POST",
            #                 "isArchived": "true"
            #             }
            #         )
            #     )
            #
            # with open(data_path / 'settings_results.pickle', 'wb') as file:
            #     pickle.dump(settings_results, file)
            # with open(data_path / 'settings_results.pickle', 'rb') as file:
            #     settings_results = pickle.load(file)
            #
            # [logger.debug(x) for x in settings_results]
        except Exception as exception:
            logger.error(exception, exc_info=True)

        # logger.debug('Writing synced users to: %s', synced_users_path)
        # with open(synced_users_path, 'wb') as f:
        #     pickle.dump(synced_users, f)
        #
        # logger.info('Update finished. Registered %s users. Sleeping for %s seconds.',
        #             number_of_registered_users, sync_sheets_and_groups['sleep_time'])
        sleep(sync_sheets_and_groups['sleep_time'])
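The commented-out blocks in Examples #7 and #9 reference get_groupsettings_service and update_group_settings. A sketch against the Groups Settings API (groupssettings v1, groups().update keyed by groupUniqueId); credentials and scopes are assumptions.

from google.oauth2 import service_account
from googleapiclient.discovery import build

SCOPES = ['https://www.googleapis.com/auth/apps.groups.settings']


def get_groupsettings_service():
    # Assumption: the same service-account file used for the Directory service.
    credentials = service_account.Credentials.from_service_account_file(
        'service_account.json', scopes=SCOPES)
    return build('groupssettings', 'v1', credentials=credentials)


def update_group_settings(service, group_email, settings):
    # groups().update replaces the listed settings for the given group.
    return service.groups().update(
        groupUniqueId=group_email, body=settings).execute()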