Example #1
    def validate(cls, value_dict):
        """Validates customization args for a rich text component.

        Raises:
          TypeError: if any customization arg is invalid.
        """
        arg_names_to_obj_classes = {}
        customization_arg_specs = cls.rich_text_component_specs[
            cls.__name__]['customization_arg_specs']
        for customization_arg_spec in customization_arg_specs:
            arg_name = '%s-with-value' % customization_arg_spec['name']
            schema = customization_arg_spec['schema']
            if schema['type'] != 'custom':
                obj_type = schema['type']
            else:
                obj_type = schema['obj_type']
            obj_class = cls.obj_types_to_obj_classes[obj_type]
            arg_names_to_obj_classes[arg_name] = obj_class

        required_attr_names = list(arg_names_to_obj_classes.keys())
        attr_names = list(value_dict.keys())

        if set(attr_names) != set(required_attr_names):
            missing_attr_names = list(
                set(required_attr_names) - set(attr_names))
            extra_attr_names = list(set(attr_names) - set(required_attr_names))
            raise Exception('Missing attributes: %s, Extra attributes: %s' %
                            (python_utils.STR(missing_attr_names),
                             python_utils.STR(extra_attr_names)))

        for arg_name in required_attr_names:
            arg_obj_class = arg_names_to_obj_classes[arg_name]
            arg_obj_class.normalize(value_dict[arg_name])
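For illustration, the required keys in value_dict are simply the customization arg names with a '-with-value' suffix. A minimal sketch of how those keys are derived, using a hypothetical spec (the spec contents below are made up):

# Hypothetical customization_arg_specs, shown only to illustrate how the
# required attribute names are built in the method above.
customization_arg_specs = [{'name': 'text', 'schema': {'type': 'unicode'}}]
required_attr_names = [
    '%s-with-value' % spec['name'] for spec in customization_arg_specs
]
# required_attr_names == ['text-with-value']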
Example #2
    def test_put_error_with_transient_result(self):
        docs = [{
            'id': 'doc1',
            'prop': 'val1'
        }, {
            'id': 'doc2',
            'prop': 'val2'
        }, {
            'id': 'doc3',
            'prop': 'val3'
        }]
        error = self._get_put_error(3, transient=1)
        failing_put = test_utils.FailingFunction(search.Index.put, error, 4)

        add_docs_counter = test_utils.CallCounter(
            gae_search_services.add_documents_to_index)
        put_ctx = self.swap(search.Index, 'put', failing_put)
        add_docs_ctx = self.swap(gae_search_services, 'add_documents_to_index',
                                 add_docs_counter)

        with put_ctx, add_docs_ctx:
            gae_search_services.add_documents_to_index(docs,
                                                       'my_index',
                                                       retries=5)

        self.assertEqual(add_docs_counter.times_called, 5)
        for i in python_utils.RANGE(1, 4):
            result = search.Index('my_index').get('doc' + python_utils.STR(i))
            self.assertEqual(
                result.field('prop').value, 'val' + python_utils.STR(i))
Example #3
    def setUp(self):
        super(NextJobHandlerTest, self).setUp()

        self.exp_id = 'exp_id1'
        self.title = 'Testing Classifier storing'
        self.category = 'Test'
        interaction_id = 'TextInput'
        self.algorithm_id = feconf.INTERACTION_CLASSIFIER_MAPPING[
            interaction_id]['algorithm_id']
        self.training_data = [{
            u'answer_group_index': 1,
            u'answers': [u'a1', u'a2']
        }, {
            u'answer_group_index': 2,
            u'answers': [u'a2', u'a3']
        }]
        self.job_id = classifier_models.ClassifierTrainingJobModel.create(
            self.algorithm_id, interaction_id, self.exp_id, 1,
            datetime.datetime.utcnow(), self.training_data, 'Home',
            feconf.TRAINING_JOB_STATUS_NEW, None, 1)

        self.expected_response = {
            u'job_id': python_utils.STR(self.job_id, 'utf-8'),
            u'training_data': self.training_data,
            u'algorithm_id': python_utils.STR(self.algorithm_id, 'utf-8')
        }

        self.payload = {}
        self.payload['vm_id'] = feconf.DEFAULT_VM_ID
        secret = feconf.DEFAULT_VM_SHARED_SECRET
        self.payload['message'] = json.dumps({})
        self.payload['signature'] = classifier.generate_signature(
            secret, self.payload['message'])
Example #4
def assign_rating_to_exploration(user_id, exploration_id, new_rating):
    """Records the rating awarded by the user to the exploration in both the
    user-specific data and exploration summary.

    This function validates the exploration id but not the user id.

    Args:
        user_id: str. The id of the user assigning the rating.
        exploration_id: str. The id of the exploration that is
            assigned a rating.
        new_rating: int. Value of assigned rating, should be between
            1 and 5 inclusive.
    """

    if not isinstance(new_rating, int):
        raise ValueError(
            'Expected the rating to be an integer, received %s' % new_rating)

    if new_rating not in ALLOWED_RATINGS:
        raise ValueError('Expected a rating 1-5, received %s.' % new_rating)

    try:
        exp_fetchers.get_exploration_by_id(exploration_id)
    except Exception:
        raise Exception('Invalid exploration id %s' % exploration_id)

    def _update_user_rating():
        """Updates the user rating of the exploration. Returns the old rating
        before updation.
        """
        exp_user_data_model = user_models.ExplorationUserDataModel.get(
            user_id, exploration_id)
        if exp_user_data_model:
            old_rating = exp_user_data_model.rating
        else:
            old_rating = None
            exp_user_data_model = user_models.ExplorationUserDataModel.create(
                user_id, exploration_id)
        exp_user_data_model.rating = new_rating
        exp_user_data_model.rated_on = datetime.datetime.utcnow()
        exp_user_data_model.put()
        return old_rating
    old_rating = transaction_services.run_in_transaction(_update_user_rating)

    exploration_summary = exp_fetchers.get_exploration_summary_by_id(
        exploration_id)
    if not exploration_summary.ratings:
        exploration_summary.ratings = feconf.get_empty_ratings()
    exploration_summary.ratings[python_utils.STR(new_rating)] += 1
    if old_rating:
        exploration_summary.ratings[python_utils.STR(old_rating)] -= 1

    event_services.RateExplorationEventHandler.record(
        exploration_id, user_id, new_rating, old_rating)

    exploration_summary.scaled_average_rating = (
        exp_services.get_scaled_average_rating(
            exploration_summary.ratings))

    exp_services.save_exploration_summary(exploration_summary)
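A minimal sketch of the ratings-summary bookkeeping above, using a plain dict with string keys '1' through '5' (the empty-ratings dict shape is assumed for illustration):

# Assumed shape of the summary ratings dict, for illustration only.
ratings = {'1': 0, '2': 0, '3': 0, '4': 1, '5': 0}
new_rating, old_rating = 5, 4
ratings[str(new_rating)] += 1
if old_rating:
    ratings[str(old_rating)] -= 1
# ratings == {'1': 0, '2': 0, '3': 0, '4': 0, '5': 1}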
Example #5
    def test_validate_list_values(self):
        doc1 = {'f': ['a', 'b', ['c', 'd']]}
        doc2 = {'f': ['a', 'b', 3, set([4, 5, 6])]}

        # The python_utils.STR() of list and set is passed in to ensure that
        # our error message mentions the type the user passed in.
        with self.assertRaisesRegexp(ValueError, python_utils.STR(list)):
            gae_search_services.add_documents_to_index([doc1], 'my_index')

        with self.assertRaisesRegexp(ValueError, python_utils.STR(set)):
            gae_search_services.add_documents_to_index([doc2], 'my_index')
Example #6
    def test_remove_collection_from_learner_playlist(self):
        self.login(self.VIEWER_EMAIL)

        # Add collections to the learner playlist.
        learner_progress_services.add_collection_to_learner_playlist(
            self.viewer_id, self.COL_ID_1)
        learner_progress_services.add_collection_to_learner_playlist(
            self.viewer_id, self.COL_ID_2)
        self.assertEqual(
            learner_playlist_services.
            get_all_collection_ids_in_learner_playlist(  # pylint: disable=line-too-long
                self.viewer_id),
            [self.COL_ID_1, self.COL_ID_2])

        # Remove a collection.
        self.delete_json(
            python_utils.STR(
                '%s/%s/%s' %
                (feconf.LEARNER_PLAYLIST_DATA_URL,
                 constants.ACTIVITY_TYPE_COLLECTION, self.COL_ID_1)))
        self.assertEqual(
            learner_playlist_services.
            get_all_collection_ids_in_learner_playlist(  # pylint: disable=line-too-long
                self.viewer_id),
            [self.COL_ID_2])

        # Removing the same collection again has no effect.
        self.delete_json(
            python_utils.STR(
                '%s/%s/%s' %
                (feconf.LEARNER_PLAYLIST_DATA_URL,
                 constants.ACTIVITY_TYPE_COLLECTION, self.COL_ID_1)))
        self.assertEqual(
            learner_playlist_services.
            get_all_collection_ids_in_learner_playlist(  # pylint: disable=line-too-long
                self.viewer_id),
            [self.COL_ID_2])

        # Remove the second collection.
        self.delete_json(
            python_utils.STR(
                '%s/%s/%s' %
                (feconf.LEARNER_PLAYLIST_DATA_URL,
                 constants.ACTIVITY_TYPE_COLLECTION, self.COL_ID_2)))
        self.assertEqual(
            learner_playlist_services.
            get_all_collection_ids_in_learner_playlist(  # pylint: disable=line-too-long
                self.viewer_id),
            [])

        self.logout()
Example #7
    def map(item):
        if item.deleted:
            yield (CollectionMigrationOneOffJob._DELETED_KEY, 1)
            return

        # Note: the read will bring the collection up to the newest version.
        collection = collection_services.get_collection_by_id(item.id)
        try:
            collection.validate(strict=False)
        except Exception as e:
            logging.error('Collection %s failed validation: %s' % (item.id, e))
            yield (CollectionMigrationOneOffJob._ERROR_KEY,
                   'Collection %s failed validation: %s' % (item.id, e))
            return

        # Write the new collection into the datastore if it's different from
        # the old version.
        #
        # Note: to_version really should be int, but left as str to conform
        # with legacy data.
        if item.schema_version <= feconf.CURRENT_COLLECTION_SCHEMA_VERSION:
            commit_cmds = [{
                'cmd':
                collection_domain.CMD_MIGRATE_SCHEMA_TO_LATEST_VERSION,
                'from_version':
                item.schema_version,
                'to_version':
                python_utils.STR(feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
            }]
            collection_services.update_collection(
                feconf.MIGRATION_BOT_USERNAME, item.id, commit_cmds,
                'Update collection schema version to %d.' %
                (feconf.CURRENT_COLLECTION_SCHEMA_VERSION))
            yield (CollectionMigrationOneOffJob._MIGRATED_KEY, 1)
Example #8
    def test_parse_string(self):
        parsed_str = jinja_utils.parse_string('{{test}}', {'test': 'hi'})
        self.assertEqual(parsed_str, 'hi')

        # Some parameters are missing.
        parsed_str = jinja_utils.parse_string('{{test}} and {{test2}}',
                                              {'test2': 'hi'})
        self.assertEqual(parsed_str, ' and hi')

        # All parameters are missing.
        parsed_str = jinja_utils.parse_string('{{test}} and {{test2}}', {})
        self.assertEqual(parsed_str, ' and ')

        # The string has no parameters.
        parsed_str = jinja_utils.parse_string('no params', {'param': 'hi'})
        self.assertEqual(parsed_str, 'no params')

        # Integer parameters are used.
        parsed_str = jinja_utils.parse_string('int {{i}}', {'i': 2})
        self.assertEqual(parsed_str, 'int 2')

        # Invalid input string is used.
        with self.assertRaisesRegexp(Exception,
                                     'Unable to parse string with Jinja: {{'):
            jinja_utils.parse_string('{{', {'a': 3, 'b': 0})

        # Invalid expression is used.
        parsed_str = jinja_utils.parse_string('{{ a/b }}', {'a': 1, 'b': 0})
        self.assertEqual(parsed_str,
                         python_utils.STR('[CONTENT PARSING ERROR]'))
Example #9
    def reduce(query_model_id, stringified_user_ids):
        query_model = user_models.UserQueryModel.get(query_model_id)
        user_ids = [ast.literal_eval(v) for v in stringified_user_ids]
        query_model.user_ids = [
            python_utils.STR(user_id) for user_id in user_ids
        ]
        query_model.put()
Example #10
    def _generate_id(cls, exp_id):
        """Generates a unique id for the training job of the form
        {{exp_id}}.{{random_hash_of_16_chars}}

        Args:
            exp_id: str. ID of the exploration.

        Returns:
            str. ID of the new ClassifierTrainingJobModel instance.

        Raises:
            Exception: The id generator for ClassifierTrainingJobModel is
                producing too many collisions.
        """

        for _ in python_utils.RANGE(base_models.MAX_RETRIES):
            new_id = '%s.%s' % (exp_id,
                                utils.convert_to_hash(
                                    python_utils.STR(
                                        utils.get_random_int(
                                            base_models.RAND_RANGE)),
                                    base_models.ID_LENGTH))
            if not cls.get_by_id(new_id):
                return new_id

        raise Exception(
            'The id generator for ClassifierTrainingJobModel is producing '
            'too many collisions.')
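A standalone sketch of the collision-retry pattern used here, with hashlib and an in-memory set standing in for Oppia's hash utility and the datastore lookup (all names and constants below are made up):

import hashlib
import random

MAX_RETRIES = 10    # hypothetical retry budget
ID_LENGTH = 16      # hypothetical hash length
_existing_ids = set()  # stands in for cls.get_by_id() against the datastore

def generate_job_id(exp_id):
    for _ in range(MAX_RETRIES):
        # Hash a random integer and keep a fixed-length prefix.
        random_hash = hashlib.sha1(
            str(random.randint(0, 2 ** 30)).encode('utf-8')).hexdigest()
        new_id = '%s.%s' % (exp_id, random_hash[:ID_LENGTH])
        if new_id not in _existing_ids:
            _existing_ids.add(new_id)
            return new_id
    raise Exception('Too many id collisions.')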
Example #11
    def test_delete_error_with_transient_result(self):
        error = self._get_delete_error(3, transient=1)
        failing_delete = test_utils.FailingFunction(search.Index.delete, error,
                                                    4)
        delete_docs_counter = test_utils.CallCounter(
            gae_search_services.delete_documents_from_index)
        index = search.Index('my_index')
        for i in python_utils.RANGE(3):
            index.put(
                search.Document(
                    doc_id='d' + python_utils.STR(i),
                    fields=[search.TextField(name='prop', value='value')]))

        delete_ctx = self.swap(search.Index, 'delete', failing_delete)
        delete_docs_ctx = self.swap(gae_search_services,
                                    'delete_documents_from_index',
                                    delete_docs_counter)
        with delete_ctx, delete_docs_ctx:
            gae_search_services.delete_documents_from_index(['d0', 'd1', 'd2'],
                                                            'my_index',
                                                            retries=5)

        self.assertEqual(delete_docs_counter.times_called, 5)
        for i in python_utils.RANGE(3):
            result = search.Index('my_index').get('d' + python_utils.STR(i))
            self.assertIsNone(result)
Example #12
    def _generate_id(cls, intent):
        """Generates an ID for a new SentEmailModel instance.

        Args:
            intent: str. The intent string, i.e. the purpose of the email.
                Valid intent strings are defined in feconf.py.

        Returns:
            str. The newly-generated ID for the SentEmailModel instance.

        Raises:
            Exception: The id generator for SentEmailModel is producing
                too many collisions.
        """
        id_prefix = '%s.' % intent

        for _ in python_utils.RANGE(base_models.MAX_RETRIES):
            new_id = '%s.%s' % (id_prefix,
                                utils.convert_to_hash(
                                    python_utils.STR(
                                        utils.get_random_int(
                                            base_models.RAND_RANGE)),
                                    base_models.ID_LENGTH))
            if not cls.get_by_id(new_id):
                return new_id

        raise Exception(
            'The id generator for SentEmailModel is producing too many '
            'collisions.')
Example #13
def test_manifest_syntax(dependency_type, dependency_dict):
    """This checks syntax of the manifest.json dependencies.

    Display warning message when there is an error and terminate the program.
    Args:
      dependency_type: str. Dependency download format.
      dependency_dict: dict. manifest.json dependency dict.
    """
    keys = list(dependency_dict.keys())
    mandatory_keys = DOWNLOAD_FORMATS_TO_MANIFEST_KEYS[dependency_type][
        'mandatory_keys']
    # Each optional key pair requires exactly one of its members to be
    # present as a key in dependency_dict.
    optional_key_pairs = DOWNLOAD_FORMATS_TO_MANIFEST_KEYS[dependency_type][
        'optional_key_pairs']
    for key in mandatory_keys:
        if key not in keys:
            python_utils.PRINT('------------------------------------------')
            python_utils.PRINT('There is a syntax error in this dependency')
            python_utils.PRINT(dependency_dict)
            python_utils.PRINT('This key is missing or misspelled: "%s".' %
                               key)
            python_utils.PRINT('Exiting')
            sys.exit(1)
    if optional_key_pairs:
        for optional_keys in optional_key_pairs:
            optional_keys_in_dict = [
                key for key in optional_keys if key in keys
            ]
            if len(optional_keys_in_dict) != 1:
                python_utils.PRINT(
                    '------------------------------------------')
                python_utils.PRINT('There is a syntax error in this dependency')
                python_utils.PRINT(dependency_dict)
                python_utils.PRINT(
                    'Exactly one of these optional keys must be used: "%s".' %
                    python_utils.STR(optional_keys))
                python_utils.PRINT('Exiting')
                sys.exit(1)

    # Checks the validity of the URL corresponding to the file format.
    dependency_url = dependency_dict['url']
    if '#' in dependency_url:
        dependency_url = dependency_url.rpartition('#')[0]
    is_zip_file_format = dependency_type == _DOWNLOAD_FORMAT_ZIP
    is_tar_file_format = dependency_type == _DOWNLOAD_FORMAT_TAR
    if (dependency_url.endswith('.zip') and not is_zip_file_format
            or is_zip_file_format and not dependency_url.endswith('.zip')
            or dependency_url.endswith('.tar.gz') and not is_tar_file_format
            or is_tar_file_format and not dependency_url.endswith('.tar.gz')):
        python_utils.PRINT('------------------------------------------')
        python_utils.PRINT('There is a syntax error in this dependency')
        python_utils.PRINT(dependency_dict)
        python_utils.PRINT('This url %s is invalid for the %s file format.' %
                           (dependency_url, dependency_type))
        python_utils.PRINT('Exiting.')
        sys.exit(1)
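A small sketch of the "exactly one key per optional pair" rule described in the comment above, using a made-up dependency dict and key pair:

# Hypothetical dependency entry and optional key pair, for illustration only.
dependency_dict = {'url': 'https://example.com/lib-1.0.zip', 'version': '1.0'}
optional_key_pairs = [('version', 'downloadFormat')]
for optional_keys in optional_key_pairs:
    optional_keys_in_dict = [
        key for key in optional_keys if key in dependency_dict
    ]
    assert len(optional_keys_in_dict) == 1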
Example #14
def regenerate_image_filename_using_dimensions(filename, height, width):
    """Returns the name of the image file with dimensions in it.

    Args:
        filename: str. The name of the image file to be renamed.
        height: int. Height of the image.
        width: int. Width of the image.

    Returns:
        str. The name of the image file with its dimensions in it.
    """
    filename_wo_filetype = filename[:filename.rfind('.')]
    filetype = filename[filename.rfind('.') + 1:]
    dimensions_suffix = '_height_%s_width_%s' % (
        python_utils.STR(height), python_utils.STR(width))
    new_filename = '%s%s.%s' % (
        filename_wo_filetype, dimensions_suffix, filetype)
    return new_filename
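For example, a filename 'img.png' with height 120 and width 450 becomes 'img_height_120_width_450.png'. A standalone sketch of the same renaming logic, using the built-in str() in place of python_utils.STR (assumed here to behave like str) and assuming the filename has an extension:

def add_dimensions_to_filename(filename, height, width):
    # Split 'img.png' into 'img' and 'png' at the last dot.
    base, _, filetype = filename.rpartition('.')
    return '%s_height_%s_width_%s.%s' % (
        base, str(height), str(width), filetype)

# add_dimensions_to_filename('img.png', 120, 450)
#     == 'img_height_120_width_450.png'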
Example #15
    def test_index_must_be_string(self):
        index = search.Index('test')
        # Check that the error message mentions the type the user passed in.
        with self.assertRaisesRegexp(ValueError,
                                     python_utils.STR(type(index))):
            gae_search_services.add_documents_to_index(
                {
                    'id': 'one',
                    'key': 'value'
                }, index)
Example #16
    def get_incremented_node_id(cls, node_id):
        """Increments the next node id of the story.

        Returns:
            str. The new next node id.
        """
        current_number = StoryNode.get_number_from_node_id(node_id)
        incremented_node_id = NODE_ID_PREFIX + python_utils.STR(
            current_number + 1)
        return incremented_node_id
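A rough standalone sketch of the increment, assuming NODE_ID_PREFIX is 'node_' and that get_number_from_node_id() extracts the trailing integer (both are assumptions made for illustration):

NODE_ID_PREFIX = 'node_'  # assumed value, for illustration only

def increment_node_id(node_id):
    # Extract the trailing number, e.g. 'node_1' -> 1, and bump it.
    current_number = int(node_id[len(NODE_ID_PREFIX):])
    return NODE_ID_PREFIX + str(current_number + 1)

# increment_node_id('node_1') == 'node_2'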
Example #17
    def test_remove_exploration_from_learner_playlist(self):
        self.login(self.VIEWER_EMAIL)

        # Add explorations to the learner playlist.
        learner_progress_services.add_exp_to_learner_playlist(
            self.viewer_id, self.EXP_ID_1)
        learner_progress_services.add_exp_to_learner_playlist(
            self.viewer_id, self.EXP_ID_2)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_1, self.EXP_ID_2])

        # Remove an exploration.
        self.delete_json(
            python_utils.STR(
                '%s/%s/%s' %
                (feconf.LEARNER_PLAYLIST_DATA_URL,
                 constants.ACTIVITY_TYPE_EXPLORATION, self.EXP_ID_1)))
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2])

        # Removing the same exploration again has no effect.
        self.delete_json(
            python_utils.STR(
                '%s/%s/%s' %
                (feconf.LEARNER_PLAYLIST_DATA_URL,
                 constants.ACTIVITY_TYPE_EXPLORATION, self.EXP_ID_1)))
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2])

        # Remove the second exploration.
        self.delete_json(
            python_utils.STR(
                '%s/%s/%s' %
                (feconf.LEARNER_PLAYLIST_DATA_URL,
                 constants.ACTIVITY_TYPE_EXPLORATION, self.EXP_ID_2)))
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [])

        self.logout()
Example #18
def base64_from_int(value):
    """Converts the number into base64 representation.

    Args:
        value: int. Integer value for conversion into base64.

    Returns:
        *. Returns the base64 representation of the number passed.
    """
    return base64.b64encode(python_utils.STR([value]))
Example #19
    def get_full_message_id(self, message_id):
        """Returns the full id of the message.

        Args:
            message_id: int. The id of the message for which we have to fetch
                the complete message id.

        Returns:
            str. The full id corresponding to the given message id.
        """
        return '.'.join([self.id, python_utils.STR(message_id)])
Example #20
    def map(item):
        if item.deleted:
            return

        exploration = exp_fetchers.get_exploration_from_model(item)
        for state_name, state in exploration.states.items():
            hints_length = len(state.interaction.hints)
            if hints_length > 0:
                exp_and_state_key = '%s %s' % (item.id,
                                               state_name.encode('utf-8'))
                yield (python_utils.STR(hints_length), exp_and_state_key)
Example #21
    def test_delete_multiple_documents(self):
        index = search.Index('my_index')
        for i in python_utils.RANGE(10):
            field = search.TextField(name='k', value='v%d' % i)
            doc = search.Document(doc_id='doc%d' % i, fields=[field])
            index.put([doc])
        gae_search_services.delete_documents_from_index(
            ['doc' + python_utils.STR(i) for i in python_utils.RANGE(10)],
            'my_index')
        for i in python_utils.RANGE(10):
            self.assertIsNone(index.get('doc%d' % i))
Example #22
def to_ascii(input_string):
    """Change unicode characters in a string to ascii if possible.

    Args:
        input_string: str. String to convert.

    Returns:
        str. String containing the ascii representation of the input string.
    """
    return unicodedata.normalize(
        'NFKD', python_utils.STR(input_string)).encode('ascii', 'ignore')
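A minimal standalone sketch of the same technique using only the standard library, assuming python_utils.STR behaves like the built-in text type:

import unicodedata

def to_ascii_sketch(input_string):
    # NFKD decomposes accented characters so the ASCII base letter survives
    # the encode step; anything with no ASCII equivalent is dropped.
    return unicodedata.normalize(
        'NFKD', input_string).encode('ascii', 'ignore')

# to_ascii_sketch(u'caf\xe9 r\xe9sum\xe9') == b'cafe resume'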
Example #23
    def get(self):
        """Handles GET requests."""
        return_url = python_utils.STR(
            self.request.get('return_url', self.request.uri))
        # Validate that return_url does not redirect to an external site.
        if re.match('^/[^//]', return_url) is None:
            return_url = '/'
        if user_services.has_fully_registered(self.user_id):
            self.redirect(return_url)
            return

        self.render_template('dist/signup-page.mainpage.html')
Example #24
    def _generate_id(cls, thread_id, message_id):
        """Generates full message ID given the thread ID and message ID.

        Args:
            thread_id: str. Thread ID of the thread to which the message
                belongs.
            message_id: int. Message ID of the message.

        Returns:
            str. Full message ID.
        """
        return '.'.join([thread_id, python_utils.STR(message_id)])
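The resulting full ID is just the two parts joined with a dot; for example (the values below are made up):

# Hypothetical IDs, shown only to illustrate the resulting format.
thread_id = 'exploration.exp1.thread_abc'
message_id = 0
full_message_id = '.'.join([thread_id, str(message_id)])
# full_message_id == 'exploration.exp1.thread_abc.0'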
Example #25
    def map(item):
        if item.deleted:
            return

        # Do not upgrade explorations that fail non-strict validation.
        old_exploration = exp_fetchers.get_exploration_by_id(item.id)
        try:
            old_exploration.validate()
        except Exception as e:
            logging.error('Exploration %s failed non-strict validation: %s' %
                          (item.id, e))
            return

        # If the exploration model being stored in the datastore is not the
        # most up-to-date states schema version, then update it.
        if (item.states_schema_version != feconf.CURRENT_STATE_SCHEMA_VERSION):
            # Note: update_exploration does not need to apply a change list in
            # order to perform a migration. See the related comment in
            # exp_services.apply_change_list for more information.
            #
            # Note: from_version and to_version really should be int, but left
            # as str to conform with legacy data.
            commit_cmds = [
                exp_domain.ExplorationChange({
                    'cmd':
                    exp_domain.CMD_MIGRATE_STATES_SCHEMA_TO_LATEST_VERSION,
                    'from_version':
                    python_utils.STR(item.states_schema_version),
                    'to_version':
                    python_utils.STR(feconf.CURRENT_STATE_SCHEMA_VERSION)
                })
            ]
            exp_services.update_exploration(
                feconf.MIGRATION_BOT_USERNAME, item.id, commit_cmds,
                'Update exploration states from schema version %d to %d.' %
                (item.states_schema_version,
                 feconf.CURRENT_STATE_SCHEMA_VERSION))
            yield ('SUCCESS', item.id)
Example #26
    def get_new_id(cls, entity_name):
        """Overwrites superclass method.

        Args:
            entity_name: str. The name of the entity to create a new job id for.

        Returns:
            str. A job id.
        """
        job_type = entity_name
        current_time_str = python_utils.STR(
            int(utils.get_current_time_in_millisecs()))
        random_int = random.randint(0, 1000)
        return '%s-%s-%s' % (job_type, current_time_str, random_int)
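A quick sketch of the resulting job ID shape, with a made-up job type and timestamp:

import random

job_type = 'SampleOneOffJob'           # hypothetical entity name
current_time_str = str(1700000000000)  # milliseconds since epoch, made up
new_id = '%s-%s-%s' % (job_type, current_time_str, random.randint(0, 1000))
# e.g. 'SampleOneOffJob-1700000000000-417'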
Example #27
    def _convert_collection_contents_v3_dict_to_v4_dict(
            cls, collection_contents):
        """Converts from version 3 to 4.

        Adds a skills dict and skill id counter. Migrates prerequisite_skills
        and acquired_skills to prerequisite_skill_ids and acquired_skill_ids.
        Then, gets skills in prerequisite_skill_ids and acquired_skill_ids in
        nodes, and assigns them IDs.

        Args:
            collection_contents: dict. A dict representing the collection
                contents object to convert.

        Returns:
            dict. The updated collection_contents dict.
        """

        skill_names = set()
        for node in collection_contents['nodes']:
            skill_names.update(node['acquired_skills'])
            skill_names.update(node['prerequisite_skills'])
        skill_names_to_ids = {
            name: _SKILL_ID_PREFIX + python_utils.STR(index)
            for index, name in enumerate(sorted(skill_names))
        }

        collection_contents['nodes'] = [{
            'exploration_id':
            node['exploration_id'],
            'prerequisite_skill_ids': [
                skill_names_to_ids[prerequisite_skill_name]
                for prerequisite_skill_name in node['prerequisite_skills']
            ],
            'acquired_skill_ids': [
                skill_names_to_ids[acquired_skill_name]
                for acquired_skill_name in node['acquired_skills']
            ]
        } for node in collection_contents['nodes']]

        collection_contents['skills'] = {
            skill_id: {
                'name': skill_name,
                'question_ids': []
            }
            for skill_name, skill_id in skill_names_to_ids.items()
        }

        collection_contents['next_skill_id'] = len(skill_names)

        return collection_contents
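A minimal sketch of the skill-name-to-ID mapping step above, assuming a skill ID prefix of 'skill' (the real value of _SKILL_ID_PREFIX may differ):

# Skill names collected from the nodes of a hypothetical v3 collection.
skill_names = {'addition', 'subtraction'}
skill_names_to_ids = {
    name: 'skill%s' % index
    for index, name in enumerate(sorted(skill_names))
}
# skill_names_to_ids == {'addition': 'skill0', 'subtraction': 'skill1'}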
Example #28
def _get_remote_name():
    """Get the remote name of the local repository.

    Returns:
        str. The remote name of the local repository.
    """
    remote_name = ''
    remote_num = 0
    get_remotes_name_cmd = 'git remote'.split()
    task = subprocess.Popen(get_remotes_name_cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = task.communicate()
    remotes = python_utils.STR(out)[:-1].split('\n')
    if not err:
        for remote in remotes:
            get_remotes_url_cmd = ('git config --get remote.%s.url' %
                                   remote).split()
            task = subprocess.Popen(get_remotes_url_cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            remote_url, err = task.communicate()
            if not err:
                if remote_url.endswith('oppia/oppia.git\n'):
                    remote_num += 1
                    remote_name = remote
            else:
                raise ValueError(err)
    else:
        raise ValueError(err)

    if not remote_num:
        raise Exception(
            'Error: Please set upstream for the lint checks to run '
            'efficiently. To do that follow these steps:\n'
            '1. Run the command \'git remote -v\'\n'
            '2a. If upstream is listed in the command output, then run the '
            'command \'git remote set-url upstream '
            'https://github.com/oppia/oppia.git\'\n'
            '2b. If upstream is not listed in the command output, then run the '
            'command \'git remote add upstream '
            'https://github.com/oppia/oppia.git\'\n')
    elif remote_num > 1:
        python_utils.PRINT(
            'Warning: Please keep only one remote branch for oppia:develop '
            'to run the lint checks efficiently.\n')
        return
    return remote_name
Example #29
def _validate_customization_args_in_tag(tag):
    """Validates customization arguments of Rich Text Components in a soup.

    Args:
        tag: bs4.element.Tag. The html tag to be validated.

    Yields:
        Error message if the attributes of tag are invalid.
    """

    component_types_to_component_classes = rte_component_registry.Registry.get_component_types_to_component_classes() # pylint: disable=line-too-long
    simple_component_tag_names = (
        rte_component_registry.Registry.get_simple_component_tag_names())
    tag_name = tag.name
    value_dict = {}
    attrs = tag.attrs

    for attr in attrs:
        value_dict[attr] = json.loads(unescape_html(attrs[attr]))

    try:
        component_types_to_component_classes[tag_name].validate(value_dict)
        if tag_name == 'oppia-noninteractive-collapsible':
            content_html = value_dict['content-with-value']
            soup_for_collapsible = bs4.BeautifulSoup(
                content_html, 'html.parser')
            for component_name in simple_component_tag_names:
                for component_tag in soup_for_collapsible.findAll(
                        name=component_name):
                    for err_msg in _validate_customization_args_in_tag(
                            component_tag):
                        yield err_msg

        elif tag_name == 'oppia-noninteractive-tabs':
            tab_content_list = value_dict['tab_contents-with-value']
            for tab_content in tab_content_list:
                content_html = tab_content['content']
                soup_for_tabs = bs4.BeautifulSoup(
                    content_html, 'html.parser')
                for component_name in simple_component_tag_names:
                    for component_tag in soup_for_tabs.findAll(
                            name=component_name):
                        for err_msg in _validate_customization_args_in_tag(
                                component_tag):
                            yield err_msg
    except Exception as e:
        yield python_utils.STR(e)
Example #30
    def delete(self, committer_id, commit_message, force_deletion=False):
        """Deletes this model instance.

        Args:
            committer_id: str. The user_id of the user who committed the change.
            commit_message: str. The commit message.
            force_deletion: bool. If True, this model is deleted completely
                from storage; otherwise it is only marked as deleted. Default
                is False.

        Raises:
            Exception: This model instance has already been deleted.
        """
        if force_deletion:
            current_version = self.version

            version_numbers = [
                python_utils.STR(num + 1)
                for num in python_utils.RANGE(current_version)
            ]
            snapshot_ids = [
                self._get_snapshot_id(self.id, version_number)
                for version_number in version_numbers
            ]

            metadata_keys = [
                ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)
                for snapshot_id in snapshot_ids
            ]
            ndb.delete_multi(metadata_keys)

            content_keys = [
                ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)
                for snapshot_id in snapshot_ids
            ]
            ndb.delete_multi(content_keys)

            super(VersionedModel, self).delete()
        else:
            self._require_not_marked_deleted()  # pylint: disable=protected-access
            self.deleted = True

            commit_cmds = [{'cmd': self.CMD_DELETE_COMMIT}]

            self._trusted_commit(committer_id, self._COMMIT_TYPE_DELETE,
                                 commit_message, commit_cmds)