Example 1
def load_students_to_db(config):
    """
    Students are saved to the DB as we go -- we load a batch from the input file,
    convert them to students, perform cleanup, then pass them to the exporter.
    Any students that already existed in the DB when the task started are ignored --
    this allows us to make forward progress even if the task times out during import.

    There's a minor chance of data leakage if the task terminates while the unique visitors
    are still processing a student.  That won't cause issues though -- when the job resumes,
    that student will receive a fresh unique ID and the "zombie" record will simply be
    an unused number with no database footprint.
    """

    if config.estimated_student_count == STUDENT_COUNT_NOT_INITIALIZED:
        logging.info("Initializing student count")
        initialize_student_count(config)

    importer = ImporterFactory.get(config)
    exporter = AsyncNdbExporter.get_or_create(config)
    visitors = VisitorFactory.get_cleanup_visitors(config)

    existing_sids = Student.get_all_id_numbers()
    if existing_sids:
        logging.info("{} students already loaded; those records will be skipped".format(
                len(existing_sids)))

    for student_batch in importer.generate_multi():
        # First, filter out students that are already in the datastore (this means
        # they've already been cleaned up and exported).  Then load each remaining
        # student's data from the stored cache, creating a fresh CachedStudentData
        # object if this is a new student.
        students_not_in_db = [stu for stu in student_batch if stu.sid not in existing_sids]
        cached_student_data = CachedStudentData.get_or_create_multi(
                [stu.sid for stu in students_not_in_db])
        for student in students_not_in_db:
            student.copy_from_cache(cached_student_data[student.sid])

        # Now perform data cleanup and (when done) save the students to DB
        for visitor in visitors:
            visitor.accept_multi(students_not_in_db)
        for student in [stu for stu in students_not_in_db if stu.cache_is_dirty]:
            cache = cached_student_data[student.sid]
            student.copy_to_cache(cache)
            exporter.append(cache)
        for student in students_not_in_db:
            student.cleanup_complete = True
            exporter.append(student)
        # Manual flush after each batch instead of at the end of the run; the
        # uniquifier will also be dumping entities into the exporter, so we need
        # to make sure that everything's been committed before starting a fresh
        # batch (those entities are only cached within the scope of an
        # accept_multi run)
        exporter.flush()
    for visitor in visitors:
        visitor.close()
    logging.info("Filter results: {} accepted, {} rejected".format(
        importer.import_count(), importer.reject_count()))
    logging.info("All students loaded to database")
Example 2
    def test_student_caching(self):
        # Ensure empty starting DB
        result = Student.query().fetch(limit=None)
        self.assertFalse(result)
        result = CachedStudentData.query().fetch(limit=None)
        self.assertFalse(result)

        # Ensure one cache is created per student
        # Create initial student
        stu1 = get_dummy_student(1)
        stu1.put()
        cache1 = CachedStudentData.get_or_create(stu1.sid)
        cache1.put()
        result = Student.query().fetch(limit=None)
        self.assertEqual(1, len(result))
        result = CachedStudentData.query().fetch(limit=None)
        self.assertEqual(1, len(result))
        # Create second student
        stu2 = get_dummy_student(2)
        stu2.put()
        cache2 = CachedStudentData.get_or_create(stu2.sid)
        cache2.put()
        result = Student.query().fetch(limit=None)
        self.assertEqual(2, len(result))
        result = CachedStudentData.query().fetch(limit=None)
        self.assertEqual(2, len(result))
        # Recreate initial
        cache1 = CachedStudentData.get_or_create(stu1.sid)
        cache1.put()
        result = Student.query().fetch(limit=None)
        self.assertEqual(2, len(result))
        result = CachedStudentData.query().fetch(limit=None)
        self.assertEqual(2, len(result))

        # Ensure data is copied to cache correctly
        username = "******"
        anon_username = "******"
        password = "******"
        self.assertEqual(stu1.username, "")
        self.assertEqual(stu1.anon_username, "")
        self.assertEqual(stu1.password, "")
        self.assertEqual(cache1.username, "")
        self.assertEqual(cache1.anon_username, "")
        self.assertEqual(cache1.password, "")
        cache1.password = "stale_password"  # should be overwritten by the copy
        stu1.username = username
        stu1.anon_username = anon_username
        stu1.password = password
        stu1.copy_to_cache(cache1)
        self.assertEqual(cache1.username, username)
        self.assertEqual(cache1.anon_username, anon_username)
        self.assertEqual(cache1.password, password)

        # Ensure data is copied from cache correctly
        username = "******"
        anon_username = "******"
        password = "******"
        self.assertEqual(stu2.username, "")
        self.assertEqual(stu2.anon_username, "")
        self.assertEqual(stu2.password, "")
        self.assertEqual(cache2.username, "")
        self.assertEqual(cache2.anon_username, "")
        self.assertEqual(cache2.password, "")
        stu2.password = "stale_password"  # should be overwritten by the copy
        cache2.username = username
        cache2.anon_username = anon_username
        cache2.password = password
        stu2.copy_from_cache(cache2)
        self.assertEqual(stu2.username, username)
        self.assertEqual(stu2.anon_username, anon_username)
        self.assertEqual(stu2.password, password)
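
The invariants this test pins down -- exactly one cache entity per student, and field-for-field copying in both directions -- suggest model helpers along these lines. This is a minimal sketch assuming ndb models with the cache keyed by the student's sid; the real property lists and helper bodies aren't shown in the source.

from google.appengine.ext import ndb


class CachedStudentData(ndb.Model):
    # Hypothetical property list, mirroring the fields the test exercises
    username = ndb.StringProperty(default="")
    anon_username = ndb.StringProperty(default="")
    password = ndb.StringProperty(default="")

    @classmethod
    def get_or_create(cls, sid):
        # Keying the cache entity by sid makes this idempotent: a second
        # call for the same student returns the stored entity instead of
        # minting a duplicate.  Nothing is written until the caller
        # invokes put(), matching how the test uses it.
        key = ndb.Key(cls, sid)
        return key.get() or cls(key=key)


class Student(ndb.Model):
    sid = ndb.IntegerProperty()
    username = ndb.StringProperty(default="")
    anon_username = ndb.StringProperty(default="")
    password = ndb.StringProperty(default="")

    _CACHED_FIELDS = ("username", "anon_username", "password")

    def copy_to_cache(self, cache):
        for field in self._CACHED_FIELDS:
            setattr(cache, field, getattr(self, field))

    def copy_from_cache(self, cache):
        for field in self._CACHED_FIELDS:
            setattr(self, field, getattr(cache, field))

Keying the cache by sid is what keeps both entity counts at two after the "Recreate initial" step: the second get_or_create(stu1.sid) fetches the existing entity rather than inserting a new one.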