def test_get_from_cache(self):
    if settings.ENABLE_CACHING:
        s = Student().blocking_save()
        cached_value = ""
        clean_value = s._data
        try:
            cached_value = cache.get(s.key)
        except:
            pass
        if six.PY3:
            cached_value = cached_value.decode()
        cached_value_d = json.loads(cached_value)
        for key in cached_value_d.keys():
            assert cached_value_d[key] == clean_value[key]
        # To make sure the data is read from cache while key is in it
        try:
            cache.set(s.key, "a")
            assert Student.objects.get(s.key) == "a"
        except:
            pass
        s.blocking_delete()
        try:
            assert not cache.get(s.key)
        except:
            pass

def test_escaping(self):
    Student.objects.delete()
    with BlockSave(Student):
        Student(name='jhon smith', surname='jr.').save()
        Student(name='jhon smith', surname='sr.').save()
    # assert Student.objects.filter(name__contains='on sm').count() == 2
    assert Student.objects.filter(name='jhon smith').count() == 2

def test_json_to_model_to_json():
    st = Student()
    st.set_data(data)
    clean_value = st.clean_value()
    clean_data['timestamp'] = clean_value['timestamp']
    clean_data['updated_at'] = clean_value['updated_at']
    clean_data['deleted_at'] = clean_value['deleted_at']
    assert clean_data == clean_value

def test_collect_index_fields():
    st = Student()
    result = st._collect_index_fields()
    sorted_result = sorted(result, key=lambda x: x[0])
    sorted_data = sorted(test_data_solr_fields, key=lambda x: x[0])
    # pprint(sorted_data)
    # pprint(sorted_result)
    assert sorted_result == sorted_data

def test_create_solr_schema():
    st = Student()
    fields = st._collect_index_fields()
    result = SchemaUpdater.get_schema_fields(fields)
    if not settings.SOLR['store']:
        assert sorted(result) == sorted(test_data_solr_schema_debug_zero)
    else:
        assert sorted(result) == sorted(test_data_solr_schema_debug_not_zero)

def test_create_solr_schema():
    st = Student()
    fields = st._collect_index_fields()
    result = SchemaUpdater.get_schema_fields(fields)
    if not settings.DEBUG:
        assert sorted(result) == sorted(test_data_solr_schema_debug_zero)
    else:
        assert sorted(result) == sorted(test_data_solr_schema_debug_not_zero)

def test_listnode_values(self):
    st = Student()
    l = st.Lectures(code='Mat101')
    l.credit = 4
    assert l.code == 'Mat101'
    assert l.credit == 4
    st.save()
    db_st = Student.objects.get(st.key)
    db_l = db_st.Lectures[0]
    assert l.credit == db_l.credit
    assert l.code == db_l.code

def test_collect_index_fields():
    st = Student()
    result = st._collect_index_fields()
    sorted_result = sorted(result, key=lambda x: x[0])
    if not settings.SOLR['store']:
        sorted_data = sorted(test_data_solr_fields_debug_zero, key=lambda x: x[0])
        assert sorted_result == sorted_data
    else:
        sorted_data = sorted(test_data_solr_fields_debug_not_zero, key=lambda x: x[0])
        assert sorted_result == sorted_data

def test_collect_index_fields():
    st = Student()
    result = st._collect_index_fields()
    sorted_result = sorted(result, key=lambda x: x[0])
    if not settings.DEBUG:
        sorted_data = sorted(test_data_solr_fields_debug_zero, key=lambda x: x[0])
        assert sorted_result == sorted_data
    else:
        sorted_data = sorted(test_data_solr_fields_debug_not_zero, key=lambda x: x[0])
        assert sorted_result == sorted_data

def test_delete_model(self):
    self.prepare_testbed(True)
    s2 = Student(name="Foo").save()
    sleep(1)
    assert Student.objects.filter(name="Foo").count() == 1
    assert Student.objects.filter(deleted=True).count() == 0
    assert Student.objects.count() == 2
    s2.delete()
    sleep(1)
    assert Student.objects.filter(name="Foo").count() == 0
    assert Student.objects.filter(deleted=True).count() == 1
    assert Student.objects.count() == 1

def test_delete_model(self):
    self.prepare_testbed(True)
    s2 = Student(name='Foo').save()
    sleep(1)
    assert Student.objects.filter(name='Foo').count() == 1
    assert Student.objects.filter(deleted=True).count() == 0
    assert Student.objects.count() == 2
    s2.delete()
    sleep(1)
    assert Student.objects.filter(name='Foo').count() == 0
    assert Student.objects.filter(deleted=True).count() == 1
    assert Student.objects.count() == 1

def test_model_to_json_compact():
    st = Student(**data)
    st.join_date = data["join_date"]
    st.AuthInfo(**data["auth_info"])
    for lct_data in data["lectures"]:
        lecture = st.Lectures(**lct_data)
        lecture.NodeInListNode(**lct_data["node_in_list_node"])
        for atd in lct_data["attendance"]:
            lecture.Attendance.add(**atd)
        for exam in lct_data["exams"]:
            lecture.Exams(**exam)
    # print st.clean_value()
    clean_value = st.clean_value()
    clean_data["timestamp"] = clean_value["timestamp"]
    assert clean_data == clean_value

def test_model_to_json_compact():
    st = Student(**data)
    st.join_date = data['join_date']
    st.AuthInfo(**data['auth_info'])
    for lct_data in data['lectures']:
        lecture = st.Lectures(**lct_data)
        lecture.NodeInListNode(**lct_data['node_in_list_node'])
        for atd in lct_data['attendance']:
            lecture.Attendance(**atd)
        for exam in lct_data['exams']:
            lecture.Exams(**exam)
    # print st.clean_value()
    clean_value = st.clean_value()
    clean_data['timestamp'] = clean_value['timestamp']
    assert clean_data == clean_value

def test_delete_model(self):
    self.prepare_testbed(True)
    s2 = Student(name='Foo').blocking_save()
    qs = Student.objects.filter(deleted=True)
    # len is to test __iter__
    assert len([i for i in qs]) == qs.count() == 0
    assert Student.objects.filter(name='Foo').count() == 1
    assert Student.objects.count() == 2
    s2.blocking_delete()
    qs = Student.objects.filter(deleted=True)
    assert Student.objects.filter(name='Foo').count() == 0
    # len is to test __iter__
    assert len([i for i in qs]) == qs.count() == 1
    assert Student.objects.count() == 1

def test_m2m_missing():
    validator = ModelValidator(Student(name='tim'))
    valid = validator.validate({'courses': [1, 33]})
    assert not valid
    assert validator.errors['courses'] == DEFAULT_MESSAGES['related'].format(
        field='id', values=[1, 33])

def test_or_queries(self):
    Student.objects.delete()
    d = {
        's1': ['ali', 'veli'],
        's2': ['joe', 'roby'],
        's3': ['rob', 'zombie'],
        's4': ['go', 'jira']
    }
    if not Student.objects.filter(name=d['s2'][0]):
        for k, v in d.items():
            Student(name=v[0], surname=v[1]).save()
        sleep(1)
    assert 3 == Student.objects.filter(
        name__in=(d['s1'][0], d['s2'][0], d['s3'][0])).count()
    assert 3 == Student.objects.filter(
        name__in=(d['s1'][0], d['s2'][0], d['s3'][0])).filter(
        surname__in=(d['s1'][1], d['s2'][1], d['s3'][1])).count()
    assert 2 == Student.objects.search_on('name', 'surname', contains='rob').count()
    assert 2 == Student.objects.or_filter(
        name__contains='rob', surname__startswith='rob').count()

def get_or_create_new_obj(cls, reset):
    if cls.new_obj is None or reset:
        cls.new_obj = Student()
        cls.new_obj.set_data(data)
        cls.new_obj.save()
        sleep(1)  # wait for Riak -> Solr sync
    return cls.new_obj

def test_m2m_empty():
    validator = ModelValidator(Student(name='tim'))
    valid = validator.validate()
    assert valid
    valid = validator.validate({'courses': []})
    assert valid

def test_m2m_dicts_blank():
    validator = ModelValidator(Student(name='tim'))
    valid = validator.validate({'courses': [{}, {}]})
    assert valid
    valid = validator.validate({'courses': {}})
    assert valid

def test_json_to_model_to_json_partial():
    st = Student()
    partial_data = deepcopy(data)
    partial_data_clean = deepcopy(clean_data)
    partial_data_clean['auth_info']['password'] = None
    partial_data_clean['bio'] = None
    partial_data_clean['lectures'][0]['exams'] = []
    partial_data_clean['lectures'][1]['exams'] = []
    partial_data['auth_info']['password'] = None
    partial_data['bio'] = None
    partial_data['lectures'][0]['exams'] = []
    partial_data['lectures'][1]['exams'] = []
    st._load_data(partial_data)
    clean_value = st.clean_value()
    partial_data_clean['timestamp'] = clean_value['timestamp']
    assert partial_data_clean == clean_value

def test_json_to_model_to_json_partial():
    st = Student()
    partial_data = deepcopy(clean_data)
    partial_data_clean = deepcopy(clean_data)
    partial_data_clean['auth_info']['password'] = None
    partial_data_clean['bio'] = None
    partial_data_clean['lectures'][0]['exams'] = []
    partial_data_clean['lectures'][1]['exams'] = []
    partial_data['auth_info']['password'] = None
    partial_data['bio'] = None
    partial_data['lectures'][0]['exams'] = []
    partial_data['lectures'][1]['exams'] = []
    st.set_data(partial_data)
    clean_value = st.clean_value()
    partial_data_clean['timestamp'] = clean_value['timestamp']
    assert partial_data_clean == clean_value

def test_m2m_save_blank():
    obj = Student(name='tim')
    validator = ModelValidator(obj)
    valid = validator.validate({'courses': [{}, {}]})
    assert valid
    validator.save()
    assert obj.id

def test_overrides():
    class CustomValidator(ModelValidator):
        students = ManyModelChoiceField(Student.select(), Student.name)

    Student.create(name='tim')
    Student.create(name='bob')
    obj = Course.create(name='course1')
    validator = CustomValidator(obj)
    data = {'students': [{'name': 'tim'}, 'bob']}
    valid = validator.validate(data)
    print(validator.errors)
    assert valid
    validator.save()
    assert obj.id
    assert len(obj.students) == 2

def test_m2m_dicts():
    validator = ModelValidator(Student(name='tim'))
    c1 = Course.create(name='course1')
    c2 = Course.create(name='course2')
    valid = validator.validate({'courses': [{'id': c1.id}, {'id': c2.id}]})
    assert valid
    valid = validator.validate({'courses': {'id': c1.id}})
    assert valid

def test_m2m_instances():
    validator = ModelValidator(Student(name='tim'))
    c1 = Course.create(name='course1')
    c2 = Course.create(name='course2')
    valid = validator.validate({'courses': [c1, c2]})
    assert valid
    valid = validator.validate({'courses': c1})
    assert valid

def random_student():
    first_name = f.first_name()
    last_name = f.last_name()
    s = Student()
    s.number = f.random_int(10000000000, 19999999999)
    # random_element expects a single iterable of choices
    s.deleted = f.random_element((False, False, False, False, False, False, True))
    s.archived = f.random_element((False, False, False, False, False, False, True))
    s.bio = '\n'.join(f.paragraphs())
    s.name = first_name
    s.surname = last_name
    s.pno = str(f.random_int(10000000000, 19999999999))
    s.join_date = f.date_time_between('-2000d', '-180d').strftime(DATE_FORMAT)
    ai = s.AuthInfo()
    ai.email = f.email()
    ai.password = f.sha256()
    ai.username = f.simple_profile()['username']
    return s

def test_load_dump_data():
    Student.objects._clear()
    for i in range(5):
        Student(name=str(i)).blocking_save()
    path = '/tmp/load_dump.csv'
    ManagementCommands(args=['dump_data', '--model', 'Student', '--path', path])
    with codecs.open(path, encoding='utf-8') as file:
        out = file.read()
    ManagementCommands(args=['load_data', '--update', '--path', path])
    sleep(1)
    ManagementCommands(args=['dump_data', '--model', 'Student', '--path', path])
    with codecs.open(path, encoding='utf-8') as file:
        assert len(out) > 0 and len(out) == len(file.read())

def test_model_to_json_expanded():
    d = data
    s = Student()
    s.number = d['number']
    s.deleted = d['deleted']
    s.archived = d['archived']
    # s.timestamp = d['timestamp']
    s.bio = d['bio']
    s.name = d['name']
    s.surname = d['surname']
    s.pno = d['pno']
    s.join_date = data['join_date']
    d = data['auth_info']
    ai = s.AuthInfo()
    ai.email = d['email']
    ai.password = d['password']
    ai.username = d['username']
    for ld in data['lectures']:
        lecture = s.Lectures()
        lecture.code = ld['code']
        lecture.credit = ld['credit']
        lecture.name = ld['name']
        milm = lecture.NodeInListNode()
        milm.foo = ld['node_in_list_node']['foo']
        for atd in ld['attendance']:
            attendance = lecture.Attendance()
            attendance.attended = atd['attended']
            attendance.date = atd['date']
            attendance.hour = atd['hour']
        for exam in ld['exams']:
            exm = lecture.Exams()
            exm.date = exam['date']
            exm.point = exam['point']
            exm.type = exam['type']
    clean_value = s.clean_value()
    clean_data['timestamp'] = clean_value['timestamp']
    assert clean_data == clean_value

def test_m2m_save():
    obj = Student(name='tim')
    validator = ModelValidator(obj)
    c1 = Course.create(name='course1')
    c2 = Course.create(name='course2')
    valid = validator.validate({'courses': [c1, c2]})
    assert valid
    validator.save()
    assert obj.id
    assert c1 in obj.courses
    assert c2 in obj.courses

def test_m2m_ints():
    validator = ModelValidator(Student(name='tim'))
    c1 = Course.create(name='course1')
    c2 = Course.create(name='course2')
    valid = validator.validate({'courses': [c1.id, c2.id]})
    print(validator.errors)
    assert valid
    valid = validator.validate({'courses': c1.id})
    assert valid
    valid = validator.validate({'courses': str(c1.id)})
    assert valid

def test_count(self):
    mb = client.bucket_type('pyoko_models').bucket('student')
    Student.objects._clear()
    results = mb.search('-deleted:True', 'pyoko_models_student', **{'rows': 0})
    assert Student.objects.count() == results['num_found'] == 0
    # 770 records will be saved.
    for i in range(770):
        Student(number=str(i % 3)).save()
    # wait until 770 records are saved.
    while mb.search('-deleted:True', 'pyoko_models_student',
                    **{'rows': 0})['num_found'] != 770:
        time.sleep(0.3)
    # total count
    assert Student.objects.count() == 770
    # number '2' results count
    results = mb.search('-deleted:True AND number:2', 'pyoko_models_student',
                        **{'rows': 0})
    assert Student.objects.filter(number='2').count() == results['num_found'] == 256
    # total count
    assert Student.objects.filter(number='2').count() + \
        Student.objects.filter(number='1').count() + \
        Student.objects.filter(number='0').count() == 770
    # set_params and count tests:
    assert Student.objects.filter(number='2').set_params(start=0).count() == 256
    assert Student.objects.filter(number='2').set_params(start=0, rows=35).count() == 35
    assert Student.objects.filter(number='2').set_params(start=125, rows=35).count() == 35
    assert Student.objects.filter(number='2').set_params(start=0, rows=0).count() == 0
    assert Student.objects.filter(number='2').set_params(rows=100).count() == 100
    assert Student.objects.filter(number='2').set_params(start=250, rows=100).count() == 6
    assert Student.objects.filter(number='2').set_params(start=300, rows=100).count() == 0
    self.prepare_testbed(reset=True)

def test_model_to_json_expanded():
    d = data
    s = Student()
    s.number = d["number"]
    s.deleted = d["deleted"]
    # s.timestamp = d['timestamp']
    s.bio = d["bio"]
    s.name = d["name"]
    s.surname = d["surname"]
    s.pno = d["pno"]
    s.join_date = data["join_date"]
    d = data["auth_info"]
    ai = s.AuthInfo()
    ai.email = d["email"]
    ai.password = d["password"]
    ai.username = d["username"]
    for ld in data["lectures"]:
        lecture = s.Lectures()
        lecture.code = ld["code"]
        lecture.idx = ld["idx"]
        lecture.credit = ld["credit"]
        lecture.name = ld["name"]
        milm = lecture.NodeInListNode()
        milm.foo = ld["node_in_list_node"]["foo"]
        for atd in ld["attendance"]:
            attendance = lecture.Attendance()
            attendance.attended = atd["attended"]
            attendance.date = atd["date"]
            attendance.idx = atd["idx"]
            attendance.hour = atd["hour"]
        for exam in ld["exams"]:
            exm = lecture.Exams()
            exm.date = exam["date"]
            exm.idx = exam["idx"]
            exm.point = exam["point"]
            exm.type = exam["type"]
    clean_value = s.clean_value()
    clean_data["timestamp"] = clean_value["timestamp"]
    assert clean_data == clean_value

def test_slicing_indexing(self):
    Student.objects.delete()
    with BlockSave(Student):
        Student(name='Olavi', surname='Mikkonen').save()
        Student(name='Johan', surname='Hegg').save()
        Student(name='Johan', surname='Soderberg').save()
        Student(name='Ted', surname='Lundstrom').save()
        Student(name='Michael', surname='Amott').save()
        Student(name='Daniel', surname='Erlandsson').save()
        Student(name='Sharlee', surname='D\'Angelo').save()
        Student(name='Alissa', surname='White-Gluz').save()
        Student(name='Jeff', surname='Loomis').save()
    # Check regular slices
    assert Student.objects.count() == 9
    assert Student.objects[2:5].count() == 3
    assert Student.objects[1:5].count() == 4
    assert Student.objects[1:6].count() == 5
    assert Student.objects[0:10].count() == 9
    assert Student.objects[0:11].count() == 9
    assert Student.objects[1:11].count() == 8
    assert Student.objects[1:12].count() == 8
    # Check multi-slicing
    assert Student.objects[1:6][2:4].count() == 2
    assert Student.objects[0:7][2:4].count() == 2
    assert Student.objects[0:7][2:5].count() == 3
    # Check get & indexing
    s1 = Student.objects[3:4].get()
    s2 = Student.objects[3:4][0]
    assert s1 == s2
    s1 = Student.objects[3:9][4:5].get()
    s2 = Student.objects[3:9][4:5][0]
    assert s1 == s2
    # Check slicing with filters
    assert Student.objects.filter(name__startswith='J')[1:3].count() == 2
    assert Student.objects.filter(name__startswith='J')[2:3].get() is not None

def test_model_to_json_compact():
    st = Student(**data)
    st.join_date = data['join_date']
    st.AuthInfo(**data['auth_info'])
    st.Lecturer(**data['lecturer'][0])
    for lct_data in data['lectures']:
        lecture = st.Lectures(**lct_data)
        lecture.NodeInListNode(**lct_data['node_in_list_node'])
        for atd in lct_data['attendance']:
            lecture.Attendance.add(**atd)
        for exam in lct_data['exams']:
            lecture.Exams(**exam)
    # print st.clean_value()
    clean_value = st.clean_value()
    clean_data['timestamp'] = clean_value['timestamp']
    clean_data['updated_at'] = clean_value['updated_at']
    assert clean_data == clean_value

def test_all(self):
    mb = client.bucket_type('pyoko_models').bucket('student')
    row_size = BaseAdapter()._cfg['row_size']
    Student.objects._clear()
    assert Student.objects.count() == 0
    for i in range(row_size + 100):
        Student(name=str(i)).save()
    while Student.objects.count() != row_size + 100:
        time.sleep(0.3)
    # Asking the filter method for more results than the default row_size
    # should raise an exception.
    with pytest.raises(Exception):
        Student.objects.filter()
    # Results are taken from Solr, ordered by the 'timestamp' sort parameter.
    results = mb.search(
        '-deleted:True', 'pyoko_models_student', **{
            'sort': 'timestamp desc',
            'fl': '_yz_rk, score',
            'rows': row_size + 100
        })
    # Ordered key list is created.
    ordered_key_list = [doc['_yz_rk'] for doc in results['docs']]
    # Getting data from Riak in an unordered way is tested.
    students = Student.objects.all()
    assert len(students) == row_size + 100
    assert students.adapter.ordered == False
    # Getting data from Riak in an ordered way is tested.
    temp_key_list = []
    students = Student.objects.order_by().all()
    assert students.adapter.ordered == True
    for student in students:
        temp_key_list.append(student.key)
    assert len(temp_key_list) == row_size + 100
    assert temp_key_list == ordered_key_list
    self.prepare_testbed(reset=True)

def test_json_to_model_to_json():
    st = Student()
    st.set_data(data)
    clean_value = st.clean_value()
    clean_data['timestamp'] = clean_value['timestamp']
    assert clean_data == clean_value

def test_model_to_json_expanded():
    d = data
    s = Student()
    s.number = d['number']
    s.deleted = d['deleted']
    # s.timestamp = d['timestamp']
    s.bio = d['bio']
    s.name = d['name']
    s.surname = d['surname']
    s.pno = d['pno']
    s.join_date = data['join_date']
    d = data['auth_info']
    ai = s.AuthInfo()
    ai.email = d['email']
    ai.password = d['password']
    ai.username = d['username']
    for ld in data['lectures']:
        lecture = s.Lectures()
        lecture.code = ld['code']
        lecture.idx = ld['idx']
        lecture.credit = ld['credit']
        lecture.name = ld['name']
        milm = lecture.NodeInListNode()
        milm.foo = ld['node_in_list_node']['foo']
        for atd in ld['attendance']:
            attendance = lecture.Attendance()
            attendance.attended = atd['attended']
            attendance.date = atd['date']
            attendance.idx = atd['idx']
            attendance.hour = atd['hour']
        for exam in ld['exams']:
            exm = lecture.Exams()
            exm.date = exam['date']
            exm.idx = exam['idx']
            exm.point = exam['point']
            exm.type = exam['type']
    clean_value = s.clean_value()
    clean_data['timestamp'] = clean_value['timestamp']
    assert clean_data == clean_value

def test_create_solr_schema():
    st = Student()
    fields = st._collect_index_fields()
    result = SchemaUpdater.get_schema_fields(fields)
    assert sorted(result) == sorted(test_data_solr_schema)

class CustomValidator(ModelValidator):
    students = ManyModelChoiceField(Student.select(), Student.name)

def test_get_multiple_objects_exception(self):
    self.prepare_testbed()
    s2 = Student(name='Foo').save()
    sleep(2)
    with pytest.raises(MultipleObjectsReturned):
        Student.objects.get()

def test_collect_index_fields():
    st = Student()
    result = st._collect_index_fields()
    sorted_result = sorted(result, key=lambda x: x[0])
    sorted_data = sorted(test_data_solr_fields, key=lambda x: x[0])
    assert sorted_result == sorted_data