def test_log_student_updated_added_event(self):
    """Check logging signal for newly created student"""
    # Attach our own root handler to capture the student signals' output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # Creating a student should emit an "added" log message.
        student = Student(first_name='Demo', last_name='Student')
        student.save()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         'Student added: Demo Student (ID: %d)\n' % student.id)
        # Updating the existing student should emit an "updated" message.
        student.ticket = '12345'
        student.save()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         'Student updated: Demo Student (ID: %d)\n' % student.id)
    finally:
        # Detach our handler even if an assertion fails, so the capture
        # stream does not leak into other tests.
        logging.root.removeHandler(handler)
def test_sync_print(self, get_tags):
    """--print must list the supported releases without storing any.

    Releases are printed newest-first and nothing is written to the
    NextcloudRelease table.
    """
    get_tags.side_effect = self._get_tags
    io = StringIO()
    call_command('syncnextcloudreleases', '--oldest-supported=11.0.0',
                 '--print', stdout=io)
    expected = '\n'.join([
        '12.0.5', '12.0.4', '12.0.3', '12.0.2', '12.0.1', '12.0.0',
        '11.0.7', '11.0.6', '11.0.5', '11.0.4', '11.0.3', '11.0.2',
        '11.0.1', '11.0.0',
    ]) + '\n'
    # FIX: assertEquals is a long-deprecated alias of assertEqual.
    self.assertEqual(0, NextcloudRelease.objects.count())
    io.seek(0)
    self.assertEqual(expected, io.read())
def test_log_group_updated_added_event(self):
    """Check logging signal for newly created group"""
    # Attach our own root handler to capture the group signals' output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # Creating a group should emit a "created" log message.
        group = Group(title='Demo Group')
        group.save()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         "Group created: Demo Group (ID: %d)\n" % group.id)
        # Updating the existing group should emit an "updated" message.
        group.notes = 'some group note'
        group.save()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         "Group updated: Demo Group (ID: %d)\n" % group.id)
    finally:
        # Detach our handler even if an assertion fails.
        logging.root.removeHandler(handler)
def connect(self):
    """Accept the websocket and open an SSH session from a one-shot
    HostTmp record identified by the 'unique' query parameter.

    Any failure is reported back to the client as a JSON message and
    the socket is closed.
    """
    try:
        self.accept()
        query_string = self.scope['query_string']
        connet_argv = QueryDict(query_string=query_string, encoding='utf-8')
        unique = connet_argv.get('unique')
        width = connet_argv.get('width')
        height = connet_argv.get('height')
        width = int(width)
        height = int(height)
        # One-shot connection record: read the credentials, then delete
        # the row immediately so it cannot be reused.
        connect_info = models.HostTmp.objects.get(unique=unique)
        host = connect_info.host
        port = connect_info.port
        user = connect_info.user
        auth = connect_info.auth
        pwd = connect_info.password
        pkey = connect_info.pkey
        connect_info.delete()
        if pwd:
            # Password is transported base64-encoded.
            password = base64.b64decode(pwd).decode('utf-8')
        else:
            password = None
        self.ssh = SSH(websocker=self, message=self.message)
        if auth == 'key':
            pkey = pkey  # NOTE(review): no-op assignment, kept as-is
            # Wrap the key text in a rewound file-like object so the SSH
            # client can read it like a key file.
            obj = StringIO()
            obj.write(pkey)
            obj.flush()
            obj.seek(0)
            self.pkey = obj
            self.ssh.connect(host=host, user=user, password=password,
                             pkey=self.pkey, port=port, pty_width=width,
                             pty_height=height)
        else:
            self.ssh.connect(host=host, user=user, password=password,
                             port=port, pty_width=width, pty_height=height)
    except Exception as e:
        # Report the failure to the client, then close the socket.
        self.message['status'] = 1
        self.message['message'] = str(e)
        message = json.dumps(self.message)
        self.send(message)
        self.close()
def test_filter_media(self):
    """Listing with --content-type media should show only .tar backups."""
    captured = StringIO()
    with patch('sys.stdout', captured):
        execute_from_command_line(
            ['', 'listbackups', '--content-type', 'media', '-q'])
    captured.seek(0)
    captured.readline()  # skip the header line
    for entry in captured.readlines():
        self.assertIn('.tar', entry)
def run_command(self, **options):
    """Run the ``fixtree`` command and return its rewound stdout stream."""
    options.setdefault('interactive', False)
    captured = StringIO()
    management.call_command('fixtree', stdout=captured, **options)
    captured.seek(0)
    return captured
def test_filter_encrypted(self):
    """Listing with --encrypted should show only .gpg backups."""
    captured = StringIO()
    with patch('sys.stdout', captured):
        execute_from_command_line(['', 'listbackups', '--encrypted', '-q'])
    captured.seek(0)
    captured.readline()  # skip the header line
    for entry in captured.readlines():
        self.assertIn('.gpg', entry)
def test_filter_media(self):
    """Listing with --content-type media should show only .media backups."""
    captured = StringIO()
    with patch('sys.stdout', captured):
        execute_from_command_line(
            ['', 'listbackups', '--content-type', 'media', '-q'])
    captured.seek(0)
    captured.readline()  # skip the header line
    for entry in captured.readlines():
        self.assertIn('.media', entry)
def test_logging(self):
    """The log decorator writes its message and delegates to the view."""
    log_stream = StringIO()
    logging_view = decorate(log("Got a request", out=log_stream), string_view)
    self.assertEqual("Hello World", logging_view(self.get_req))
    log_stream.seek(0)
    self.assertEqual("Got a request\n", log_stream.read())
def capture_stdout(target=None):
    """Temporarily redirect ``sys.stdout`` into *target*.

    Yields the capture target (a fresh StringIO when none is given).
    On exit the target is rewound so callers can read what was captured,
    and the original stdout is restored — even if the wrapped block
    raises, which the previous version did not guarantee.
    """
    original = sys.stdout
    if target is None:
        target = StringIO()
    sys.stdout = target
    try:
        yield target
    finally:
        # Rewind for the caller, then restore stdout unconditionally.
        target.seek(0)
        sys.stdout = original
def test_filter_not_compressed(self):
    """Listing with --not-compressed must exclude .gz backups."""
    captured = StringIO()
    with patch('sys.stdout', captured):
        execute_from_command_line(
            ['', 'listbackups', '--not-compressed', '-q'])
    captured.seek(0)
    captured.readline()  # skip the header line
    for entry in captured.readlines():
        self.assertNotIn('.gz', entry)
def connect(self):
    """
    Open the websocket connection and try to connect to the SSH host
    using the parameters passed in from the frontend.
    :return:
    """
    self.accept()
    query_string = self.scope.get('query_string')
    ssh_args = QueryDict(query_string=query_string, encoding='utf-8')
    width = ssh_args.get('width')
    height = ssh_args.get('height')
    port = ssh_args.get('port')
    width = int(width)
    height = int(height)
    port = int(port)
    auth = ssh_args.get('auth')
    ssh_key_name = ssh_args.get('ssh_key')
    passwd = ssh_args.get('password')
    host = ssh_args.get('host')
    user = ssh_args.get('user')
    if passwd:
        # Password arrives base64-encoded from the frontend.
        passwd = base64.b64decode(passwd).decode('utf-8')
    else:
        passwd = None
    self.ssh = SSH(websocker=self, message=self.message)
    ssh_connect_dict = {
        'host': host,
        'user': user,
        'port': port,
        'timeout': 30,
        'pty_width': width,
        'pty_height': height,
        'password': passwd
    }
    if auth == 'key':
        # ssh_key_file = os.path.join(settings.MEDIA_ROOT, ssh_key_name)
        # with open(ssh_key_file, 'r') as f:
        #     ssh_key = f.read()
        # Fetch the key material from the database (instead of a file).
        ssh_key = SshUser.objects.filter(username=user).values_list(
            'ssh_key', flat=True)[0]
        # Wrap the key text in a rewound file-like object for the client.
        string_io = StringIO()
        string_io.write(ssh_key)
        string_io.flush()
        string_io.seek(0)
        ssh_connect_dict['ssh_key'] = string_io
        # os.remove(ssh_key_file)
    self.ssh.connect(**ssh_connect_dict)
def test_compile_all(settings):
    """Every configured language is processed by compilejsi18n."""
    captured = StringIO()
    management.call_command('compilejsi18n', verbosity=1, stdout=captured)
    captured.seek(0)
    messages = [line.strip() for line in captured.readlines()]
    assert len(messages) == len(settings.LANGUAGES)
    for language_code, _name in settings.LANGUAGES:
        assert "processing language %s" % to_locale(language_code) in messages
def connect(self):
    """
    Open the websocket connection and try to connect to the SSH host
    using the parameters passed in from the frontend.
    :return:
    """
    self.accept()
    query_string = self.scope.get('query_string')
    ssh_args = QueryDict(query_string=query_string, encoding='utf-8')
    width = ssh_args.get('width')
    height = ssh_args.get('height')
    port = ssh_args.get('port')
    width = int(width)
    height = int(height)
    port = int(port)
    auth = ssh_args.get('auth')
    ssh_key_name = ssh_args.get('ssh_key')
    passwd = ssh_args.get('password')
    host = ssh_args.get('host')
    user = ssh_args.get('user')
    terminal_id = ssh_args.get("terminal_id")
    if passwd:
        # Password arrives base64-encoded from the frontend.
        passwd = base64.b64decode(passwd).decode('utf-8')
    else:
        passwd = None
    self.ssh = SSH(websocker=self, message=self.message)
    ssh_connect_dict = {
        'host': host,
        'user': user,
        'port': port,
        'timeout': 30,
        'pty_width': width,
        'pty_height': height,
        'password': passwd,
        'terminal_id': terminal_id
    }
    if auth == 'key':
        # Read the uploaded private key, wrap it in a rewound file-like
        # object, and remove the temporary file afterwards.
        ssh_key_file = os.path.join(TMP_DIR, ssh_key_name)
        with open(ssh_key_file, 'r') as f:
            ssh_key = f.read()
        string_io = StringIO()
        string_io.write(ssh_key)
        string_io.flush()
        string_io.seek(0)
        ssh_connect_dict['ssh_key'] = string_io
        os.remove(ssh_key_file)
    self.ssh.connect(**ssh_connect_dict)
def test_obtain_token_command_should_produce_valid_token(monkeypatch, user):
    """The obtain_token command must print a decodable JWT for the user."""
    captured = StringIO()
    monkeypatch.setattr(settings, "DEBUG", True)
    call_command('obtain_token', str(user.pk), stdout=captured)
    captured.seek(0)
    token_text = captured.read()
    claims = JSONWebTokenAuthentication.jwt_decode_token(
        token_text.strip().encode())
    assert claims['user_id'] == user.pk
def dump_model_data(self):
    """Dump the dynamic model's data via dumpdata; return it parsed as JSON."""
    # Make sure to remove the model from the app cache because we're
    # actually testing it's correctly loaded.
    buffer = StringIO()
    remove_from_app_cache(self.model_cls)
    call_command(
        'dumpdata', str(self.model_def), stdout=buffer, commit=False
    )
    buffer.seek(0)
    return json.load(buffer)
def test_stringio(self):
    """A StringIO instance can be passed as the content argument to save."""
    source = StringIO()
    source.write('content')
    source.seek(0)
    # Persist the stream, then read the written file back.
    temp_storage.save('tests/stringio', source)
    self.assertTrue(temp_storage.exists('tests/stringio'))
    with temp_storage.open('tests/stringio') as stored:
        self.assertEqual(stored.read(), b'content')
def test_invalid_tenant_user_model_format(self):
    """A malformed TENANCY_TENANT_MODEL setting must be logged."""
    stream = StringIO()
    handler = logging.StreamHandler(stream)
    logger.addHandler(handler)
    try:
        with self.settings(TENANCY_TENANT_MODEL='invalid'):
            pass
    finally:
        # Detach the handler even if the override raises, so the capture
        # stream does not leak into other tests.
        logger.removeHandler(handler)
    stream.seek(0)
    self.assertIn(
        "TENANCY_TENANT_MODEL must be of the form 'app_label.model_name'",
        stream.read())
def load_fixtures(fixtures):
    """Load *fixtures* via loaddata and return the installed-object count."""
    out_stream = StringIO()
    err_stream = StringIO()
    call_command(
        "loaddata",
        *fixtures,
        stdout=out_stream,
        stderr=err_stream,
        ignore=True,
        database=DEFAULT_DB_ALIAS,
        verbosity=1,
    )
    out_stream.seek(0)
    summary = out_stream.read()
    # loaddata reports e.g. "Installed 12 object(s) ..." on stdout.
    return int(re.match(r"Installed\s([0-9]+)\s.*", summary).groups()[0])
def test_invalid_tenant_user_model_format(self):
    """A malformed TENANCY_TENANT_MODEL setting must be logged."""
    stream = StringIO()
    handler = logging.StreamHandler(stream)
    logger.addHandler(handler)
    try:
        with self.settings(TENANCY_TENANT_MODEL='invalid'):
            pass
    finally:
        # Detach the handler even if the override raises, so the capture
        # stream does not leak into other tests.
        logger.removeHandler(handler)
    stream.seek(0)
    self.assertIn(
        "TENANCY_TENANT_MODEL must be of the form 'app_label.model_name'",
        stream.read()
    )
def load_fixtures(fixtures):
    """Load *fixtures* via loaddata and return the installed-object count."""
    out_stream = StringIO()
    err_stream = StringIO()
    call_command(
        'loaddata',
        *fixtures,
        stdout=out_stream,
        stderr=err_stream,
        ignore=True,
        database=DEFAULT_DB_ALIAS,
        verbosity=1,
    )
    out_stream.seek(0)
    summary = out_stream.read()
    # loaddata reports e.g. "Installed 12 object(s) ..." on stdout.
    return int(re.match(r'Installed\s([0-9]+)\s.*', summary).groups()[0])
def test_compile_with_output_format(settings, locale, output_format):
    """compilejsi18n should honour the requested output format."""
    captured = StringIO()
    management.call_command('compilejsi18n', verbosity=1, stdout=captured,
                            locale=locale, outputformat=output_format)
    captured.seek(0)
    messages = [line.strip() for line in captured.readlines()]
    assert messages == ["processing language %s" % to_locale(locale)]
    expected_path = os.path.join(
        settings.STATIC_ROOT, "jsi18n", locale,
        "djangojs.%s" % output_format)
    assert os.path.exists(expected_path)
def test_verbosity(self):
    """createtenant at verbosity 3 should report each created object,
    line by line, in creation order."""
    stdout = StringIO()
    call_command('createtenant', 'tenant', stdout=stdout, verbosity=3)
    tenant = Tenant.objects.get(name='tenant')
    stdout.seek(0)
    connection = connections[tenant._state.db]
    try:
        if connection.vendor == 'postgresql':
            # PostgreSQL backends report the tenant schema first.
            self.assertIn(tenant.db_schema, stdout.readline())
        for model in TenantModelBase.references:
            # Each tenant-aware model prints its name, then its table.
            self.assertIn(model._meta.object_name, stdout.readline())
            self.assertIn(model._meta.db_table, stdout.readline())
        self.assertIn('Installing indexes ...', stdout.readline())
    finally:
        # Always drop the tenant so later tests start clean.
        tenant.delete()
def connect(self):
    """Accept the websocket and open an SSH session described by the
    connection's query string (host, user, port, auth method, pty size)."""
    self.accept()
    query_string = self.scope.get('query_string')
    # print(query_string)
    ssh_args = QueryDict(query_string=query_string, encoding='utf-8')
    width = int(ssh_args.get('width'))
    height = int(ssh_args.get('height'))
    port = int(ssh_args.get('port'))
    auth = ssh_args.get('auth')
    ssh_key_name = ssh_args.get('ssh_key')
    passwd = ssh_args.get('password')
    host = ssh_args.get('host')
    user = ssh_args.get('user')
    if passwd:
        # Password arrives base64-encoded from the frontend.
        passwd = base64.b64decode(passwd).decode('utf-8')
    else:
        passwd = None
    self.ssh = SSH(websocket=self, message=self.message)
    ssh_connect_dict = {
        'host': host,
        'user': user,
        'port': port,
        'timeout': 30,
        'pty_width': width,
        'pty_height': height,
        'password': passwd
    }
    if auth == 'key':
        # Read the uploaded private key, wrap it in a rewound file-like
        # object, and remove the temporary key file afterwards.
        ssh_key_file = os.path.join(TMP_DIR, ssh_key_name)
        with open(ssh_key_file, 'r') as fp:
            ssh_key = fp.read()
        string_io = StringIO()
        string_io.write(ssh_key)
        string_io.flush()
        # BUG FIX: seek() requires an offset argument; the original
        # string_io.seek() raised TypeError at runtime. Rewind to the
        # start so the SSH client can read the key from the beginning.
        string_io.seek(0)
        ssh_connect_dict['ssh_key'] = string_io
        os.remove(ssh_key_file)
    # print(ssh_connect_dict)
    self.ssh.connect(**ssh_connect_dict)
class InvalidModelTestCase(unittest.TestCase):
    """Import an appliation with invalid models and test the exceptions."""

    def setUp(self):
        # Make sure sys.stdout is not a tty so that we get errors without
        # coloring attached (makes matching the results easier). We restore
        # sys.stderr afterwards.
        self.old_stdout = sys.stdout
        self.stdout = StringIO()
        sys.stdout = self.stdout
        # This test adds dummy applications to the app cache. These
        # need to be removed in order to prevent bad interactions
        # with the flush operation in other tests.
        self.old_app_models = copy.deepcopy(cache.app_models)
        self.old_app_store = copy.deepcopy(cache.app_store)

    def tearDown(self):
        # Restore the pristine app cache captured in setUp.
        cache.app_models = self.old_app_models
        cache.app_store = self.old_app_store
        cache._get_models_cache = {}
        sys.stdout = self.old_stdout

    # Technically, this isn't an override -- TEST_SWAPPED_MODEL must be
    # set to *something* in order for the test to work. However, it's
    # easier to set this up as an override than to require every developer
    # to specify a value in their test settings.
    @override_settings(
        TEST_SWAPPED_MODEL='invalid_models.Target',
        TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model',
        TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target',
    )
    def test_invalid_models(self):
        # Validation errors are written to self.stdout (which replaced
        # sys.stdout in setUp) and compared line-by-line against the
        # expected errors declared in the invalid_models module itself.
        try:
            module = load_app("modeltests.invalid_models.invalid_models")
        except Exception:
            self.fail('Unable to load invalid model module')
        get_validation_errors(self.stdout, module)
        self.stdout.seek(0)
        error_log = self.stdout.read()
        actual = error_log.split('\n')
        expected = module.model_errors.split('\n')
        unexpected = [err for err in actual if err not in expected]
        missing = [err for err in expected if err not in actual]
        self.assertFalse(unexpected, "Unexpected Errors: " + '\n'.join(unexpected))
        self.assertFalse(missing, "Missing Errors: " + '\n'.join(missing))
class InvalidModelTestCase(unittest.TestCase):
    """Import an appliation with invalid models and test the exceptions."""

    def setUp(self):
        # Make sure sys.stdout is not a tty so that we get errors without
        # coloring attached (makes matching the results easier). We restore
        # sys.stderr afterwards.
        self.old_stdout = sys.stdout
        self.stdout = StringIO()
        sys.stdout = self.stdout
        # This test adds dummy applications to the app cache. These
        # need to be removed in order to prevent bad interactions
        # with the flush operation in other tests.
        self.old_app_models = copy.deepcopy(cache.app_models)
        self.old_app_store = copy.deepcopy(cache.app_store)

    def tearDown(self):
        # Restore the pristine app cache captured in setUp.
        cache.app_models = self.old_app_models
        cache.app_store = self.old_app_store
        cache._get_models_cache = {}
        sys.stdout = self.old_stdout

    # Technically, this isn't an override -- TEST_SWAPPED_MODEL must be
    # set to *something* in order for the test to work. However, it's
    # easier to set this up as an override than to require every developer
    # to specify a value in their test settings.
    @override_settings(
        TEST_SWAPPED_MODEL='invalid_models.ReplacementModel',
        TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model',
        TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target',
    )
    def test_invalid_models(self):
        # Validation errors are written to self.stdout (which replaced
        # sys.stdout in setUp) and compared line-by-line against the
        # expected errors declared in the invalid_models module itself.
        try:
            module = load_app("modeltests.invalid_models.invalid_models")
        except Exception:
            self.fail('Unable to load invalid model module')
        get_validation_errors(self.stdout, module)
        self.stdout.seek(0)
        error_log = self.stdout.read()
        actual = error_log.split('\n')
        expected = module.model_errors.split('\n')
        unexpected = [err for err in actual if err not in expected]
        missing = [err for err in expected if err not in actual]
        self.assertFalse(unexpected, "Unexpected Errors: " + '\n'.join(unexpected))
        self.assertFalse(missing, "Missing Errors: " + '\n'.join(missing))
def webhook_pull(request, remote='origin'):
    """Git-pull *remote* and sync waliki pages; returns a JSON report.

    Non-POST requests get a hint about where to POST instead.
    """
    if request.method != 'POST':
        return HttpResponse(
            "POST to %s" % reverse("waliki_webhook_pull", args=(remote,)))
    try:
        log = Git().pull(remote)
        sync_out = StringIO()
        call_command('sync_waliki', stdout=sync_out)
        sync_out.seek(0)
        payload = {'pull': log, 'sync': sync_out.read()}
        status_code = 200
    except Exception as e:
        payload = {'error': text_type(e)}
        status_code = 500
    return HttpResponse(json.dumps(payload), status=status_code,
                        content_type="application/json")
def test_verbosity(self):
    """createtenant at verbosity 3 should report each created object,
    line by line, in creation order."""
    stdout = StringIO()
    call_command('createtenant', 'tenant', stdout=stdout, verbosity=3)
    tenant = Tenant.objects.get(name='tenant')
    stdout.seek(0)
    connection = connections[tenant._state.db]
    try:
        if connection.vendor == 'postgresql':
            # PostgreSQL backends report the tenant schema first.
            self.assertIn(tenant.db_schema, stdout.readline())
        for model in TenantModelBase.references:
            self.assertIn(model._meta.object_name, stdout.readline())
            # Proxy models have no table of their own.
            if not model._meta.proxy:
                self.assertIn(model._meta.db_table, stdout.readline())
        self.assertIn('Installing indexes ...', stdout.readline())
    finally:
        # Always drop the tenant so later tests start clean.
        tenant.delete()
def validate(self, app=None, display_num_errors=False):
    """
    Validates the given app, raising CommandError for any errors.

    If app is None, then this will validate all installed apps.
    """
    from django.core.management.validation import get_validation_errors
    stream = StringIO()
    num_errors = get_validation_errors(stream, app)
    if num_errors:
        stream.seek(0)
        raise CommandError(
            "One or more models did not validate:\n%s" % stream.read())
    if display_num_errors:
        suffix = '' if num_errors == 1 else 's'
        self.stdout.write("%s error%s found" % (num_errors, suffix))
def validate(self, app_config=None, display_num_errors=False):
    """
    Validates the given app, raising CommandError for any errors.

    If app_config is None, then this will validate all installed apps.
    """
    from django.core.management.validation import get_validation_errors
    stream = StringIO()
    num_errors = get_validation_errors(stream, app_config)
    if num_errors:
        stream.seek(0)
        raise CommandError(
            "One or more models did not validate:\n%s" % stream.read())
    if display_num_errors:
        suffix = '' if num_errors == 1 else 's'
        self.stdout.write("%s error%s found" % (num_errors, suffix))
def connect(self):
    """Open an SSH channel to ``self.ssh_server_ip`` for the websocket
    user, using their stored key when available, else their password.

    On failure the error is echoed to the client and the socket closed;
    on success an interactive xterm shell channel is opened.
    """
    self.accept()
    print(self.user)
    # user = SshUser.objects.get(username =self.user)
    user = User.objects.filter(username=self.user).first()
    # print(user)
    sshuser = SshUser.objects.filter(username_id=user.id).first()
    # print(sshuser)
    try:
        self.ssh.load_system_host_keys()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        if sshuser.is_key:
            # Wrap the stored key text in a rewound file-like object.
            string_io = StringIO()
            string_io.write(sshuser.ssh_key)
            string_io.flush()
            string_io.seek(0)
            ssh_key = string_io
            passwd = sshuser.password
            if passwd:
                # Key passphrase is stored base64-encoded.
                password = base64.b64decode(passwd).decode('utf-8')
            else:
                password = None
            # Try each supported key type until one parses successfully.
            key = get_key_obj(paramiko.RSAKey, pkey_obj=ssh_key, password=password) or \
                get_key_obj(paramiko.DSSKey, pkey_obj=ssh_key, password=password) or \
                get_key_obj(paramiko.ECDSAKey, pkey_obj=ssh_key, password=password) or \
                get_key_obj(paramiko.Ed25519Key, pkey_obj=ssh_key, password=password)
            self.ssh.connect(username=user.username, hostname=self.ssh_server_ip, port=22, pkey=key, timeout=5)
        else:
            self.ssh.connect(username=user.username, hostname=self.ssh_server_ip, port=22, password=user.password, timeout=5)
        transport = self.ssh.get_transport()
        self.chan = transport.open_session()
        self.chan.get_pty(term='xterm', width=1400, height=self.height)
        self.chan.invoke_shell()
        # Disconnect if there is no input for 3 minutes.
        self.chan.settimeout(60 * 3)
    except Exception as e:
        print('用户{}通过webssh连接{}失败!原因:{}'.format(user.username, self.ssh_server_ip, e))
        self.send('用户{}通过webssh连接{}失败!原因:{}'.format(user.username, self.ssh_server_ip, e))
        self.close()
    # Start the background reader thread as a daemon.
    self.t1.setDaemon(True)
    self.t1.start()
def test_compile(settings, locale):
    """Compiling a single locale writes a catalog containing its strings."""
    captured = StringIO()
    management.call_command('compilejsi18n', verbosity=1, stdout=captured,
                            locale=to_locale(locale))
    captured.seek(0)
    messages = [line.strip() for line in captured.readlines()]
    assert messages == ["processing language %s" % to_locale(locale)]
    filename = os.path.join(settings.STATICI18N_ROOT, "jsi18n",
                            to_locale(locale), "djangojs.js")
    assert os.path.exists(filename)
    with io.open(filename, "r", encoding="utf-8") as fp:
        content = fp.read()
    assert LOCALIZED_CONTENT[locale] in content
def test_upload_file_valid(self):
    """Test upload of an ABC file."""
    from django.contrib.auth.models import User
    from django.utils.six import StringIO
    self.client.force_login(User.objects.get(username='******'))
    # Minimal ABC-notation fragment forming one complete song.
    file = StringIO('X:2\nT:Fragment\nK:Bb\nbdfdbdfd|b8||\n\n')
    response = self.client.post('/upload/', {'file': file})
    self.assertContains(response, 'processing complete.')
    self.assertContains(response, '1 new song')
    self.assertContains(response, "Adding new collection 'upload testuser")
    # upload it again to exercise 'existing' branches
    file.seek(0)
    response = self.client.post('/upload/', {'file': file})
    self.assertContains(response, 'processing complete.')
    self.assertContains(response, '1 existing song')
    self.assertContains(response, "Adding new collection 'upload testuser")
def test_compile_with_namespace(settings, locale, namespace):
    """A namespaced catalog should attach the namespace to ``global``."""
    captured = StringIO()
    management.call_command('compilejsi18n', verbosity=1, stdout=captured,
                            locale=locale, outputformat='js',
                            namespace=namespace)
    captured.seek(0)
    messages = [line.strip() for line in captured.readlines()]
    assert messages == ["processing language %s" % to_locale(locale)]
    file_path = os.path.join(settings.STATIC_ROOT, "jsi18n", locale,
                             "djangojs.js")
    assert os.path.exists(file_path)
    generated_content = open(file_path).read()
    assert 'global.MyBlock = MyBlock;' in generated_content
def test_log_student_deleted_event(self):
    """Check logging signals for deleted student"""
    student = Student(first_name='Demo', last_name='Student')
    student.save()
    # Attach our own root handler to capture the delete signal's output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # Delete the existing student and check the last logged line.
        sid = student.id
        student.delete()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         'Student deleted: Demo Student (ID: %d)\n' % sid)
    finally:
        # Detach our handler even if the assertion fails.
        logging.root.removeHandler(handler)
def test_feedback_applicant_by_command(self, mock_request):
    """Only applicants whose SOW end_date is today get completed."""
    # PREPARE DATA
    self.init_mock(mock_request)
    # Opp requested with deadline for today
    opp = self.create_opportunity()
    # First applicant: SOW end_date is today, so it should be completed.
    user = self.get_user()
    request_mock_account.add_mock(
        user, is_consultant=True, is_superuser=False)
    app_first = models.Applicant.objects.create_open_applicant(
        user, user, opp, faker.text())
    sow_data = self.get_sow_data()
    sow_data['end_date'] = timezone.now().date()
    opp.assign(self.super_user, app_first, faker.text(), **sow_data)
    # Second applicant: default SOW dates, should stay open.
    user = self.get_user()
    request_mock_account.add_mock(
        user, is_consultant=True, is_superuser=False)
    app = models.Applicant.objects.create_open_applicant(
        user, user, opp, faker.text())
    sow_data = self.get_sow_data()
    opp.assign(self.super_user, app, faker.text(), **sow_data)
    out = StringIO()
    err = StringIO()
    # ACTION
    management.call_command(
        'opportunity_feedback', stdout=out, stderr=err)
    # FIX: print the captured stderr text, not the StringIO object's repr.
    print(err.getvalue())
    # ASSERTS
    out.seek(0)
    applicant_pk = out.readline().replace('\n', '')
    self.assertEqual(
        applicant_pk, 'Completed: {}'.format(app_first.pk.__str__()))
    app.refresh_from_db()
    self.assertFalse(app.is_completed)
    app_first.refresh_from_db()
    self.assertTrue(app_first.is_completed)
def test_verbosity(self):
    """createtenant at verbosity 3 reports each concrete model and the
    auto-created many-to-many through tables, line by line."""
    stdout = StringIO()
    call_command('createtenant', 'tenant', stdout=stdout, verbosity=3)
    tenant = Tenant.objects.get(name='tenant')
    stdout.seek(0)
    connection = connections[tenant._state.db]
    try:
        if connection.vendor == 'postgresql':
            # PostgreSQL backends report the tenant schema first.
            self.assertIn(tenant.db_schema, stdout.readline())
        for model in TenantModelBase.references:
            # Proxy and auto-created models produce no output of their own.
            if not model._meta.proxy and not model._meta.auto_created:
                self.assertIn(model._meta.object_name, stdout.readline())
                self.assertIn(model._meta.db_table, stdout.readline())
                for m2m in model._meta.many_to_many:
                    through_opts = get_remote_field(m2m).through._meta
                    # Only auto-created through tables are reported here.
                    if through_opts.auto_created:
                        self.assertIn(through_opts.db_table, stdout.readline())
    finally:
        # Always drop the tenant so later tests start clean.
        tenant.delete()
def test_log_student_deleted_event(self):
    """Check logging signals for deleted student"""
    student = Student(first_name='Demo', last_name='Student')
    student.save()
    # Attach our own root handler to capture the delete signal's output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # Delete the existing student and check the last logged line.
        sid = student.id
        student.delete()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         'Student Demo Student deleted (ID: %d)\n' % sid)
    finally:
        # Detach our handler even if the assertion fails.
        logging.root.removeHandler(handler)
def test_log_group_deleted_event(self):
    """Check logging group for deleted student"""
    group = Group(title='Demo Group 1')
    group.save()
    # Attach our own root handler to capture the delete signal's output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # Delete the existing group and check the last logged line.
        sid = group.id
        group.delete()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         'Group Demo Group 1 deleted (ID: %d)\n' % sid)
    finally:
        # Detach our handler even if the assertion fails.
        logging.root.removeHandler(handler)
def test_log_student_updated_added_event(self):
    """Creating then updating a student logs 'added' then 'updated'."""
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        student = Student(first_name='Demo', last_name='Student')
        student.save()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         'Student added: Demo Student (ID: %d)\n' % student.id)
        # FIX: the model field is 'ticket' (as in the sibling test);
        # 'ticker' was a typo that set a nonexistent attribute instead
        # of exercising a real field update.
        student.ticket = '12345'
        student.save()
        out.seek(0)
        self.assertEqual(
            out.readlines()[-1],
            'Student updated: Demo Student (ID: %d)\n' % student.id)
    finally:
        # Detach our handler even if an assertion fails.
        logging.root.removeHandler(handler)
def test_compile_no_use_i18n(settings, locale):
    """Tests compilation when `USE_I18N = False`.

    In this scenario, only the `settings.LANGUAGE_CODE` locale is
    processed (it defaults to `en-us` for Django projects).
    """
    settings.USE_I18N = False
    captured = StringIO()
    management.call_command('compilejsi18n', verbosity=1, stdout=captured,
                            locale=to_locale(locale))
    captured.seek(0)
    messages = [line.strip() for line in captured.readlines()]
    assert messages == ["processing language %s" % to_locale(locale)]
    expected_path = os.path.join(settings.STATIC_ROOT, "jsi18n",
                                 to_locale(locale), "djangojs.js")
    assert os.path.exists(expected_path)
def test_command_data(self):
    """testing result of command execute"""
    HttpReqFactory()
    out_stream = StringIO()
    err_stream = StringIO()
    call_command('print_objects_count',
                 stdout=out_stream, stderr=err_stream)
    out_stream.seek(0)
    err_stream.seek(0)
    # The command prints one line per model on each stream, in app order.
    for app in models.get_apps():
        for model in models.get_models(app):
            objects_count = model.objects.count()
            expected_out = ('%s: objects: %s\n'
                            % (model.__name__, objects_count))
            expected_err = ('error: %s: objects: %s\n'
                            % (model.__name__, objects_count))
            self.assertEqual(out_stream.readline(), expected_out)
            self.assertEqual(err_stream.readline(), expected_err)
def test_log_monthjournal_changes(self): """Test logging signal for changes in monthjournal""" # add own root handler to catch group signals output out = StringIO() handler = logging.StreamHandler(out) logging.root.addHandler(handler) student, created = Student.objects.get_or_create( first_name='Demo', last_name='Student', ticket='123') monthjournal, created = MonthJournal.objects.get_or_create( student=student, date=datetime.today()) monthjournal.present_day1 = True # check output file content out.seek(0) self.assertEqual(out.readlines()[-1], "MonthJournal updated: Demo Student (Journal ID: %d)\n" % monthjournal.id) # remove our handler from root logger logging.root.removeHandler(handler)
def test_log_group_deleted_event(self):
    """Check logging signal for deleted group"""
    # Attach our own root handler to capture the group signals' output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # Create then delete a group; this should log a "deleted" message.
        group = Group(title='Demo Group')
        group.save()
        group_id = group.id
        group.delete()
        out.seek(0)
        self.assertEqual(out.readlines()[-1],
                         "Group deleted: Demo Group (ID: %d)\n" % group_id)
    finally:
        # Detach our handler even if the assertion fails.
        logging.root.removeHandler(handler)
class InvalidModelTestCase(unittest.TestCase):
    """Import an appliation with invalid models and test the exceptions."""

    def setUp(self):
        # Make sure sys.stdout is not a tty so that we get errors without
        # coloring attached (makes matching the results easier). We restore
        # sys.stderr afterwards.
        self.old_stdout = sys.stdout
        self.stdout = StringIO()
        sys.stdout = self.stdout
        # This test adds dummy applications to the app cache. These
        # need to be removed in order to prevent bad interactions
        # with the flush operation in other tests.
        self.old_app_models = copy.deepcopy(cache.app_models)
        self.old_app_store = copy.deepcopy(cache.app_store)

    def tearDown(self):
        # Restore the pristine app cache captured in setUp.
        cache.app_models = self.old_app_models
        cache.app_store = self.old_app_store
        cache._get_models_cache = {}
        sys.stdout = self.old_stdout

    def test_invalid_models(self):
        # Validation errors are written to self.stdout (which replaced
        # sys.stdout in setUp) and compared line-by-line against the
        # expected errors declared in the invalid_models module itself.
        try:
            module = load_app("modeltests.invalid_models.invalid_models")
        except Exception:
            self.fail('Unable to load invalid model module')
        count = get_validation_errors(self.stdout, module)
        self.stdout.seek(0)
        error_log = self.stdout.read()
        actual = error_log.split('\n')
        expected = module.model_errors.split('\n')
        unexpected = [err for err in actual if err not in expected]
        missing = [err for err in expected if err not in actual]
        self.assertFalse(unexpected, "Unexpected Errors: " + '\n'.join(unexpected))
        self.assertFalse(missing, "Missing Errors: " + '\n'.join(missing))
class InvalidModelTestCase(unittest.TestCase):
    """Import an appliation with invalid models and test the exceptions."""

    def setUp(self):
        # Make sure sys.stdout is not a tty so that we get errors without
        # coloring attached (makes matching the results easier). We restore
        # sys.stderr afterwards.
        self.old_stdout = sys.stdout
        self.stdout = StringIO()
        sys.stdout = self.stdout

    def tearDown(self):
        sys.stdout = self.old_stdout

    # Technically, this isn't an override -- TEST_SWAPPED_MODEL must be
    # set to *something* in order for the test to work. However, it's
    # easier to set this up as an override than to require every developer
    # to specify a value in their test settings.
    @override_settings(
        INSTALLED_APPS=['invalid_models_tests.invalid_models'],
        TEST_SWAPPED_MODEL='invalid_models.ReplacementModel',
        TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model',
        TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target',
    )
    def test_invalid_models(self):
        # Validation errors are written to self.stdout (which replaced
        # sys.stdout in setUp) and compared line-by-line against the
        # expected errors declared on the app's models module.
        app_config = apps.get_app_config("invalid_models")
        get_validation_errors(self.stdout, app_config)
        self.stdout.seek(0)
        error_log = self.stdout.read()
        actual = error_log.split('\n')
        expected = app_config.models_module.model_errors.split('\n')
        unexpected = [err for err in actual if err not in expected]
        missing = [err for err in expected if err not in actual]
        self.assertFalse(unexpected, "Unexpected Errors: " + '\n'.join(unexpected))
        self.assertFalse(missing, "Missing Errors: " + '\n'.join(missing))
def test_log_contact_admin(self):
    """Test log message when mail was sent"""
    # Attach our own root handler to capture the contact-form log output.
    out = StringIO()
    handler = logging.StreamHandler(out)
    logging.root.addHandler(handler)
    try:
        # prepare client and login as administrator
        client = Client()
        client.login(username='******', password='******')
        # make form submit
        client.post(reverse('contact_admin'), {
            'from_email': '*****@*****.**',
            'subject': 'test email',
            'message': 'test email message',
        })
        # check log message
        out.seek(0)
        self.assertIn("A message via Contact Form was sent",
                      out.readlines()[-1])
    finally:
        # Detach our handler even if the assertion fails.
        logging.root.removeHandler(handler)
class DebugSQLTextTestResult(unittest.TextTestResult):
    """TextTestResult that captures ``django.db.backends`` debug logging
    per test and attaches the captured SQL to failure/error reports."""

    def __init__(self, stream, descriptions, verbosity):
        self.logger = logging.getLogger('django.db.backends')
        self.logger.setLevel(logging.DEBUG)
        super(DebugSQLTextTestResult, self).__init__(stream, descriptions, verbosity)

    def startTest(self, test):
        # Fresh capture stream and handler for every test.
        self.debug_sql_stream = StringIO()
        self.handler = logging.StreamHandler(self.debug_sql_stream)
        self.logger.addHandler(self.handler)
        super(DebugSQLTextTestResult, self).startTest(test)

    def stopTest(self, test):
        super(DebugSQLTextTestResult, self).stopTest(test)
        self.logger.removeHandler(self.handler)
        if self.showAll:
            # In verbose mode, echo the captured SQL after every test.
            self.debug_sql_stream.seek(0)
            self.stream.write(self.debug_sql_stream.read())
            self.stream.writeln(self.separator2)

    def addError(self, test, err):
        super(DebugSQLTextTestResult, self).addError(test, err)
        # Append the captured SQL to the recorded error tuple.
        self.debug_sql_stream.seek(0)
        self.errors[-1] = self.errors[-1] + (self.debug_sql_stream.read(),)

    def addFailure(self, test, err):
        super(DebugSQLTextTestResult, self).addFailure(test, err)
        # Append the captured SQL to the recorded failure tuple.
        self.debug_sql_stream.seek(0)
        self.failures[-1] = self.failures[-1] + (self.debug_sql_stream.read(),)

    def printErrorList(self, flavour, errors):
        # Each entry carries (test, traceback text, captured SQL).
        for test, err, sql_debug in errors:
            self.stream.writeln(self.separator1)
            self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
            self.stream.writeln(self.separator2)
            self.stream.writeln("%s" % err)
            self.stream.writeln(self.separator2)
            self.stream.writeln("%s" % sql_debug)
def test_superuser_creation_prompt(self):
    """Interactive createtenant must not report a superuser creation."""
    captured = StringIO()
    call_command('createtenant', 'tenant', stdout=captured, interactive=True)
    captured.seek(0)
    self.assertNotIn('Superuser created successfully.', captured.read())
    Tenant.objects.get(name='tenant').delete()
import collections