def _document_api_PUT(request, document_slug, document_locale):
    """Handle PUT requests for the document_api view.

    Accepts three request body encodings (JSON, multipart/form-data, or a raw
    HTML fragment/document), then either revises an existing Document or
    creates a new one at the given slug/locale.

    Returns:
        400 on unsupported content type or a body parsing error,
        412 when an If-Match ETag precondition fails,
        201 (with Location header) when a new document was created,
        205 when an existing document was revised.
    """
    # Try parsing one of the supported content types from the request
    try:
        content_type = request.META.get('CONTENT_TYPE', '')
        if content_type.startswith('application/json'):
            data = json.loads(request.body)
        elif content_type.startswith('multipart/form-data'):
            # NOTE(review): `files` is parsed but never used below.
            parser = MultiPartParser(request.META,
                                     StringIO(request.body),
                                     request.upload_handlers,
                                     request.encoding)
            data, files = parser.parse()
        elif content_type.startswith('text/html'):
            # TODO: Refactor this into wiki.content ?
            # First pass: Just assume the request body is an HTML fragment.
            html = request.body
            data = dict(content=html)
            # Second pass: Try parsing the body as a fuller HTML document,
            # and scrape out some of the interesting parts.
            try:
                doc = pq(html)
                head_title = doc.find('head title')
                if head_title.length > 0:
                    data['title'] = head_title.text()
                body_content = doc.find('body')
                if body_content.length > 0:
                    data['content'] = to_html(body_content)
            except Exception:
                # Best-effort scrape: fall back to the fragment assumption.
                pass
        else:
            resp = HttpResponse()
            resp.status_code = 400
            resp.content = ugettext(
                "Unsupported content-type: %s") % content_type
            return resp
    except Exception as e:
        # Any parsing failure (bad JSON, malformed multipart, ...) is a 400.
        resp = HttpResponse()
        resp.status_code = 400
        resp.content = ugettext("Request parsing error: %s") % e
        return resp
    try:
        # Look for existing document to edit:
        doc = Document.objects.get(locale=document_locale,
                                   slug=document_slug)
        section_id = request.GET.get('section', None)
        is_new = False
        # Use ETags to detect mid-air edit collision
        # see: http://www.w3.org/1999/04/Editing/
        if_match = request.META.get('HTTP_IF_MATCH')
        if if_match:
            try:
                expected_etags = parse_etags(if_match)
            except ValueError:
                expected_etags = []
            # Django's parse_etags returns a list of quoted rather than
            # un-quoted ETags starting with version 1.11.
            current_etag = quote_etag(
                calculate_etag(doc.get_html(section_id)))
            if current_etag not in expected_etags:
                resp = HttpResponse()
                resp.status_code = 412
                resp.content = ugettext('ETag precondition failed')
                return resp
    except Document.DoesNotExist:
        # TODO: There should be a model utility for creating a doc...
        # Let's see if this slug path implies a parent...
        slug_parts = split_slug(document_slug)
        if not slug_parts['parent']:
            # Apparently, this is a root page!
            parent_doc = None
        else:
            # There's a parent implied, so make sure we can find it.
            parent_doc = get_object_or_404(Document,
                                           locale=document_locale,
                                           slug=slug_parts['parent'])
        # Create and save the new document; we'll revise it immediately.
        doc = Document(slug=document_slug,
                       locale=document_locale,
                       title=data.get('title', document_slug),
                       parent_topic=parent_doc)
        doc.save()
        section_id = None  # No section editing for new document!
        is_new = True
    # Record the revision and queue a re-render of the document.
    new_rev = doc.revise(request.user, data, section_id)
    doc.schedule_rendering('max-age=0')
    # Log the action against the API key used for this request.
    request.authkey.log('created' if is_new else 'updated',
                        new_rev, data.get('summary', None))
    resp = HttpResponse()
    if is_new:
        resp['Location'] = request.build_absolute_uri(
            doc.get_absolute_url())
        resp.status_code = 201
    else:
        resp.status_code = 205
    return resp
def test_no_args_provided(self):
    """Running init_test_data with no load argument prints usage guidance."""
    captured = StringIO()
    call_command('init_test_data', stdout=captured)
    expected = 'No load argument supplied. Use --help to get load options'
    self.assertIn(expected, captured.getvalue())
def setUp(self):
    """Pick an existing Jar fixture and prepare an output capture buffer."""
    self.out = StringIO()
    self.jar = Jar.objects.all()[0]
def test_command_with_invalid_database(self):
    """load_entries must reject an unknown --database alias."""
    with self.assertRaises(CommandError):
        call_command('load_entries', 'path/to/file.tab',
                     '--database', 'INVALID', stdout=StringIO())
def test_install_labels_command(self):
    """install_labels should report the constraints it creates."""
    captured = StringIO()
    call_command('install_labels', stdout=captured)
    expected = ('Creating unique constraint for title on label Book '
                'for class tests.someapp.models.Book')
    self.assertIn(expected, captured.getvalue())
def test_required_fields_is_list(self):
    """REQUIRED_FIELDS should be a list."""
    errors_out = StringIO()
    get_validation_errors(errors_out, get_app('auth'))
    self.assertIn("The REQUIRED_FIELDS must be a list or tuple.",
                  errors_out.getvalue())
def templatize(src, origin=None):
    """
    Turns a Django template into something that is understood by xgettext. It
    does so by translating the Django translation tags into standard gettext
    function invocations.

    ``src`` is the template source text; ``origin`` (optional) names the file
    for error/warning messages. Returns the transformed text as a string.
    """
    from django.template.base import (Lexer, TOKEN_TEXT, TOKEN_VAR,
        TOKEN_BLOCK, TOKEN_COMMENT, TRANSLATOR_COMMENT_MARK)
    src = force_text(src, settings.FILE_CHARSET)
    out = StringIO('')
    # State machine flags: inside {% trans %}/{% blocktrans %} (intrans),
    # past a {% plural %} tag (inplural), inside {% comment %} (incomment).
    message_context = None
    intrans = False
    inplural = False
    trimmed = False
    singular = []
    plural = []
    incomment = False
    comment = []
    # Translator comments found on a line, keyed by line number, plus the
    # line of the most recent one (to detect "not last item on line").
    lineno_comment_map = {}
    comment_lineno_cache = None
    # Adding the u prefix allows gettext to recognize the Unicode string
    # (#26093).
    raw_prefix = 'u' if six.PY3 else ''

    def join_tokens(tokens, trim=False):
        # Concatenate collected token texts, optionally trimming whitespace
        # (for {% blocktrans trimmed %}).
        message = ''.join(tokens)
        if trim:
            message = trim_whitespace(message)
        return message

    for t in Lexer(src).tokenize():
        if incomment:
            if t.token_type == TOKEN_BLOCK and t.contents == 'endcomment':
                content = ''.join(comment)
                translators_comment_start = None
                # Find the LAST line that starts a translator comment.
                for lineno, line in enumerate(content.splitlines(True)):
                    if line.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
                        translators_comment_start = lineno
                for lineno, line in enumerate(content.splitlines(True)):
                    if translators_comment_start is not None and lineno >= translators_comment_start:
                        out.write(' # %s' % line)
                    else:
                        out.write(' #\n')
                incomment = False
                comment = []
            else:
                comment.append(t.contents)
        elif intrans:
            if t.token_type == TOKEN_BLOCK:
                endbmatch = endblock_re.match(t.contents)
                pluralmatch = plural_re.match(t.contents)
                if endbmatch:
                    if inplural:
                        # Emit n(p)gettext with collected singular/plural.
                        if message_context:
                            out.write(
                                ' npgettext({p}{!r}, {p}{!r}, {p}{!r},count) '.format(
                                    message_context,
                                    join_tokens(singular, trimmed),
                                    join_tokens(plural, trimmed),
                                    p=raw_prefix,
                                ))
                        else:
                            out.write(
                                ' ngettext({p}{!r}, {p}{!r}, count) '.format(
                                    join_tokens(singular, trimmed),
                                    join_tokens(plural, trimmed),
                                    p=raw_prefix,
                                ))
                        # Blank out the original text, preserving layout so
                        # xgettext line numbers stay correct.
                        for part in singular:
                            out.write(blankout(part, 'S'))
                        for part in plural:
                            out.write(blankout(part, 'P'))
                    else:
                        if message_context:
                            out.write(' pgettext({p}{!r}, {p}{!r}) '.format(
                                message_context,
                                join_tokens(singular, trimmed),
                                p=raw_prefix,
                            ))
                        else:
                            out.write(' gettext({p}{!r}) '.format(
                                join_tokens(singular, trimmed),
                                p=raw_prefix,
                            ))
                        for part in singular:
                            out.write(blankout(part, 'S'))
                    # Reset translation-block state.
                    message_context = None
                    intrans = False
                    inplural = False
                    singular = []
                    plural = []
                elif pluralmatch:
                    inplural = True
                else:
                    filemsg = ''
                    if origin:
                        filemsg = 'file %s, ' % origin
                    raise SyntaxError(
                        "Translation blocks must not include other block tags: "
                        "%s (%sline %d)" % (t.contents, filemsg, t.lineno))
            elif t.token_type == TOKEN_VAR:
                # {{ var }} inside blocktrans becomes a %(var)s placeholder.
                if inplural:
                    plural.append('%%(%s)s' % t.contents)
                else:
                    singular.append('%%(%s)s' % t.contents)
            elif t.token_type == TOKEN_TEXT:
                # Escape literal % for the gettext format string.
                contents = t.contents.replace('%', '%%')
                if inplural:
                    plural.append(contents)
                else:
                    singular.append(contents)
        else:
            # Handle comment tokens (`{# ... #}`) plus other constructs on
            # the same line:
            if comment_lineno_cache is not None:
                cur_lineno = t.lineno + t.contents.count('\n')
                if comment_lineno_cache == cur_lineno:
                    if t.token_type != TOKEN_COMMENT:
                        # Translator comment wasn't the last thing on its
                        # line, so it would attach to the wrong message:
                        # warn and drop it.
                        for c in lineno_comment_map[comment_lineno_cache]:
                            filemsg = ''
                            if origin:
                                filemsg = 'file %s, ' % origin
                            warn_msg = (
                                "The translator-targeted comment '%s' "
                                "(%sline %d) was ignored, because it wasn't the last item "
                                "on the line.") % (c, filemsg, comment_lineno_cache)
                            warnings.warn(warn_msg, TranslatorCommentWarning)
                        lineno_comment_map[comment_lineno_cache] = []
                else:
                    out.write('# %s' % ' | '.join(
                        lineno_comment_map[comment_lineno_cache]))
                comment_lineno_cache = None
            if t.token_type == TOKEN_BLOCK:
                imatch = inline_re.match(t.contents)
                bmatch = block_re.match(t.contents)
                cmatches = constant_re.findall(t.contents)
                if imatch:
                    # {% trans "literal" %} — strip the quoting.
                    g = imatch.group(1)
                    if g[0] == '"':
                        g = g.strip('"')
                    elif g[0] == "'":
                        g = g.strip("'")
                    g = g.replace('%', '%%')
                    if imatch.group(2):
                        # A context is provided
                        context_match = context_re.match(imatch.group(2))
                        message_context = context_match.group(1)
                        if message_context[0] == '"':
                            message_context = message_context.strip('"')
                        elif message_context[0] == "'":
                            message_context = message_context.strip("'")
                        out.write(' pgettext({p}{!r}, {p}{!r}) '.format(
                            message_context, g, p=raw_prefix))
                        message_context = None
                    else:
                        out.write(' gettext({p}{!r}) '.format(g, p=raw_prefix))
                elif bmatch:
                    # {% blocktrans %} — emit any _("...") constants, record
                    # context, and enter translation-block state.
                    for fmatch in constant_re.findall(t.contents):
                        out.write(' _(%s) ' % fmatch)
                    if bmatch.group(1):
                        # A context is provided
                        context_match = context_re.match(bmatch.group(1))
                        message_context = context_match.group(1)
                        if message_context[0] == '"':
                            message_context = message_context.strip('"')
                        elif message_context[0] == "'":
                            message_context = message_context.strip("'")
                    intrans = True
                    inplural = False
                    trimmed = 'trimmed' in t.split_contents()
                    singular = []
                    plural = []
                elif cmatches:
                    for cmatch in cmatches:
                        out.write(' _(%s) ' % cmatch)
                elif t.contents == 'comment':
                    incomment = True
                else:
                    out.write(blankout(t.contents, 'B'))
            elif t.token_type == TOKEN_VAR:
                # {{ _("...")|filter }} — keep translatable constants and
                # filter arguments containing _().
                parts = t.contents.split('|')
                cmatch = constant_re.match(parts[0])
                if cmatch:
                    out.write(' _(%s) ' % cmatch.group(1))
                for p in parts[1:]:
                    if p.find(':_(') >= 0:
                        out.write(' %s ' % p.split(':', 1)[1])
                    else:
                        out.write(blankout(p, 'F'))
            elif t.token_type == TOKEN_COMMENT:
                if t.contents.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
                    lineno_comment_map.setdefault(t.lineno,
                                                  []).append(t.contents)
                    comment_lineno_cache = t.lineno
            else:
                out.write(blankout(t.contents, 'X'))
    return out.getvalue()
def test_percent_symbol_in_po_file(self):
    """Compiling a catalog containing a percent symbol should succeed."""
    sink = StringIO()
    call_command('compilemessages', locale=[self.LOCALE], stdout=sink)
    self.assertTrue(os.path.exists(self.MO_FILE))
def test_spawn_appserver(self, manage_instance_services):
    """
    Provision an instance and spawn an AppServer, complete with custom theme (colors)

    ``manage_instance_services`` is a mock injected by a patch decorator —
    presumably applied outside this view of the file; TODO confirm.
    """
    # Mock the execution of the manage_instance_services playbook as the celery workers aren't
    # set up in the playbook used for setting up the instance for this test.
    manage_instance_services.return_value = True
    OpenEdXInstanceFactory(
        name='Integration - test_spawn_appserver',
        deploy_simpletheme=True,
        static_content_overrides={
            'version': 0,
            'static_template_about_content': 'Hello world!',
            'homepage_overlay_html': '<h1>Welcome to the LMS!</h1>',
        },
    )
    instance = OpenEdXInstance.objects.get()
    # Add an lms user, as happens with beta registration
    user, _ = get_user_model().objects.get_or_create(
        username='******', email='*****@*****.**')
    instance.lms_users.add(user)
    # Create user profile and update user model from db
    UserProfile.objects.create(
        user=user,
        full_name="Test user 1",
        accepted_privacy_policy=datetime.now(),
        accept_domain_condition=True,
        subscribe_to_updates=True,
    )
    user.refresh_from_db()
    # Simulate that the application form was filled. This doesn't create another instance nor user
    application = BetaTestApplication.objects.create(
        user=user,
        subdomain='betatestdomain',
        instance_name=instance.name,
        public_contact_email='*****@*****.**',
        project_description='I want to beta test OpenCraft IM',
        status=BetaTestApplication.PENDING,
        # The presence of these colors will be checked later
        # Note: avoid string like #ffbb66 because it would be shortened to #fb6 and therefore
        # much harder to detect ("#ffbb66" wouldn't appear in CSS). Use e.g. #ffbb67
        main_color='#13709b',
        link_color='#14719c',
        header_bg_color='#ffbb67',
        footer_bg_color='#ddff89',
        instance=instance,
    )
    deployment = OpenEdXDeployment.objects.create(
        instance_id=instance.ref.id,
        creator=user.profile,
        type=DeploymentType.user.name,
        changes=None,
    )
    # We don't want to simulate e-mail verification of the user who submitted the application,
    # because that would start provisioning. Instead, we provision ourselves here.
    spawn_appserver(
        instance.ref.pk,
        mark_active_on_success=True,
        num_attempts=2,
        deployment_id=deployment.id,
        target_count=1,
        old_server_ids=[],
    )
    # Basic liveness and configuration checks on the provisioned instance.
    self.assert_server_ready(instance)
    self.assert_instance_up(instance)
    self.assert_bucket_configured(instance)
    self.assert_appserver_firewalled(instance)
    self.assertTrue(instance.successfully_provisioned)
    for appserver in instance.appserver_set.all():
        self.assert_secret_keys(instance, appserver)
        self.assert_lms_users_provisioned(user, appserver)
        self.assert_theme_provisioned(instance, appserver, application)
        self.assert_static_content_overrides_work(instance, appserver,
                                                  page='about')
    self.assert_load_balanced_domains(instance)
    # Test external databases
    if settings.DEFAULT_INSTANCE_MYSQL_URL and settings.DEFAULT_INSTANCE_MONGO_URL:
        self.assertFalse(instance.require_user_creation_success())
        self.assert_mysql_db_provisioned(instance)
        self.assert_mongo_db_provisioned(instance)
    # Test activity CSV
    # Run the management command and collect the CSV from stdout.
    out = StringIO()
    call_command('activity_csv', stdout=out)
    out_lines = out.getvalue().split('\r\n')
    # The output should look similar to this when one instance is launched:
    #
    # "Appserver IP","Internal LMS Domain","Name","Contact Email","Unique Hits","Total Users","Total Courses",
    # "Age (Days)"
    # "213.32.77.49","test.example.com","Instance","*****@*****.**","87","6","1",1
    self.assertEqual(
        '"Appserver IP","Internal LMS Domain","Name","Contact Email","Unique Hits","Total Users","Total Courses",'
        '"Age (Days)"',
        out_lines[0])
    self.assertIn('"Integration - test_spawn_appserver"', out_lines[1])
    self.assertIn('"*****@*****.**"', out_lines[1])
    self.assertNotIn('N/A', out_lines[1])
    # stdout should contain 3 lines (as opposed to 2) to account for the last newline.
    self.assertEqual(len(out_lines), 3)
def test_one_locale_excluded(self):
    """Locales passed via exclude must not be compiled."""
    call_command('compilemessages', exclude=['it'], stdout=StringIO())
    for compiled in ('en', 'fr'):
        self.assertTrue(os.path.exists(self.MO_FILE % compiled))
    self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
def test_error_reported_by_msgfmt(self):
    """A msgfmt failure should surface as a CommandError."""
    with self.assertRaises(CommandError):
        call_command('compilemessages', locale=[self.LOCALE],
                     stdout=StringIO())
def test_one_locale(self):
    """Compiling a single locale should produce its .mo file."""
    locale_dir = os.path.join(self.test_dir, 'locale')
    with override_settings(LOCALE_PATHS=(locale_dir,)):
        call_command('compilemessages', locale=['hr'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE_HR))
def test_update_index_command(self):
    """update_index should repopulate a freshly-reset search index."""
    # Reset the index, this should clear out the index.
    self.reset_index()
    # Give Elasticsearch some time to catch up...
    time.sleep(1)
    self.assertEqual(set(self.backend.search(None, models.SearchTest)),
                     set())
    # Run the update_index command, ignoring any DeprecationWarnings thrown
    # by models with old-style indexed_fields definitions.
    with self.ignore_deprecation_warnings():
        management.call_command(
            'update_index',
            backend_name=self.backend_name,
            interactive=False,
            stdout=StringIO(),
        )
    hits = set(self.backend.search(None, models.SearchTest))
    expected = {self.testa, self.testb,
                self.testc.searchtest_ptr, self.testd.searchtest_ptr}
    self.assertEqual(hits, expected)
def templatize(src, origin=None):
    """
    Turns a Django template into something that is understood by xgettext. It
    does so by translating the Django translation tags into standard gettext
    function invocations.

    ``src`` is the template source; ``origin`` (optional) names the source
    file for error messages. Returns the transformed text as a native string.
    """
    from django.conf import settings
    from django.template import (Lexer, TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK,
        TOKEN_COMMENT, TRANSLATOR_COMMENT_MARK)
    src = force_text(src, settings.FILE_CHARSET)
    out = StringIO()
    # State machine flags: inside {% trans %}/{% blocktrans %} (intrans),
    # past a {% plural %} tag (inplural), inside {% comment %} (incomment).
    message_context = None
    intrans = False
    inplural = False
    singular = []
    plural = []
    incomment = False
    comment = []
    for t in Lexer(src, origin).tokenize():
        if incomment:
            if t.token_type == TOKEN_BLOCK and t.contents == 'endcomment':
                content = ''.join(comment)
                translators_comment_start = None
                # Find the LAST line that starts a translator comment.
                for lineno, line in enumerate(content.splitlines(True)):
                    if line.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
                        translators_comment_start = lineno
                for lineno, line in enumerate(content.splitlines(True)):
                    if translators_comment_start is not None and lineno >= translators_comment_start:
                        out.write(' # %s' % line)
                    else:
                        out.write(' #\n')
                incomment = False
                comment = []
            else:
                comment.append(t.contents)
        elif intrans:
            if t.token_type == TOKEN_BLOCK:
                endbmatch = endblock_re.match(t.contents)
                pluralmatch = plural_re.match(t.contents)
                if endbmatch:
                    if inplural:
                        # Emit n(p)gettext with collected singular/plural.
                        if message_context:
                            out.write(' npgettext(%r, %r, %r,count) ' %
                                      (message_context,
                                       ''.join(singular),
                                       ''.join(plural)))
                        else:
                            out.write(' ngettext(%r, %r, count) ' %
                                      (''.join(singular), ''.join(plural)))
                        # Blank out original text so xgettext line numbers
                        # stay correct.
                        for part in singular:
                            out.write(blankout(part, 'S'))
                        for part in plural:
                            out.write(blankout(part, 'P'))
                    else:
                        if message_context:
                            out.write(' pgettext(%r, %r) ' %
                                      (message_context, ''.join(singular)))
                        else:
                            out.write(' gettext(%r) ' % ''.join(singular))
                        for part in singular:
                            out.write(blankout(part, 'S'))
                    # Reset translation-block state.
                    message_context = None
                    intrans = False
                    inplural = False
                    singular = []
                    plural = []
                elif pluralmatch:
                    inplural = True
                else:
                    filemsg = ''
                    if origin:
                        filemsg = 'file %s, ' % origin
                    raise SyntaxError(
                        "Translation blocks must not include other block tags: %s (%sline %d)" %
                        (t.contents, filemsg, t.lineno))
            elif t.token_type == TOKEN_VAR:
                # {{ var }} inside blocktrans becomes a %(var)s placeholder.
                if inplural:
                    plural.append('%%(%s)s' % t.contents)
                else:
                    singular.append('%%(%s)s' % t.contents)
            elif t.token_type == TOKEN_TEXT:
                # Escape lone % for the gettext format string.
                contents = one_percent_re.sub('%%', t.contents)
                if inplural:
                    plural.append(contents)
                else:
                    singular.append(contents)
        else:
            if t.token_type == TOKEN_BLOCK:
                imatch = inline_re.match(t.contents)
                bmatch = block_re.match(t.contents)
                cmatches = constant_re.findall(t.contents)
                if imatch:
                    # {% trans "literal" %} — strip the quoting.
                    g = imatch.group(1)
                    if g[0] == '"':
                        g = g.strip('"')
                    elif g[0] == "'":
                        g = g.strip("'")
                    g = one_percent_re.sub('%%', g)
                    if imatch.group(2):
                        # A context is provided
                        context_match = context_re.match(imatch.group(2))
                        message_context = context_match.group(1)
                        if message_context[0] == '"':
                            message_context = message_context.strip('"')
                        elif message_context[0] == "'":
                            message_context = message_context.strip("'")
                        out.write(' pgettext(%r, %r) ' %
                                  (message_context, g))
                        message_context = None
                    else:
                        out.write(' gettext(%r) ' % g)
                elif bmatch:
                    # {% blocktrans %} — emit any _("...") constants, record
                    # context, and enter translation-block state.
                    for fmatch in constant_re.findall(t.contents):
                        out.write(' _(%s) ' % fmatch)
                    if bmatch.group(1):
                        # A context is provided
                        context_match = context_re.match(bmatch.group(1))
                        message_context = context_match.group(1)
                        if message_context[0] == '"':
                            message_context = message_context.strip('"')
                        elif message_context[0] == "'":
                            message_context = message_context.strip("'")
                    intrans = True
                    inplural = False
                    singular = []
                    plural = []
                elif cmatches:
                    for cmatch in cmatches:
                        out.write(' _(%s) ' % cmatch)
                elif t.contents == 'comment':
                    incomment = True
                else:
                    out.write(blankout(t.contents, 'B'))
            elif t.token_type == TOKEN_VAR:
                # {{ _("...")|filter }} — keep translatable constants and
                # filter arguments containing _().
                parts = t.contents.split('|')
                cmatch = constant_re.match(parts[0])
                if cmatch:
                    out.write(' _(%s) ' % cmatch.group(1))
                for p in parts[1:]:
                    if p.find(':_(') >= 0:
                        out.write(' %s ' % p.split(':', 1)[1])
                    else:
                        out.write(blankout(p, 'F'))
            elif t.token_type == TOKEN_COMMENT:
                out.write(' # %s' % t.contents)
            else:
                out.write(blankout(t.contents, 'X'))
    return force_str(out.getvalue())
def download(modeladmin, request, selected):
    """Admin action that streams a fixed text payload back to the client."""
    payload = StringIO('This is the content of the file')
    return StreamingHttpResponse(FileWrapper(payload))
def test_user_doesnt_exist(db):
    """The command must fail loudly when the username is unknown."""
    with pytest.raises(CommandError) as excinfo:
        call_command('ihavepower', 'fordprefect', stdout=StringIO())
    assert 'User fordprefect does not exist.' == str(excinfo.value)
def test_import_stops_txt_none(self):
    """A stops.txt containing only a header row creates no Stop objects."""
    header_only = StringIO("stop_id,stop_name,stop_desc,stop_lat,stop_lon\n")
    Stop.import_txt(header_only, self.feed)
    self.assertFalse(Stop.objects.exists())
def setUp(self):
    """Swap sys.stdout for an in-memory buffer, remembering the real one."""
    super(ConsoleBackendTests, self).setUp()
    self.__stdout = sys.stdout
    capture = StringIO()
    sys.stdout = capture
    self.stream = capture
def setUp(self):
    """Create a test user plus stdout/stderr capture buffers."""
    self.stdout = StringIO()
    self.stderr = StringIO()
    self.user = models.User.objects.create_user(username='******',
                                                password='******')
def flush_mailbox(self):
    """Discard captured output by pointing sys.stdout at a fresh buffer."""
    fresh = StringIO()
    sys.stdout = fresh
    self.stream = fresh
def test_command_without_args(self):
    """load_entries requires a file argument and should error without one."""
    with self.assertRaises(CommandError):
        call_command('load_entries', stdout=StringIO())
def connect(self):
    """
    Open the websocket connection and try to connect to the SSH host using
    the parameters passed in from the frontend.

    Reads width/height/hostid from the websocket query string, checks login
    and host permissions, then starts the SSH session and records it as a
    TerminalSession.
    :return:
    """
    self.accept()
    # Join the channel-layer group for this session.
    async_to_sync(self.channel_layer.group_add)(self.group,
                                                self.channel_name)
    self.start_time = timezone.now()
    self.session = self.scope.get('session', None)
    if not self.session.get('islogin', None):
        # Not logged in: disconnect the websocket immediately.
        # NOTE(review): there is no `return` after close(), so execution
        # continues below — confirm whether that is intentional.
        self.message['status'] = 2
        self.message['message'] = 'You are not login in...'
        message = json.dumps(self.message)
        if self.send_flag == 0:
            self.send(message)
        elif self.send_flag == 1:
            async_to_sync(self.channel_layer.group_send)(self.group, {
                "type": "chat.message",
                "text": message,
            })
        self.close(3001)
    self.check_login()
    query_string = self.scope.get('query_string').decode()
    ssh_args = QueryDict(query_string=query_string, encoding='utf-8')
    width = ssh_args.get('width')
    height = ssh_args.get('height')
    width = int(width)
    height = int(height)
    # NOTE(review): auth is hard-coded to None, so the key-based branch
    # below (`if auth == 'key':`) is currently unreachable — confirm.
    auth = None
    ssh_key_name = '123456'
    hostid = int(ssh_args.get('hostid'))
    try:
        if not self.session['issuperuser']:
            # Ordinary user: check that the host exists and the user has
            # access to it (directly or via a group).
            hosts = RemoteUserBindHost.objects.filter(
                Q(id=hostid),
                Q(user__username=self.session['username']) |
                Q(group__user__username=self.session['username']),
            ).distinct()
            if not hosts:
                self.message['status'] = 2
                self.message['message'] = 'Host is not exist...'
                message = json.dumps(self.message)
                if self.send_flag == 0:
                    self.send(message)
                elif self.send_flag == 1:
                    async_to_sync(self.channel_layer.group_send)(
                        self.group, {
                            "type": "chat.message",
                            "text": message,
                        })
                self.close(3001)
        self.remote_host = RemoteUserBindHost.objects.get(id=hostid)
        if not self.remote_host.enabled:
            try:
                self.message['status'] = 2
                self.message['message'] = 'Host is disabled...'
                message = json.dumps(self.message)
                if self.send_flag == 0:
                    self.send(message)
                elif self.send_flag == 1:
                    async_to_sync(self.channel_layer.group_send)(
                        self.group, {
                            "type": "chat.message",
                            "text": message,
                        })
                self.close(3001)
            except BaseException:
                pass
    except BaseException:
        # Any lookup failure is reported to the client as a missing host.
        self.message['status'] = 2
        self.message['message'] = 'Host is not exist...'
        message = json.dumps(self.message)
        if self.send_flag == 0:
            self.send(message)
        elif self.send_flag == 1:
            async_to_sync(self.channel_layer.group_send)(self.group, {
                "type": "chat.message",
                "text": message,
            })
        self.close(3001)
    host = self.remote_host.ip
    port = self.remote_host.port
    user = self.remote_host.remote_user.username
    passwd = self.remote_host.remote_user.password
    timeout = 15
    self.ssh = SSH(websocker=self, message=self.message)
    ssh_connect_dict = {
        'host': host,
        'user': user,
        'port': port,
        'timeout': timeout,
        'pty_width': width,
        'pty_height': height,
        'password': passwd,
    }
    if auth == 'key':
        # Load a private key from a temp file into an in-memory buffer,
        # then remove the file.
        ssh_key_file = os.path.join(TMP_DIR, ssh_key_name)
        with open(ssh_key_file, 'r') as f:
            ssh_key = f.read()
        string_io = StringIO()
        string_io.write(ssh_key)
        string_io.flush()
        string_io.seek(0)
        ssh_connect_dict['ssh_key'] = string_io
        os.remove(ssh_key_file)
    self.ssh.connect(**ssh_connect_dict)
    if self.remote_host.remote_user.enabled:
        # Only superusers may use the su-to-root jump feature.
        if self.session.get('issuperuser', None):
            if self.remote_host.remote_user.superusername:
                self.ssh.su_root(
                    self.remote_host.remote_user.superusername,
                    self.remote_host.remote_user.superpassword,
                    0.3,
                )
    data = {
        'name': self.channel_name,
        'group': self.group,
        'user': self.session.get('username'),
        'host': host,
        'username': user,
        'protocol': self.remote_host.protocol,
        'port': port,
        'type': 1,  # 1 webssh
    }
    TerminalSession.objects.create(**data)
def test_clear_neo4j_command(self):
    """clear_neo4j should report completion on stdout."""
    captured = StringIO()
    call_command('clear_neo4j', stdout=captured)
    self.assertIn('Done', captured.getvalue())
def test_one_locale_excluded(self):
    """makemessages must skip locales passed via exclude."""
    management.call_command('makemessages', exclude=['it'],
                            stdout=StringIO())
    for updated in ('en', 'fr'):
        self.assertRecentlyModified(self.PO_FILE % updated)
    self.assertNotRecentlyModified(self.PO_FILE % 'it')
def _get_pk_values(serial_str):
    """Return the primary-key value of each object in a YAML serialization.

    ``serial_str`` is a YAML document (a sequence of mappings, each carrying
    a "pk" entry, as produced by Django's YAML serializer). Returns the pk
    values in document order. Raises KeyError if an entry lacks "pk".
    """
    stream = StringIO(serial_str)
    # Idiom: a comprehension replaces the manual append loop.
    return [obj_dict["pk"] for obj_dict in yaml.safe_load(stream)]
def test_multiple_locales_excluded_with_locale(self):
    """exclude wins over an explicit locale list in makemessages."""
    management.call_command('makemessages', locale=['en', 'fr', 'it'],
                            exclude=['fr', 'it'], stdout=StringIO())
    self.assertRecentlyModified(self.PO_FILE % 'en')
    for skipped in ('fr', 'it'):
        self.assertNotRecentlyModified(self.PO_FILE % skipped)
def test_management_command(self):
    """ogrinspect should emit a Django model class for the shapefile."""
    captured = StringIO()
    shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp')
    call_command('ogrinspect', shp_file, 'City', stdout=captured)
    self.assertIn('class City(models.Model):', captured.getvalue())
def test_refresh_tokens_command(self):
    """Test the refresh_tokens command.

    Walks through: nothing to refresh, a successful refresh of an expired
    token, --all refresh, a refresh failure (invalid_grant), and failure
    with --deauth (which removes the UserFitbit record).
    """
    # No token is close to expiry yet, so nothing should be refreshed.
    out = StringIO()
    management.call_command('refresh_tokens', stdout=out)
    self.assertIn('Successfully refreshed 0 tokens', out.getvalue())
    # Age the token so it qualifies for refresh.
    self.fbuser.expires_at -= 300
    self.fbuser.save()
    out = StringIO()
    with requests_mock.mock() as m:
        m.post(FitbitOauth2Client.refresh_token_url, text=json.dumps({
            'access_token': 'fake_access_token',
            'refresh_token': 'fake_refresh_token',
            'expires_at': time.time() + 300,
        }))
        management.call_command('refresh_tokens', stdout=out)
    # The stored credentials should now be the mocked ones.
    self.fbuser = UserFitbit.objects.get()
    self.assertIn('Successfully refreshed 1 tokens', out.getvalue())
    self.assertEqual('fake_access_token', self.fbuser.access_token)
    self.assertEqual('fake_refresh_token', self.fbuser.refresh_token)
    self.assertTrue(self.fbuser.expires_at > time.time())
    # With all=True every token is refreshed regardless of expiry.
    out = StringIO()
    with requests_mock.mock() as m:
        m.post(FitbitOauth2Client.refresh_token_url, text=json.dumps({
            'access_token': 'fake_access_token2',
            'refresh_token': 'fake_refresh_token2',
            'expires_at': time.time() + 300,
        }))
        management.call_command('refresh_tokens', all=True, stdout=out)
    self.fbuser = UserFitbit.objects.get()
    self.assertIn('Successfully refreshed 1 tokens', out.getvalue())
    self.assertEqual('fake_access_token2', self.fbuser.access_token)
    self.assertEqual('fake_refresh_token2', self.fbuser.refresh_token)
    self.assertTrue(self.fbuser.expires_at > time.time())
    # An invalid_grant error counts as a failed refresh.
    out = StringIO()
    with requests_mock.mock() as m:
        m.post(FitbitOauth2Client.refresh_token_url, text=json.dumps({
            'errors': [{
                'errorType': 'invalid_grant'
            }],
        }))
        management.call_command('refresh_tokens', all=True, stdout=out)
    self.assertIn('Successfully refreshed 0 tokens', out.getvalue())
    self.assertIn('Failed to refresh 1 tokens', out.getvalue())
    # With deauth=True a failed refresh removes the Fitbit link entirely.
    out = StringIO()
    with requests_mock.mock() as m:
        m.post(FitbitOauth2Client.refresh_token_url, text=json.dumps({
            'errors': [{
                'errorType': 'invalid_grant'
            }],
        }))
        management.call_command('refresh_tokens', all=True, deauth=True,
                                stdout=out)
    self.assertIn('Successfully refreshed 0 tokens', out.getvalue())
    self.assertIn('Failed to refresh 1 tokens', out.getvalue())
    self.assertIn('Deauthenticated 1 users', out.getvalue())
    self.assertEqual(0, UserFitbit.objects.count())
def setUp(self):
    """Prepare a fresh in-memory buffer for capturing command output."""
    self.out = StringIO()
def test_warnings_capture_debug_false(self):
    """With DEBUG off, deprecation warnings must not reach the log stream."""
    output = StringIO()
    self.logger.handlers[0].stream = output
    warnings.warn('Foo Deprecated', DeprecationWarning)
    # Idiom fix: assertNotIn replaces assertFalse(x in y) and produces a
    # far more informative failure message.
    self.assertNotIn('Foo Deprecated', output.getvalue())