def GoToReferences_test( self ):
  """GoToReferences on test.ts finds both usages of |bar|."""
  filepath = self._PathToTestFile( 'test.ts' )
  contents = ReadFile( filepath )

  # Let the server see the buffer before running the command.
  visit_event = self._BuildRequest( filepath = filepath,
                                    filetype = 'typescript',
                                    contents = contents,
                                    event_name = 'BufferVisit' )
  self._app.post_json( '/event_notification', visit_event )

  references_request = self._BuildRequest(
    completer_target = 'filetype_default',
    command_arguments = [ 'GoToReferences' ],
    line_num = 28,
    column_num = 6,
    contents = contents,
    filetype = 'typescript',
    filepath = filepath )

  expected = contains_inanyorder(
    has_entries( { 'description': 'var bar = new Bar();',
                   'line_num'   : 28,
                   'column_num' : 5 } ),
    has_entries( { 'description': 'bar.testMethod();',
                   'line_num'   : 29,
                   'column_num' : 1 } ) )

  response = self._app.post_json( '/run_completer_command',
                                  references_request ).json
  assert_that( response, expected )
def DebugInfo_ServerIsRunning_test( app ):
  """/debug_info reports a healthy OmniSharp server once it is ready."""
  filepath = PathToTestFile( 'testy', 'Program.cs' )
  contents = ReadFile( filepath )

  # Trigger the C# completer so the server starts, then wait for it.
  parse_event = BuildRequest( filepath = filepath,
                              filetype = 'cs',
                              contents = contents,
                              event_name = 'FileReadyToParse' )
  app.post_json( '/event_notification', parse_event )
  WaitUntilCompleterServerReady( app, 'cs' )

  debug_request = BuildRequest( filepath = filepath, filetype = 'cs' )
  assert_that(
    app.post_json( '/debug_info', debug_request ).json,
    has_entry( 'completer', has_entries( {
      'name': 'C#',
      'servers': contains( has_entries( {
        'name': 'OmniSharp',
        'is_running': True,
        'executable': instance_of( str ),
        'pid': instance_of( int ),
        'address': instance_of( str ),
        'port': instance_of( int ),
        'logfiles': contains( instance_of( str ),
                              instance_of( str ) ),
        'extras': contains( has_entries( {
          'key': 'solution',
          'value': instance_of( str )
        } ) )
      } ) ),
      'items': empty()
    } ) ) )
def Diagnostics_SimpleLocationExtent_test(app):
    """The diagnostic extent covers exactly the 'baz' token (line 3, cols 3-6)."""
    contents = """
void foo() {
  baz = 5;
}
// Padding to 5 lines
// Padding to 5 lines
"""

    event_data = BuildRequest(compilation_flags=["-x", "c++"],
                              event_name="FileReadyToParse",
                              contents=contents,
                              filetype="cpp")
    results = app.post_json("/event_notification", event_data).json

    expected_extent = has_entries({
        "start": has_entries({"line_num": 3, "column_num": 3}),
        "end": has_entries({"line_num": 3, "column_num": 6}),
    })
    assert_that(
        results,
        contains(has_entries({"location_extent": expected_extent})),
    )
def GetCompletions_NonForcedReturnsNoResults_test( app ):
  """With no semantic match, non-forced completion falls back to identifiers."""
  filepath = PathToTestFile( 'testy', 'ContinuousTest.cs' )
  with WrapOmniSharpServer( app, filepath ):
    contents = ReadFile( filepath )

    parse_event = BuildRequest( filepath = filepath,
                                filetype = 'cs',
                                contents = contents,
                                event_name = 'FileReadyToParse' )
    app.post_json( '/event_notification', parse_event )

    completion_request = BuildRequest( filepath = filepath,
                                       filetype = 'cs',
                                       contents = contents,
                                       line_num = 9,
                                       column_num = 21,
                                       force_semantic = False,
                                       query = 'Date' )
    response = app.post_json( '/completions', completion_request ).json

    # There are no semantic completions. However, we fall back to identifier
    # completer in this case.
    assert_that( response, has_entries( {
      'completions': has_item( has_entries( {
        'insertion_text' : 'String',
        'extra_menu_info': '[ID]',
      } ) ),
      'errors': empty(),
    } ) )
def EventNotification_FileReadyToParse_TagFiles_UnicodeWorkingDirectory_test(
    ycm, *args ):
  """Tag files are resolved correctly from a unicode working directory."""
  unicode_dir = PathToTestFile( 'uni¢𐍈d€' )
  current_buffer_file = PathToTestFile( 'uni¢𐍈d€', 'current_buffer' )
  current_buffer = VimBuffer( name = current_buffer_file,
                              contents = [ 'current_buffer_contents' ],
                              filetype = 'some_filetype' )

  with patch( 'ycm.client.event_notification.EventNotification.'
              'PostDataToHandlerAsync' ) as post_data_to_handler_async:
    with CurrentWorkingDirectory( unicode_dir ):
      with MockVimBuffers( [ current_buffer ],
                           [ current_buffer ],
                           ( 1, 5 ) ):
        ycm.OnFileReadyToParse()

    # Positional arguments passed to PostDataToHandlerAsync.
    assert_that(
      post_data_to_handler_async.call_args[ 0 ],
      contains(
        has_entries( {
          'filepath': current_buffer_file,
          'line_num': 1,
          'column_num': 6,
          'file_data': has_entries( {
            current_buffer_file: has_entries( {
              'contents': 'current_buffer_contents\n',
              'filetypes': [ 'some_filetype' ]
            } )
          } ),
          'event_name': 'FileReadyToParse',
          'tag_files': has_item( PathToTestFile( 'uni¢𐍈d€', 'tags' ) )
        } ),
        'event_notification' ) )
def Subcommands_RefactorRename_Unicode_test( app ):
  """RefactorRename accepts and produces unicode identifiers."""
  filepath = PathToTestFile( 'unicode.js' )

  expected_chunks = contains(
    ChunkMatcher( '†es†',
                  LocationMatcher( filepath, 5, 5 ),
                  LocationMatcher( filepath, 5, 13 ) ),
    ChunkMatcher( '†es†',
                  LocationMatcher( filepath, 9, 1 ),
                  LocationMatcher( filepath, 9, 9 ) ),
    ChunkMatcher( '†es†',
                  LocationMatcher( filepath, 11, 1 ),
                  LocationMatcher( filepath, 11, 9 ) ) )

  RunTest( app, {
    'description': 'RefactorRename works with unicode identifiers',
    'request': {
      'command': 'RefactorRename',
      'arguments': [ '†es†' ],
      'filepath': filepath,
      'line_num': 11,
      'column_num': 3,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'fixits': contains( has_entries( {
          'chunks': expected_chunks,
          'location': LocationMatcher( filepath, 11, 3 )
        } ) )
      } )
    }
  } )
def test_build_document_set():
    """build_document_set maps gapy rows to documents using the mappings."""
    def build_gapy_response(visits, name, start_date):
        # Minimal stand-in for a gapy API row.
        return {
            "metrics": {"visits": visits},
            "dimensions": {"customVarValue1": name},
            "start_date": start_date,
        }

    mappings = {"customVarValue1": "name"}
    results = [
        build_gapy_response("12345", "Jane", date(2013, 4, 1)),
        build_gapy_response("2313", "John", date(2013, 4, 1)),
        build_gapy_response("4323", "Joanne", date(2013, 4, 8)),
    ]

    docs = build_document_set(results, "people", mappings)

    expected = [
        {
            "name": "Jane",
            "_timestamp": dt(2013, 4, 1, 0, 0, 0, "UTC"),
            "dataType": "people",
            "visits": 12345,
        },
        {
            "name": "John",
            "_timestamp": dt(2013, 4, 1, 0, 0, 0, "UTC"),
            "visits": 2313,
        },
        {
            "name": "Joanne",
            "_timestamp": dt(2013, 4, 8, 0, 0, 0, "UTC"),
            "visits": 4323,
        },
    ]
    for entries in expected:
        assert_that(docs, has_item(has_entries(entries)))
def Subcommands_GoToReferences_test( app ):
  """GoToReferences returns usages across test.js and file3.js."""
  test_js = PathToTestFile( 'test.js' )
  file3_js = PathToTestFile( 'file3.js' )

  expected_references = contains_inanyorder(
    has_entries( { 'description': 'var bar = new Bar();',
                   'line_num'   : 30,
                   'column_num' : 5,
                   'filepath'   : test_js } ),
    has_entries( { 'description': 'bar.testMethod();',
                   'line_num'   : 31,
                   'column_num' : 1,
                   'filepath'   : test_js } ),
    has_entries( { 'description': 'bar.nonExistingMethod();',
                   'line_num'   : 32,
                   'column_num' : 1,
                   'filepath'   : test_js } ),
    has_entries( { 'description': 'var bar = new Bar();',
                   'line_num'   : 1,
                   'column_num' : 5,
                   'filepath'   : file3_js } ),
    has_entries( { 'description': 'bar.testMethod();',
                   'line_num'   : 2,
                   'column_num' : 1,
                   'filepath'   : file3_js } ) )

  RunTest( app, {
    'description': 'GoToReferences works',
    'request': {
      'command': 'GoToReferences',
      'line_num': 30,
      'column_num': 5,
      'filepath': test_js,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': expected_references
    }
  } )
def Subcommands_FixIt_test( app ):
  """FixIt on a call to an undeclared method offers to declare it.

  The generated declaration is matched with a regex because line endings
  depend on the platform (\\r?\\n).
  """
  filepath = PathToTestFile( 'test.js' )
  RunTest( app, {
    'description': 'FixIt works on a non-existing method',
    'request': {
      'command': 'FixIt',
      'line_num': 32,
      'column_num': 19,
      'filepath': filepath,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'fixits': contains_inanyorder( has_entries( {
          'text': "Declare method 'nonExistingMethod'",
          'chunks': contains(
            ChunkMatcher(
              # Raw strings: the backslashes here are regex escapes, not
              # Python string escapes; non-raw strings made `\(` an invalid
              # escape sequence (a future SyntaxError).
              matches_regexp(
                r'^\r?\n'
                r' nonExistingMethod\(\) {\r?\n'
                r' throw new Error\("Method not implemented."\);\r?\n'
                r' }$',
              ),
              LocationMatcher( filepath, 22, 12 ),
              LocationMatcher( filepath, 22, 12 ) )
          ),
          'location': LocationMatcher( filepath, 32, 19 )
        } ) )
      } )
    }
  } )
def SimpleLocationExtent_test( self ):
  """The diagnostic extent spans the 'baz' token on line 3, columns 3-6."""
  contents = """
void foo() {
  baz = 5;
}
// Padding to 5 lines
// Padding to 5 lines
"""

  event_data = self._BuildRequest( compilation_flags = [ '-x', 'c++' ],
                                   event_name = 'FileReadyToParse',
                                   contents = contents,
                                   filetype = 'cpp' )
  results = self._app.post_json( '/event_notification', event_data ).json

  extent_matcher = has_entries( {
    'start': has_entries( { 'line_num': 3, 'column_num': 3, } ),
    'end': has_entries( { 'line_num': 3, 'column_num': 6, } ),
  } )
  assert_that( results,
               contains( has_entries( {
                 'location_extent': extent_matcher
               } ) ) )
def test_matrix_config_product():
    """A python-version x environment matrix expands to all 4 combinations."""
    raw = StringIO(
        """
language: python
python:
  - "3.3"
  - "3.4"
environment:
  - FOO=bar
  - FOO=baz
image: node
"""
    )

    config = parse_config(raw, defaults)

    # Every (version, environment) pair must appear, in product order.
    assert_that(
        config,
        contains(
            has_entries(image="python:3.3", environment="FOO=bar"),
            has_entries(image="python:3.3", environment="FOO=baz"),
            has_entries(image="python:3.4", environment="FOO=bar"),
            has_entries(image="python:3.4", environment="FOO=baz"),
        ),
    )
def FixIt_Available_test( self ):
  """Diagnostics report the correct fixit_available flag per diagnostic."""
  # Use a context manager so the test-file handle is closed instead of
  # leaked (the original `open(...).read()` never closed the file).
  with open( self._PathToTestFile( 'FixIt_Clang_cpp11.cpp' ) ) as test_file:
    contents = test_file.read()

  event_data = self._BuildRequest( contents = contents,
                                   event_name = 'FileReadyToParse',
                                   filetype = 'cpp',
                                   compilation_flags = [ '-x', 'c++',
                                                         '-std=c++03',
                                                         '-Wall',
                                                         '-Wextra',
                                                         '-pedantic' ] )

  response = self._app.post_json( '/event_notification', event_data ).json
  pprint( response )

  assert_that( response, has_items(
    has_entries( {
      'location': has_entries( { 'line_num': 16, 'column_num': 3 } ),
      'text': equal_to( 'switch condition type \'A\' '
                        'requires explicit conversion to \'int\'' ),
      'fixit_available': True
    } ),
    has_entries( {
      'location': has_entries( { 'line_num': 11, 'column_num': 3 } ),
      'text': equal_to(
        'explicit conversion functions are a C++11 extension' ),
      'fixit_available': False
    } ),
  ) )
def Diagnostics_MultipleMissingIncludes_test( app ):
  """Both missing-include errors are reported, each without a fixit."""
  contents = ReadFile( PathToTestFile( 'multiple_missing_includes.cc' ) )

  event_data = BuildRequest( contents = contents,
                             event_name = 'FileReadyToParse',
                             filetype = 'cpp',
                             compilation_flags = [ '-x', 'c++' ] )
  response = app.post_json( '/event_notification', event_data ).json
  pprint( response )

  first_missing = has_entries( {
    'kind': equal_to( 'ERROR' ),
    'location': has_entries( { 'line_num': 1, 'column_num': 10 } ),
    'text': equal_to( "'first_missing_include' file not found" ),
    'fixit_available': False
  } )
  second_missing = has_entries( {
    'kind': equal_to( 'ERROR' ),
    'location': has_entries( { 'line_num': 2, 'column_num': 10 } ),
    'text': equal_to( "'second_missing_include' file not found" ),
    'fixit_available': False
  } )
  assert_that( response, has_items( first_missing, second_missing, ) )
def RawResponse_ConvertedFromOmniCompleter_test():
  """Vim omnifunc dicts are translated field-by-field into ycmd responses.

  Missing vim fields must simply be absent from the converted entry.
  """
  vim_results = [
    { "word": "WORD", "abbr": "ABBR", "menu": "MENU",
      "kind": "KIND", "info": "INFO" },
    { "word": "WORD2", "abbr": "ABBR2", "menu": "MENU2",
      "kind": "KIND2", "info": "INFO" },
    { "word": "WORD", "abbr": "ABBR", },
    { },
  ]
  expected_results = [
    has_entries( { "insertion_text": "WORD",
                   "menu_text": "ABBR",
                   "extra_menu_info": "MENU",
                   "kind": [ "KIND" ],
                   "detailed_info": "INFO" } ),
    has_entries( { "insertion_text": "WORD2",
                   "menu_text": "ABBR2",
                   "extra_menu_info": "MENU2",
                   "kind": [ "KIND2" ],
                   "detailed_info": "INFO" } ),
    has_entries( { "insertion_text": "WORD",
                   "menu_text": "ABBR", } ),
    has_entries( { } ),
  ]

  request = BuildOmnicompletionRequest( vim_results )
  results = request.RawResponse()

  eq_( len( results ), len( expected_results ) )
  for actual, expected in zip( results, expected_results ):
    assert_that( actual, expected )
def Subcommands_GoToReferences_test( app ):
  """GoToReferences locates both in-file usages in cool_object.js."""
  cool_object = PathToTestFile( 'coollib', 'cool_object.js' )

  RunTest( app, {
    'description': 'GoToReferences works within file',
    'request': {
      'command': 'GoToReferences',
      'line_num': 17,
      'column_num': 29,
      'filepath': cool_object,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': contains_inanyorder(
        has_entries( {
          'filepath': cool_object,
          'line_num': 17,
          'column_num': 29,
        } ),
        has_entries( {
          'filepath': cool_object,
          'line_num': 12,
          'column_num': 9,
        } ) )
    }
  } )
def Subcommands_OrganizeImports_test( app ):
  """OrganizeImports sorts, merges and prunes the import block."""
  filepath = PathToTestFile( 'imports.js' )
  RunTest( app, {
    'description': 'OrganizeImports removes unused imports, '
                   'coalesces imports from the same module, and sorts them',
    'request': {
      'command': 'OrganizeImports',
      'filepath': filepath,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'fixits': contains( has_entries( {
          'chunks': contains(
            # Raw strings: `\*` and the other backslashes are regex
            # escapes; in a non-raw string `\*` is an invalid escape
            # sequence (a future SyntaxError).
            ChunkMatcher(
              matches_regexp(
                r'import \* as lib from "library";\r?\n'
                r'import func, { func1, func2 } from "library";\r?\n' ),
              LocationMatcher( filepath, 1, 1 ),
              LocationMatcher( filepath, 2, 1 ) ),
            # Unused imports are deleted outright.
            ChunkMatcher( '',
                          LocationMatcher( filepath, 5, 1 ),
                          LocationMatcher( filepath, 6, 1 ) ),
            ChunkMatcher( '',
                          LocationMatcher( filepath, 9, 1 ),
                          LocationMatcher( filepath, 10, 1 ) ),
          )
        } ) )
      } )
    }
  } )
def Subcommands_GoToReferences_Unicode_test( app ):
  """GoToReferences handles files containing unicode characters."""
  unicode_js = PathToTestFile( 'unicode.js' )

  expected_locations = contains_inanyorder(
    has_entries( { 'filepath': unicode_js,
                   'line_num': 5,
                   'column_num': 5, } ),
    has_entries( { 'filepath': unicode_js,
                   'line_num': 9,
                   'column_num': 1, } ),
    has_entries( { 'filepath': unicode_js,
                   'line_num': 11,
                   'column_num': 1, } ) )

  RunTest( app, {
    'description': 'GoToReferences works within file with unicode chars',
    'request': {
      'command': 'GoToReferences',
      'line_num': 11,
      'column_num': 5,
      'filepath': unicode_js,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': expected_locations
    }
  } )
def Subcommands_RefactorRename_Simple_test( app ):
  """RefactorRename renames a class and all of its in-file usages."""
  test_js = PathToTestFile( 'test.js' )

  RunTest( app, {
    'description': 'RefactorRename works on a class name',
    'request': {
      'command': 'RefactorRename',
      'arguments': [ 'test' ],
      'line_num': 1,
      'column_num': 7,
      'filepath': test_js,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'fixits': contains( has_entries( {
          'chunks': contains_inanyorder(
            ChunkMatcher( 'test',
                          LocationMatcher( test_js, 11, 15 ),
                          LocationMatcher( test_js, 11, 18 ) ),
            ChunkMatcher( 'test',
                          LocationMatcher( test_js, 1, 7 ),
                          LocationMatcher( test_js, 1, 10 ) ),
          ),
          'location': LocationMatcher( test_js, 1, 7 )
        } ) )
      } )
    }
  } )
def has_joel_miller():
    """Matcher for a record whose identities contain Joel Miller."""
    joel = has_entries(id="0000000123456789",
                       firstName="Joel",
                       lastName="Miller",
                       type="individual",
                       works=contains(has_entries(title="Chevere!")))
    return has_entry("identities", contains(joel))
def test_reorder_modules_on_new_applies_changes_when_form_is_valid(
        self, mock_validate, mock_create_dashboard, mock_list_organisations,
        mock_list_data_sets, mock_list_module_types, client):
    """Posting a valid form reorders the modules per 'modules_order'."""
    form_data = {
        'slug': 'valid-slug',
        'modules-0-module_type': '',
        'modules-0-slug': 'foo',
        'modules-1-module_type': '',
        'modules-1-slug': 'bar',
        # Reversed order: module 2 ('bar') should come first.
        'modules_order': '2,1',
    }
    mock_validate.return_value = True

    client.post('/admin/dashboards', data=form_data)

    assert_that(len(mock_create_dashboard.call_args_list), equal_to(1))
    post_json = mock_create_dashboard.call_args[0][0]
    assert_that(post_json['modules'][0],
                has_entries({'slug': 'bar', 'order': 1}))
    assert_that(post_json['modules'][1],
                has_entries({'slug': 'foo', 'order': 2}))
def ZeroBasedLineAndColumn_test(self):
    """Diagnostics come back 1-based even though OmniSharp is 0-based."""
    filepath = self._PathToTestFile("testy", "Program.cs")
    with self._WrapOmniSharpServer(filepath):
        # Use a context manager so the file handle is closed instead of
        # leaked (the original `open(filepath).read()` never closed it).
        with open(filepath) as source_file:
            contents = source_file.read()

        results = {}
        for _ in (0, 1):  # First call always returns blank for some reason
            event_data = self._BuildRequest(
                filepath=filepath,
                event_name="FileReadyToParse",
                filetype="cs",
                contents=contents
            )
            results = self._app.post_json("/event_notification",
                                          event_data).json

        assert_that(
            results,
            contains(
                has_entries(
                    {
                        "kind": equal_to("ERROR"),
                        "text": contains_string("Unexpected symbol `}'', expecting identifier"),
                        "location": has_entries({"line_num": 11, "column_num": 2}),
                        "location_extent": has_entries(
                            {
                                "start": has_entries({"line_num": 11, "column_num": 2}),
                                "end": has_entries({"line_num": 11, "column_num": 2}),
                            }
                        ),
                    }
                )
            ),
        )
def DebugInfo_FlagsWhenNoExtraConfAndCompilationDatabaseLoaded_test( app ):
  """/debug_info reports the database path and flags taken from it."""
  with TemporaryTestDir() as tmp_dir:
    compile_commands = [
      {
        'directory': tmp_dir,
        'command': 'clang++ -I. -I/absolute/path -Wall',
        'file': os.path.join( tmp_dir, 'test.cc' ),
      },
    ]
    with TemporaryClangProject( tmp_dir, compile_commands ):
      request_data = BuildRequest(
        filepath = os.path.join( tmp_dir, 'test.cc' ),
        filetype = 'cpp' )

      assert_that(
        app.post_json( '/debug_info', request_data ).json,
        has_entry( 'completer', has_entries( {
          'name': 'C-family',
          'servers': empty(),
          'items': contains(
            has_entries( {
              'key': 'compilation database path',
              'value': instance_of( str )
            } ),
            has_entries( {
              'key': 'flags',
              # Raw string: `\[`, `\+` and `\]` are regex escapes; in a
              # non-raw string they are invalid escape sequences (a
              # future SyntaxError).
              'value': matches_regexp(
                r"\['clang\+\+', '-x', 'c\+\+', .*, '-Wall', .*\]" )
            } )
          )
        } ) ) )
def FixIt_Check_cpp11_Note( results ):
  """Both note-based fixits for the assignment-in-condition warning exist."""
  # First note: put parens around it
  parens_fixit = has_entries( {
    'text': contains_string( 'parentheses around the assignment' ),
    'chunks': contains(
      ChunkMatcher( '(',
                    LineColMatcher( 59, 8 ),
                    LineColMatcher( 59, 8 ) ),
      ChunkMatcher( ')',
                    LineColMatcher( 61, 12 ),
                    LineColMatcher( 61, 12 ) ) ),
    'location': LineColMatcher( 60, 8 ),
  } )

  # Second note: change to ==
  comparison_fixit = has_entries( {
    'text': contains_string( '==' ),
    'chunks': contains(
      ChunkMatcher( '==',
                    LineColMatcher( 60, 8 ),
                    LineColMatcher( 60, 9 ) ) ),
    'location': LineColMatcher( 60, 8 ),
  } )

  assert_that( results, has_entries( {
    'fixits': contains( parens_fixit, comparison_fixit )
  } ) )
def DebugInfo_FlagsWhenNoExtraConfAndInvalidCompilationDatabase_test( app ):
  """An unparsable database yields no database path and empty flags."""
  with TemporaryTestDir() as tmp_dir:
    # Deliberately invalid compile_commands.json content.
    compile_commands = 'garbage'
    with TemporaryClangProject( tmp_dir, compile_commands ):
      request_data = BuildRequest(
        filepath = os.path.join( tmp_dir, 'test.cc' ),
        filetype = 'cpp' )

      expected_items = contains(
        has_entries( { 'key': 'compilation database path',
                       'value': 'None' } ),
        has_entries( { 'key': 'flags',
                       'value': '[]' } ) )
      assert_that(
        app.post_json( '/debug_info', request_data ).json,
        has_entry( 'completer', has_entries( {
          'name': 'C-family',
          'servers': empty(),
          'items': expected_items
        } ) ) )
def Subcommands_RefactorRename_MultipleFiles_OnFileReadyToParse_test( app ):
  """Renaming touches files loaded both eagerly and via FileReadyToParse."""
  file1 = PathToTestFile( 'file1.js' )
  file2 = PathToTestFile( 'file2.js' )
  file3 = PathToTestFile( 'file3.js' )

  # This test is roughly the same as the previous one, except here file4.js is
  # pushed into the Tern engine via 'opening it in the editor' (i.e.
  # FileReadyToParse event). The first 3 are loaded into the tern server
  # because they are listed in the .tern-project file's loadEagerly option.
  file4 = PathToTestFile( 'file4.js' )

  app.post_json( '/event_notification',
                 BuildRequest( **{
                   'filetype': 'javascript',
                   'event_name': 'FileReadyToParse',
                   'contents': ReadFile( file4 ),
                   'filepath': file4,
                 } ),
                 expect_errors = False )

  expected_chunks = contains(
    ChunkMatcher( 'a-quite-long-string',
                  LocationMatcher( file1, 1, 5 ),
                  LocationMatcher( file1, 1, 11 ) ),
    ChunkMatcher( 'a-quite-long-string',
                  LocationMatcher( file1, 3, 14 ),
                  LocationMatcher( file1, 3, 20 ) ),
    ChunkMatcher( 'a-quite-long-string',
                  LocationMatcher( file2, 2, 14 ),
                  LocationMatcher( file2, 2, 20 ) ),
    ChunkMatcher( 'a-quite-long-string',
                  LocationMatcher( file3, 3, 12 ),
                  LocationMatcher( file3, 3, 18 ) ),
    ChunkMatcher( 'a-quite-long-string',
                  LocationMatcher( file4, 4, 22 ),
                  LocationMatcher( file4, 4, 28 ) ) )

  RunTest( app, {
    'description': 'FileReadyToParse loads files into tern server',
    'request': {
      'command': 'RefactorRename',
      'arguments': [ 'a-quite-long-string' ],
      'filepath': file1,
      'line_num': 3,
      'column_num': 14,
    },
    'expect': {
      'response': http.client.OK,
      'data': has_entries( {
        'fixits': contains( has_entries( {
          'chunks': expected_chunks,
          'location': LocationMatcher( file1, 3, 14 )
        } ) )
      } )
    }
  } )
def GetCompletions_Basic_test( app ):
  """Member completions carry location extra data; a filter query prunes."""
  filepath = PathToTestFile( 'basic.py' )

  def CompletionsAt( column ):
    # Request completions at line 7 and the given column.
    request = BuildRequest( filepath = filepath,
                            filetype = 'python',
                            contents = ReadFile( filepath ),
                            line_num = 7,
                            column_num = column )
    return app.post_json( '/completions', request ).json[ 'completions' ]

  results = CompletionsAt( 3 )
  assert_that(
    results,
    has_items(
      CompletionEntryMatcher( 'a', 'self.a = 1', {
        'extra_data': has_entry(
          'location', has_entries( {
            'line_num': 3,
            'column_num': 10,
            'filepath': filepath
          } )
        )
      } ),
      CompletionEntryMatcher( 'b', 'self.b = 2', {
        'extra_data': has_entry(
          'location', has_entries( {
            'line_num': 4,
            'column_num': 10,
            'filepath': filepath
          } )
        )
      } )
    )
  )

  # One column further, only 'a' still matches the typed prefix.
  results = CompletionsAt( 4 )
  assert_that(
    results,
    all_of(
      has_item(
        CompletionEntryMatcher( 'a', 'self.a = 1', {
          'extra_data': has_entry(
            'location', has_entries( {
              'line_num': 3,
              'column_num': 10,
              'filepath': filepath
            } )
          )
        } )
      ),
      is_not( has_item( CompletionEntryMatcher( 'b' ) ) )
    )
  )
def Diagnostics_FixIt_Available_test(app):
    """fixit_available is True only for diagnostics that carry a fixit."""
    contents = ReadFile(PathToTestFile("FixIt_Clang_cpp11.cpp"))

    event_data = BuildRequest(
        contents=contents,
        event_name="FileReadyToParse",
        filetype="cpp",
        compilation_flags=["-x", "c++", "-std=c++03",
                           "-Wall", "-Wextra", "-pedantic"],
    )
    response = app.post_json("/event_notification", event_data).json
    pprint(response)

    with_fixit = has_entries(
        {
            "location": has_entries({"line_num": 16, "column_num": 3}),
            "text": equal_to("switch condition type 'A' "
                             "requires explicit conversion to 'int'"),
            "fixit_available": True,
        }
    )
    without_fixit = has_entries(
        {
            "location": has_entries({"line_num": 11, "column_num": 3}),
            "text": equal_to("explicit conversion functions are a C++11 extension"),
            "fixit_available": False,
        }
    )
    assert_that(response, has_items(with_fixit, without_fixit))
def ZeroBasedLineAndColumn_test( self ):
  """Diagnostics are reported 1-based even though OmniSharp is 0-based."""
  filepath = self._PathToTestFile( 'testy', 'Program.cs' )
  with self._WrapOmniSharpServer( filepath ):
    contents = ReadFile( filepath )

    results = {}
    for _ in ( 0, 1 ):  # First call always returns blank for some reason
      event_data = self._BuildRequest( filepath = filepath,
                                       event_name = 'FileReadyToParse',
                                       filetype = 'cs',
                                       contents = contents )
      results = self._app.post_json( '/event_notification',
                                     event_data ).json

    location = has_entries( { 'line_num': 11, 'column_num': 2 } )
    extent = has_entries( {
      'start': has_entries( { 'line_num': 11, 'column_num': 2, } ),
      'end': has_entries( { 'line_num': 11, 'column_num': 2, } ),
    } )
    assert_that( results, contains( has_entries( {
      'kind': equal_to( 'ERROR' ),
      'text': contains_string(
        "Unexpected symbol `}'', expecting identifier" ),
      'location': location,
      'location_extent': extent
    } ) ) )
def Diagnostics_ClangCompleter_SimpleLocationExtent_test():
  """The diagnostic extent spans the 'baz' token (line 2, columns 2-5)."""
  app = TestApp( handlers.app )
  contents = """void foo() {
 baz = 5;
}
// Padding to 5 lines
// Padding to 5 lines
"""

  event_data = BuildRequest( compilation_flags = [ '-x', 'c++' ],
                             event_name = 'FileReadyToParse',
                             contents = contents,
                             filetype = 'cpp' )
  results = app.post_json( '/event_notification', event_data ).json

  extent_matcher = has_entries( {
    'start': has_entries( { 'line_num': 2, 'column_num': 2, } ),
    'end': has_entries( { 'line_num': 2, 'column_num': 5, } ),
  } )
  assert_that( results,
               contains( has_entries( {
                 'location_extent': extent_matcher
               } ) ) )
def GoToReferences_test( self ):
  """GoToReferences finds both in-file usages in cool_object.js."""
  cool_object = self._PathToTestFile( 'coollib', 'cool_object.js' )

  self._RunTest( {
    'description': 'GoToReferences works within file',
    'request': {
      'command': 'GoToReferences',
      'line_num': 17,
      'column_num': 29,
      'filepath': cool_object,
    },
    'expect': {
      'response': httplib.OK,
      'data': contains_inanyorder(
        has_entries( {
          'filepath': cool_object,
          'line_num': 17,
          'column_num': 29,
        } ),
        has_entries( {
          'filepath': cool_object,
          'line_num': 12,
          'column_num': 9,
        } ) )
    }
  } )
def test_pagination(self, google_api):
    """order/direction/limit/offset all slice the contact list correctly."""
    mario = has_entries(name='Mario Bros')
    luigi = has_entries(name='Luigi Bros')

    # (extra query params, expected ordering) pairs.
    cases = [
        (dict(order='name'), contains(luigi, mario)),
        (dict(order='name', direction='desc'), contains(mario, luigi)),
        (dict(order='name', limit=1), contains(luigi)),
        (dict(order='name', offset=1), contains(mario)),
    ]
    for params, expected_items in cases:
        result = self.list_(self.client, self.source_uuid, **params)
        assert_that(result, has_entries(items=expected_items))
def test_DebugInfo_ExtraConf_Global( self, app ):
  """/debug_info shows an initialized clangd with global-config settings."""
  request_data = BuildRequest( filepath = PathToTestFile( 'foo.cpp' ),
                               contents = '',
                               filetype = 'cpp' )
  test = { 'request': request_data }
  request_data[ 'contents' ] = ''
  RunAfterInitialized( app, test )

  expected_extras = contains_exactly(
    has_entries( { 'key': 'Server State',
                   'value': 'Initialized', } ),
    has_entries( { 'key': 'Project Directory',
                   'value': PathToTestFile(), } ),
    has_entries( { 'key': 'Settings',
                   'value': '{}', } ),
    has_entries( { 'key': 'Compilation Command',
                   'value': has_items( '-I', 'test' ), } ),
  )

  assert_that(
    app.post_json( '/debug_info', request_data ).json,
    has_entry( 'completer', has_entries( {
      'name': 'C-family',
      'servers': contains_exactly( has_entries( {
        'name': 'Clangd',
        'is_running': True,
        'extras': expected_extras,
      } ) ),
      'items': empty()
    } ) ) )
def test_override_policies(self):
    """An override file updates users and policies after a service restart."""
    # Install the override and restart so it takes effect.
    self._copy_override_filename('override.yml')
    self._service_update(recreate=True)

    users = self.auth.users.list()['items']
    assert_that(
        users,
        has_items(
            has_entries(username='******'),
            has_entries(username='******'),
            has_entries(username='******'),
            has_entries(username='******'),
        ),
    )

    policies = self.auth.policies.list()['items']
    assert_that(
        policies,
        has_items(
            has_entries(
                name='service-hashtag-internal',
                acl=contains_inanyorder(
                    '#',
                    'additional.acl',
                ),
            ),
            has_entries(
                name='service-additional-internal',
                acl=contains_inanyorder('#'),
            ),
        ),
    )

    # Clean up: remove the override and restore the original service state.
    self._delete_override_filename('override.yml')
    self._service_update(recreate=True)
def _expected_needles(needles):
    """Return a list of has_entries matchers, one per needle dict.

    Replaces the manual append loop with a list comprehension.
    """
    return [h.has_entries(needle) for needle in needles]
def test_get_multi_tenant(main, sub):
    """Outcalls are only visible from their own tenant."""
    # Main-tenant resource must not be visible from the sub tenant.
    response = confd.outcalls(main['id']).get(wazo_tenant=SUB_TENANT)
    response.assert_match(404, e.not_found(resource='Outcall'))

    # Sub-tenant resource is visible from the main tenant.
    response = confd.outcalls(sub['id']).get(wazo_tenant=MAIN_TENANT)
    assert_that(response.item, has_entries(**sub))
def all_connections_ok():
    """Assert the webhookd bus consumer reports an 'ok' status."""
    status = webhookd.status.get()
    assert_that(status['bus_consumer'], has_entries({'status': 'ok'}))
def Diagnostics_CUDA_Kernel_test(app):
    """All six CUDA kernel-call diagnostics are reported with their ranges."""
    filepath = PathToTestFile('cuda', 'kernel_call.cu')
    contents = ReadFile(filepath)
    request = {'contents': contents,
               'filepath': filepath,
               'filetype': 'cuda'}
    test = {'request': request, 'route': '/receive_messages'}
    response = RunAfterInitialized(app, test)
    pprint(response)

    def error_at(line, start_col, end_col, text):
        # Every expected diagnostic is an ERROR whose location_extent and
        # single range coincide.
        extent = RangeMatcher(filepath, (line, start_col), (line, end_col))
        return has_entries({
            'kind': equal_to('ERROR'),
            'location': LocationMatcher(filepath, line, start_col),
            'location_extent': extent,
            'ranges': contains_exactly(extent),
            'text': equal_to(text),
            'fixit_available': False
        })

    assert_that(
        response,
        contains_exactly(has_entries({
            'diagnostics': has_items(
                error_at(59, 5, 6,
                         'Call to global function \'g1\' not configured'
                         ' [global_call_not_config]'),
                error_at(60, 9, 12,
                         'Too few execution configuration arguments to kernel '
                         'function call, expected at least 2, have 1'
                         ' [typecheck_call_too_few_args_at_least]'),
                error_at(61, 20, 21,
                         'Too many execution configuration arguments to '
                         'kernel function call, expected at most 4, have 5'
                         ' [typecheck_call_too_many_args_at_most]'),
                error_at(65, 15, 16,
                         'Kernel call to non-global function \'h1\''
                         ' [kern_call_not_global_function]'),
                error_at(68, 15, 16,
                         "Kernel function type 'int (*)(int)' must have "
                         "void return type [kern_type_not_void_return]"),
                error_at(70, 8, 18,
                         "Use of undeclared identifier 'undeclared'"
                         ' [undeclared_var_use]'),
            )
        })))
def test_get_schedule_relation(schedule, outcall):
    """A schedule linked to an outcall exposes that outcall in its GET."""
    with a.outcall_schedule(outcall, schedule):
        response = confd.schedules(schedule['id']).get()
        expected = has_entries(
            outcalls=contains(has_entries(id=outcall['id'])))
        assert_that(response.item, expected)
def _FixIt_Check_cpp11_DelAdd( self, results ):
  """A single fixit deletes one char at 48:3 and inserts '~' at 48:9."""
  def Chunk( text, line, start_col, end_col ):
    # Matcher for one replacement chunk with its replacement range.
    return has_entries( {
      'replacement_text': equal_to( text ),
      'range': has_entries( {
        'start': has_entries( { 'line_num': line,
                                'column_num': start_col } ),
        'end'  : has_entries( { 'line_num': line,
                                'column_num': end_col } ),
      } ),
    } )

  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        Chunk( '', 48, 3, 4 ),
        Chunk( '~', 48, 9, 9 ),
      ),
      'location': has_entries( { 'line_num': 48, 'column_num': 3 } )
    } ) )
  } ) )
def _FixIt_Check_cpp11_Ins( self, results ):
  """The fixit wraps the switch condition in static_cast<int>( ... ).

  First fixit:
    switch(A()) { // expected-error{{explicit conversion to}}
  """
  def Chunk( text, line, start_col, end_col ):
    # Matcher for one replacement chunk with its replacement range.
    return has_entries( {
      'replacement_text': equal_to( text ),
      'range': has_entries( {
        'start': has_entries( { 'line_num': line,
                                'column_num': start_col } ),
        'end'  : has_entries( { 'line_num': line,
                                'column_num': end_col } ),
      } ),
    } )

  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        Chunk( 'static_cast<int>(', 16, 10, 10 ),
        Chunk( ')', 16, 13, 13 )
      ),
      'location': has_entries( { 'line_num': 16, 'column_num': 3 } )
    } ) )
  } ) )
def Poll_Diagnostics_ChangeFileContents_test( app ):
  """New diagnostics arrive over the message poll after buffer contents change."""
  StartJavaCompleterServerInDirectory( app,
                                       PathToTestFile( DEFAULT_PROJECT_DIR ) )

  filepath = youcompleteme_Test
  old_contents = """package com.youcompleteme;

public class Test {
  public String test;
}"""

  messages_for_filepath = []

  def PollForMessagesInAnotherThread( filepath, contents ):
    # Collect only the messages that concern our file.
    try:
      for message in PollForMessages( app,
                                      { 'filepath': filepath,
                                        'contents': contents,
                                        'filetype': 'java' } ):
        if 'filepath' in message and message[ 'filepath' ] == filepath:
          messages_for_filepath.append( message )
    except PollForMessagesTimeoutException:
      pass

  StartThread( PollForMessagesInAnotherThread, filepath, old_contents )

  # Introduce a duplicate field to trigger two ERROR diagnostics.
  new_contents = """package com.youcompleteme;

public class Test {
  public String test;
  public String test;
}"""

  event_data = BuildRequest( event_name = 'FileReadyToParse',
                             contents = new_contents,
                             filepath = filepath,
                             filetype = 'java' )
  app.post_json( '/event_notification', event_data ).json

  def DuplicateFieldDiagnostic( line ):
    return has_entries( {
      'kind': 'ERROR',
      'text': 'Duplicate field Test.test',
      'location': LocationMatcher( youcompleteme_Test, line, 17 ),
      'location_extent': RangeMatcher( youcompleteme_Test,
                                       ( line, 17 ),
                                       ( line, 21 ) ),
      'ranges': contains( RangeMatcher( youcompleteme_Test,
                                        ( line, 17 ),
                                        ( line, 21 ) ) ),
      'fixit_available': False
    } )

  # Poll the collected messages until the expected diagnostics show up,
  # giving up after 10 seconds.
  expiration = time.time() + 10
  while True:
    try:
      assert_that(
        messages_for_filepath,
        has_item( has_entries( {
          'filepath': filepath,
          'diagnostics': contains(
            DuplicateFieldDiagnostic( 4 ),
            DuplicateFieldDiagnostic( 5 )
          )
        } ) ) )
      break
    except AssertionError:
      if time.time() > expiration:
        raise
      time.sleep( 0.25 )
TestLauncher = ProjectPath( 'TestLauncher.java' ) TestWidgetImpl = ProjectPath( 'TestWidgetImpl.java' ) youcompleteme_Test = PathToTestFile( DEFAULT_PROJECT_DIR, 'src', 'com', 'youcompleteme', 'Test.java' ) DIAG_MATCHERS_PER_FILE = { ProjectRoot: [], InternalNonProjectFile: [], TestFactory: contains_inanyorder( has_entries( { 'kind': 'WARNING', 'text': 'The value of the field TestFactory.Bar.testString is not used', 'location': LocationMatcher( TestFactory, 15, 19 ), 'location_extent': RangeMatcher( TestFactory, ( 15, 19 ), ( 15, 29 ) ), 'ranges': contains( RangeMatcher( TestFactory, ( 15, 19 ), ( 15, 29 ) ) ), 'fixit_available': False } ), has_entries( { 'kind': 'ERROR', 'text': 'Wibble cannot be resolved to a type', 'location': LocationMatcher( TestFactory, 18, 24 ), 'location_extent': RangeMatcher( TestFactory, ( 18, 24 ), ( 18, 30 ) ), 'ranges': contains( RangeMatcher( TestFactory, ( 18, 24 ), ( 18, 30 ) ) ), 'fixit_available': False } ), has_entries( { 'kind': 'ERROR', 'text': 'Wibble cannot be resolved to a variable', 'location': LocationMatcher( TestFactory, 19, 15 ),
def webhookd_is_connected():
    """Assert that wazo-webhookd is up and its bus consumer is connected.

    Raises:
        AssertionError: if the status endpoint is unreachable or the bus
            consumer is not reported as ``ok``.
    """
    try:
        status = webhookd.status.get()
    except RequestException as e:
        # Chain the underlying request error so connection failures remain
        # diagnosable in the traceback instead of being silently discarded.
        raise AssertionError('wazo-webhookd is not up yet') from e
    assert_that(status['connections'], has_entries({'bus_consumer': 'ok'}))
def GetCompletions_Basic_test( app ):
  """Method completions carry kind/detailed info; filtering narrows them."""
  filepath = PathToTestFile( 'test.ts' )

  # Matchers shared by both requests below.
  method_a = CompletionEntryMatcher(
      'methodA',
      '(method) Foo.methodA(): void',
      extra_params = {
        'kind': 'method',
        'detailed_info': '(method) Foo.methodA(): void\n\n'
                         'Unicode string: 说话' } )
  method_b = CompletionEntryMatcher(
      'methodB',
      '(method) Foo.methodB(): void',
      extra_params = {
        'kind': 'method',
        'detailed_info': '(method) Foo.methodB(): void' } )
  method_c = CompletionEntryMatcher(
      'methodC',
      '(method) Foo.methodC(a: { foo: string; bar: number; }): void',
      extra_params = {
        'kind': 'method',
        'detailed_info': '(method) Foo.methodC(a: {\n'
                         ' foo: string;\n'
                         ' bar: number;\n'
                         '}): void' } )

  # With the cursor right after the dot, every method is offered.
  RunTest( app, {
    'description': 'Extra and detailed info when completions are methods',
    'request': {
      'line_num': 17,
      'column_num': 6,
      'filepath': filepath
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': contains_inanyorder( method_a, method_b, method_c )
      } )
    }
  } )

  # One typed character narrows the candidates down to methodA.
  RunTest( app, {
    'description': 'Filtering works',
    'request': {
      'line_num': 17,
      'column_num': 7,
      'filepath': filepath
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': contains_inanyorder( method_a )
      } )
    }
  } )
def assert_does_not_have_any_dicts(haystack, needles):
    """Assert that no item of *haystack* matches any of the *needles* dicts."""
    for expected_entries in needles:
        absent = h.is_not(h.has_item(h.has_entries(expected_entries)))
        h.assert_that(haystack, absent)
def Subcommands_Format_WholeFile_Spaces_test(app):
    """Format rewrites the whole file's indentation using 4-space tabs."""
    filepath = PathToTestFile('test.ts')

    # (start_line, start_col, end_line, end_col) of every whitespace span the
    # formatter is expected to replace, in file order.
    spans = [
        (3, 1, 3, 3),
        (4, 1, 4, 3),
        (4, 14, 4, 14),
        (5, 1, 5, 3),
        (5, 14, 5, 14),
        (6, 1, 6, 3),
        (7, 1, 7, 5),
        (8, 1, 8, 7),
        (9, 1, 9, 7),
        (10, 1, 10, 5),
        (11, 1, 11, 3),
        (11, 6, 11, 6),
        (27, 1, 27, 3),
        (28, 1, 28, 4),
        (29, 1, 29, 4),
        (30, 1, 30, 3),
        (30, 17, 30, 17),
    ]
    expected_chunks = [
        ChunkMatcher(' ',
                     LocationMatcher(filepath, start_line, start_col),
                     LocationMatcher(filepath, end_line, end_col))
        for start_line, start_col, end_line, end_col in spans
    ]

    RunTest(
        app, {
            'description': 'Formatting is applied on the whole file '
                           'with tabs composed of 4 spaces',
            'request': {
                'command': 'Format',
                'filepath': filepath,
                'options': {
                    'tab_size': 4,
                    'insert_spaces': True
                }
            },
            'expect': {
                'response': requests.codes.ok,
                'data': has_entries({
                    'fixits': contains_exactly(
                        has_entries({
                            'chunks': contains_exactly(*expected_chunks)
                        }))
                })
            }
        })
def test_that_an_empty_body_returns_400(self):
    """Posting a JSON null body must be rejected with a 400 invalid-data error."""
    response = self.app.post(self.url, json='null')
    assert_that(response.status_code, equal_to(400))
    assert_that(response.json, has_entries('error_id', 'invalid-data'))
def matcher(self):
    """Return a matcher for a list-valued ``data`` field."""
    items_matcher = only_contains(super().matcher)
    # An optional collection is also allowed to be completely empty.
    if self.optional:
        items_matcher = any_of(empty(), items_matcher)
    return has_entries(data=all_of(instance_of(list), items_matcher))
def _FixIt_Check_cpp11_InsMultiLine( self, results ):
  # Similar to _FixIt_Check_cpp11_1 but the inserts are split across lines.
  def insertion( text, line, column ):
    # A pure insertion: a zero-width range whose start and end coincide.
    point = has_entries( { 'line_num': line, 'column_num': column } )
    return has_entries( {
      'replacement_text': equal_to( text ),
      'range': has_entries( { 'start': point, 'end': point } ),
    } )

  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains( insertion( 'static_cast<int>(', 26, 7 ),
                          insertion( ')', 28, 2 ) ),
      'location': has_entries( { 'line_num': 25, 'column_num': 3 } )
    } ) )
  } ) )
def is_multipoly():
    """Return matcher for MultiPolygon."""
    expected = {"type": "MultiPolygon", "coordinates": is_geo_multi()}
    return has_entries(expected)
def _FixIt_Check_cpp11_MultiSecond( self, results ):
  def chunk( text, start, end ):
    # Matcher for one replacement chunk covering [start, end), where start
    # and end are (line, column) pairs.
    return has_entries( {
      'replacement_text': equal_to( text ),
      'range': has_entries( {
        'start': has_entries( { 'line_num': start[ 0 ],
                                'column_num': start[ 1 ] } ),
        'end': has_entries( { 'line_num': end[ 0 ],
                              'column_num': end[ 1 ] } ),
      } ),
    } )

  assert_that( results, has_entries( {
    'fixits': contains(
      # second fix-it at 54,52
      has_entries( {
        'chunks': contains( chunk( '', ( 54, 52 ), ( 54, 53 ) ),
                            chunk( '~', ( 54, 58 ), ( 54, 58 ) ) ),
        'location': has_entries( { 'line_num': 54, 'column_num': 52 } )
      } ),
      # first fix-it at 54,16
      has_entries( {
        'chunks': contains( chunk( 'foo', ( 54, 16 ), ( 54, 19 ) ) ),
        'location': has_entries( { 'line_num': 54, 'column_num': 16 } )
      } ) )
  } ) )
def is_featurecollection(props: dict) -> BaseMatcher:
    """Get matcher for collection with given feature & properties."""
    features = only_contains(is_feature(props))
    return has_entries({"type": "FeatureCollection", "features": features})
def Diagnostics_LocationExtent_MissingSemicolon_test(app):
    """Diagnostics carry correct locations and (possibly multiline) extents."""
    filepath = PathToTestFile('location_extent.cc')
    contents = ReadFile(filepath)

    request = {'contents': contents, 'filepath': filepath, 'filetype': 'cpp'}
    response = RunAfterInitialized(app, {'request': request,
                                         'route': '/receive_messages'})
    pprint(response)

    def error_at(location, extent, text):
        # Matcher for one ERROR diagnostic: |location| is (line, col);
        # |extent| is ((start_line, start_col), (end_line, end_col)).
        line, column = location
        return has_entries({
            'kind': equal_to('ERROR'),
            'location': LocationMatcher(filepath, line, column),
            'location_extent': RangeMatcher(filepath, *extent),
            'ranges': contains_exactly(RangeMatcher(filepath, *extent)),
            'text': equal_to(text),
            'fixit_available': False
        })

    assert_that(
        response,
        contains_exactly(
            has_entries({
                'diagnostics': has_items(
                    error_at((2, 9), ((2, 9), (2, 9)),
                             "Expected ';' at end of declaration list (fix "
                             "available) [expected_semi_decl_list]"),
                    error_at((5, 1), ((5, 1), (6, 11)),
                             "Unknown type name 'multiline_identifier'"
                             " [unknown_typename]"),
                    error_at((8, 7), ((8, 7), (8, 11)),
                             'Constructor cannot have a return type'
                             ' [constructor_return_type]'))
            })))
def matcher(self) -> BaseMatcher:
    """Return a matcher checking the resource id and type fields."""
    type_matcher = equal_to(self.resource_name)
    return has_entries(id=instance_of(str), type=type_matcher)
def Diagnostics_UpdatedOnBufferVisit_test(app):
    # End-to-end check: diagnostics are refreshed when a header included by the
    # translation unit changes on disk and the buffer is saved then revisited.
    with TemporaryTestDir() as tmp_dir:
        source_file = os.path.join(tmp_dir, 'source.cpp')
        # NOTE(review): line breaks in the snippets below were reconstructed
        # from a whitespace-mangled source; chosen so 'S' sits at line 2,
        # column 20, matching the assertions below — confirm.
        source_contents = """#include "header.h"
int main() {return S::h();}
"""
        with open(source_file, 'w') as sf:
            sf.write(source_contents)

        header_file = os.path.join(tmp_dir, 'header.h')
        old_header_content = """#pragma once
struct S{static int h();};
"""
        with open(header_file, 'w') as hf:
            hf.write(old_header_content)

        # Minimal compilation database: treat sources as C++.
        flags_file = os.path.join(tmp_dir, 'compile_flags.txt')
        flags_content = """-xc++"""
        with open(flags_file, 'w') as ff:
            ff.write(flags_content)

        messages_request = {
            'contents': source_contents,
            'filepath': source_file,
            'filetype': 'cpp'
        }

        test = {'request': messages_request, 'route': '/receive_messages'}
        response = RunAfterInitialized(app, test)
        # Initially everything compiles, so no diagnostics are reported.
        assert_that(response,
                    contains_exactly(has_entries({'diagnostics': empty()})))

        # Overwrite header.cpp
        new_header_content = """#pragma once
static int h();
"""
        with open(header_file, 'w') as f:
            f.write(new_header_content)

        # Send BufferSaved notification for the header
        file_save_request = {
            "event_name": "FileSave",
            "filepath": header_file,
            "filetype": 'cpp'
        }
        app.post_json('/event_notification',
                      BuildRequest(**file_save_request))

        # Send BufferVisit notification
        buffer_visit_request = {
            "event_name": "BufferVisit",
            "filepath": source_file,
            "filetype": 'cpp'
        }
        app.post_json('/event_notification',
                      BuildRequest(**buffer_visit_request))

        # Assert diagnostics: with S's definition gone, the source no longer
        # compiles.
        for message in PollForMessages(app, messages_request):
            if 'diagnostics' in message:
                assert_that(
                    message,
                    has_entries({
                        'diagnostics': contains_exactly(
                            has_entries({
                                'kind': equal_to('ERROR'),
                                'text': "Use of undeclared identifier 'S' [undeclared_var_use]",
                                'ranges': contains_exactly(
                                    RangeMatcher(contains_string(source_file),
                                                 (2, 20), (2, 21))),
                                'location': LocationMatcher(
                                    contains_string(source_file), 2, 20),
                                'location_extent': RangeMatcher(
                                    contains_string(source_file),
                                    (2, 20), (2, 21))
                            }))
                    }))
                break

        # Restore original content
        with open(header_file, 'w') as f:
            f.write(old_header_content)

        # Send BufferSaved notification for the header
        file_save_request = {
            "event_name": "FileSave",
            "filepath": header_file,
            "filetype": 'cpp'
        }
        app.post_json('/event_notification',
                      BuildRequest(**file_save_request))

        # Send BufferVisit notification
        app.post_json('/event_notification',
                      BuildRequest(**buffer_visit_request))

        # Assert no diagnostics: the restored header compiles again.
        for message in PollForMessages(app, messages_request):
            print(f'Message { pformat( message ) }')
            if 'diagnostics' in message:
                assert_that(message, has_entries({'diagnostics': empty()}))
                break

        # Assert no dirty files
        with open(header_file, 'r') as f:
            assert_that(f.read(), equal_to(old_header_content))
def calld_is_ready():
    """Assert wazo-calld reports healthy ARI and bus-consumer connections."""
    status = integration_test.calld.status()
    expected = {
        'ari': has_entry('status', 'ok'),
        'bus_consumer': has_entry('status', 'ok'),
    }
    assert_that(status, has_entries(expected))
def Subcommands_RefactorRename_MultipleFiles_test(app):
    """RefactorRename rewrites every reference, across several files."""
    new_name = 'this-is-a-longer-string'
    # (file, line, start_col, end_col) of each occurrence to be renamed.
    occurrences = [
        ('test.ts', 25, 7, 10),
        ('test.ts', 33, 15, 18),
        ('test.ts', 37, 1, 4),
        ('file2.ts', 1, 5, 8),
        ('file3.ts', 1, 15, 18),
        ('test.tsx', 10, 8, 11),
    ]
    expected_chunks = [
        ChunkMatcher(new_name,
                     LocationMatcher(PathToTestFile(name), line, start_col),
                     LocationMatcher(PathToTestFile(name), line, end_col))
        for name, line, start_col, end_col in occurrences
    ]

    RunTest(
        app, {
            'description': 'RefactorRename works across files',
            'request': {
                'command': 'RefactorRename',
                'arguments': [new_name],
                'line_num': 25,
                'column_num': 9,
                'filepath': PathToTestFile('test.ts'),
            },
            'expect': {
                'response': requests.codes.ok,
                'data': has_entries({
                    'fixits': contains_exactly(
                        has_entries({
                            'chunks': contains_inanyorder(*expected_chunks),
                            'location': LocationMatcher(
                                PathToTestFile('test.ts'), 25, 9)
                        }))
                })
            }
        })
def Subcommands_FixIt_test(app):
    """FixIt on a missing method offers the three possible declarations."""
    filepath = PathToTestFile('test.ts')

    def fixit(text, pattern):
        # Every candidate fix-it inserts at (25, 12) and is anchored at the
        # request position (35, 12); only the description and text differ.
        return has_entries({
            'text': text,
            'chunks': contains_exactly(
                ChunkMatcher(matches_regexp(pattern),
                             LocationMatcher(filepath, 25, 12),
                             LocationMatcher(filepath, 25, 12))),
            'location': LocationMatcher(filepath, 35, 12)
        })

    RunTest(
        app, {
            'description': 'FixIt works on a non-existing method',
            'request': {
                'command': 'FixIt',
                'line_num': 35,
                'column_num': 12,
                'filepath': filepath,
            },
            'expect': {
                'response': requests.codes.ok,
                'data': has_entries({
                    'fixits': contains_inanyorder(
                        fixit("Declare method 'nonExistingMethod'",
                              '^\r?\n'
                              ' nonExistingMethod\\(\\) {\r?\n'
                              ' throw new Error\\("Method not implemented."\\);\r?\n'
                              ' }$'),
                        fixit("Declare property 'nonExistingMethod'",
                              '^\r?\n'
                              ' nonExistingMethod: any;$'),
                        fixit("Add index signature for property "
                              "'nonExistingMethod'",
                              '^\r?\n'
                              ' \\[x: string\\]: any;$'))
                })
            }
        })
def test_create_multi_tenant():
    """Templates created under a sub-tenant are owned by that tenant."""
    create_response = confd.endpoints.sip.templates.post(wazo_tenant=SUB_TENANT)
    create_response.assert_created()
    assert_that(create_response.item, has_entries(tenant_uuid=SUB_TENANT))
def test_edit_all_parameters(transport, template, sip):
    """Updating every section of a SIP template persists all option lists."""
    # One entry per *_section_options keyword of the PUT request; the same
    # table drives the assertion on the subsequent GET.
    sections = {
        'aor_section_options': [
            ['maximum_expiration', '3600'],
            ['remove_existing', 'yes'],
            ['max_contacts', '1'],
        ],
        'auth_section_options': [
            ['username', 'yiq8yej0'],
            ['password', '1337'],
        ],
        'endpoint_section_options': [
            ['force_rport', 'no'],
            ['rewrite_contact', 'yes'],
            ['callerid', '"Firstname Lastname" <666>'],
        ],
        'identify_section_options': [
            ['match', '54.172.60.0'],
            ['match', '54.172.60.1'],
            ['match', '54.172.60.2'],
            ['match', '54.172.60.3'],
        ],
        'registration_section_options': [
            ['client_uri', 'sip:[email protected]'],
            ['server_uri', 'sip:proxy.example.com'],
            ['expiration', '90'],
        ],
        'registration_outbound_auth_section_options': [
            ['username', 'outbound-registration-username'],
            ['password', 'outbound-registration-password'],
        ],
        'outbound_auth_section_options': [
            ['username', 'outbound-auth'],
            ['password', 'outbound-password'],
        ],
    }

    response = confd.endpoints.sip.templates(sip['uuid']).put(
        templates=[template],
        transport=transport,
        **sections,
    )
    response.assert_updated()

    response = confd.endpoints.sip.templates(sip['uuid']).get()
    # Option order is not guaranteed, so match each section in any order.
    expected_sections = {
        key: contains_inanyorder(*options) for key, options in sections.items()
    }
    assert_that(
        response.item,
        has_entries(
            templates=contains_inanyorder(
                has_entries(uuid=template['uuid'], label=template['label'])),
            transport=has_entries(uuid=transport['uuid']),
            **expected_sections,
        ),
    )