def EventNotification_FileReadyToParse_SyntaxKeywords_ClearCacheIfRestart_test(
    ycm, *args ):
  """Syntax keywords are sent again after the server is restarted.

  The first FileReadyToParse seeds the server with syntax keywords; a server
  restart must clear the client-side cache so the keywords are re-sent.
  """
  current_buffer = VimBuffer( name = 'current_buffer',
                              filetype = 'some_filetype' )

  with patch( 'ycm.client.event_notification.EventNotification.'
              'PostDataToHandlerAsync' ) as post_data_to_handler_async:
    with MockVimBuffers( [ current_buffer ], [ current_buffer ] ):
      ycm.OnFileReadyToParse()
      assert_that(
        # Positional arguments passed to PostDataToHandlerAsync.
        post_data_to_handler_async.call_args[ 0 ],
        contains( has_entry( 'syntax_keywords', has_items( 'foo', 'bar' ) ),
                  'event_notification' ) )

      # Send again the syntax keywords after restarting the server.
      ycm.RestartServer()
      WaitUntilReady()
      ycm.OnFileReadyToParse()
      assert_that(
        # Positional arguments passed to PostDataToHandlerAsync.
        post_data_to_handler_async.call_args[ 0 ],
        contains( has_entry( 'syntax_keywords', has_items( 'foo', 'bar' ) ),
                  'event_notification' ) )
def GetCompletions_CacheIsNotValid_DifferentNumberOfLines_test(
    app, candidates_list, *args ):
  """Changing the number of lines in the file invalidates the completion cache."""
  with PatchCompleter( DummyCompleter, 'dummy_filetype' ):
    # First request: two-line buffer.
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr\n'
                                               'objectB.attr',
                                    line_num = 1,
                                    column_num = 12 )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeA' ) ) )

    # Second request: same cursor position and current line, but only one
    # line in the buffer.
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12 )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeB' ) ) )

    # We ask for candidates twice because of cache invalidation:
    # both requests have the same cursor position and current line but the
    # number of lines in the current file is different.
    assert_that( candidates_list.call_count, equal_to( 2 ) )
def GetCompletions_CacheIsNotValid_DifferentExtraConfData_test(
    app, candidates_list, *args ):
  """Different extra conf data between otherwise identical requests busts the cache."""
  with PatchCompleter( DummyCompleter, 'dummy_filetype' ):
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12 )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeA' ) ) )

    # Same request with extra conf data attached.
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12,
                                    extra_conf_data = { 'key': 'value' } )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeB' ) ) )

    # We ask for candidates twice because of cache invalidation:
    # both requests are identical except the extra conf data.
    assert_that( candidates_list.call_count, equal_to( 2 ) )
def GetCompletions_CacheIsNotValid_DifferentForceSemantic_test(
    app, candidates_list, *args ):
  """Toggling force_semantic between identical requests busts the cache."""
  with PatchCompleter( DummyCompleter, 'dummy_filetype' ):
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12,
                                    force_semantic = True )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeA' ) ) )

    # Same request without forcing semantic completion.
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12 )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeB' ) ) )

    # We ask for candidates twice because of cache invalidation:
    # semantic completion is forced for one of the request, not the other.
    assert_that( candidates_list.call_count, equal_to( 2 ) )
def GetCompletions_CacheIsValid_test( app, candidates_list, *args ): with PatchCompleter( DummyCompleter, 'dummy_filetype' ): completion_data = BuildRequest( filetype = 'dummy_filetype', contents = 'object.attr', line_num = 1, column_num = 12 ) results = app.post_json( '/completions', completion_data ).json[ 'completions' ] assert_that( results, has_items( CompletionEntryMatcher( 'attribute' ) ) ) completion_data = BuildRequest( filetype = 'dummy_filetype', contents = 'object.attri', line_num = 1, column_num = 13 ) results = app.post_json( '/completions', completion_data ).json[ 'completions' ] assert_that( results, has_items( CompletionEntryMatcher( 'attribute' ) ) ) # We ask for candidates only once because of cache. assert_that( candidates_list.call_count, equal_to( 1 ) )
def GetCompletions_CacheIsNotValid_DifferentCompletionType_test(
    app, candidates_list, *args ):
  """A different completion type between identical requests busts the cache.

  NOTE(review): the two requests below are textually identical; the differing
  completion type presumably comes from the patched completer supplied via
  *args — confirm against the test fixture/decorators.
  """
  with PatchCompleter( DummyCompleter, 'dummy_filetype' ):
    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12 )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeA' ) ) )

    completion_data = BuildRequest( filetype = 'dummy_filetype',
                                    contents = 'objectA.attr',
                                    line_num = 1,
                                    column_num = 12 )

    results = app.post_json( '/completions',
                             completion_data ).json[ 'completions' ]
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'attributeB' ) ) )

    # We ask for candidates twice because of cache invalidation:
    # completion types are different between requests.
    assert_that( candidates_list.call_count, equal_to( 2 ) )
def test_parser_dirbuster_parse_report(self):
    """LOW and HIGH ranked Dirbuster 1.0-RC1 reports both parse correctly."""
    # Dirbuster 1.0-RC1
    from .dirbuster_reports import report_low, report_high
    for report, expected_ranking in ((report_low, LOW), (report_high, HIGH)):
        with mock.patch('ptp.libptp.parser.LineParser.handle_file',
                        side_effect=handle_file):
            DirbusterParser.__format__ = ''
            my_dirbuster = DirbusterParser(report)
            # The original `has_items(*[{...}] * 1)` was an obfuscated way
            # of asserting a single item.
            assert_that(my_dirbuster.parse_report(),
                        has_items({'ranking': expected_ranking}))
def Diagnostics_MultipleMissingIncludes_test( app ):
  """Each missing #include yields its own ERROR diagnostic with a location."""
  contents = ReadFile( PathToTestFile( 'multiple_missing_includes.cc' ) )

  event_data = BuildRequest( contents = contents,
                             event_name = 'FileReadyToParse',
                             filetype = 'cpp',
                             compilation_flags = [ '-x', 'c++' ] )

  response = app.post_json( '/event_notification', event_data ).json

  pprint( response )

  assert_that( response,
               has_items(
                 has_entries( {
                   'kind': equal_to( 'ERROR' ),
                   'location': has_entries( { 'line_num': 1,
                                              'column_num': 10 } ),
                   'text': equal_to( "'first_missing_include' file not found" ),
                   'fixit_available': False
                 } ),
                 has_entries( {
                   'kind': equal_to( 'ERROR' ),
                   'location': has_entries( { 'line_num': 2,
                                              'column_num': 10 } ),
                   'text': equal_to( "'second_missing_include' file not found" ),
                   'fixit_available': False
                 } ),
               ) )
def GetCompletions_ClangCompleter_WorksWithExplicitFlags_test():
  """Explicit compilation flags enable semantic member completion for C++.

  NOTE(review): the multi-line string layout below was reconstructed from a
  whitespace-mangled source — verify line_num/column against it.
  """
  app = TestApp( handlers.app )
  contents = """
struct Foo {
  int x;
  int y;
  char c;
};

int main()
{
  Foo foo;
  foo.
}
"""

  # 0-based line and column!
  completion_data = BuildRequest( filepath = '/foo.cpp',
                                  filetype = 'cpp',
                                  contents = contents,
                                  line_num = 10,
                                  column_num = 6,
                                  start_column = 6,
                                  compilation_flags = ['-x', 'c++'] )

  results = app.post_json( '/completions', completion_data ).json
  assert_that( results,
               has_items( CompletionEntryMatcher( 'c' ),
                          CompletionEntryMatcher( 'x' ),
                          CompletionEntryMatcher( 'y' ) ) )
def GoToReferences_test( self ):
  """GoToReferences on a TypeScript symbol returns every usage location."""
  filepath = self._PathToTestFile( 'test.ts' )
  contents = ReadFile( filepath )

  # The buffer must be visited first so the TSServer knows about the file.
  event_data = self._BuildRequest( filepath = filepath,
                                   filetype = 'typescript',
                                   contents = contents,
                                   event_name = 'BufferVisit' )
  self._app.post_json( '/event_notification', event_data )

  references_data = self._BuildRequest( completer_target = 'filetype_default',
                                        command_arguments = [
                                          'GoToReferences' ],
                                        line_num = 28,
                                        column_num = 6,
                                        contents = contents,
                                        filetype = 'typescript',
                                        filepath = filepath )

  expected = has_items(
    has_entries( { 'description': 'var bar = new Bar();',
                   'line_num'   : 28,
                   'column_num' : 5 } ),
    has_entries( { 'description': 'bar.testMethod();',
                   'line_num'   : 29,
                   'column_num' : 1 } ) )
  actual = self._app.post_json( '/run_completer_command',
                                references_data ).json

  assert_that( actual, expected )
def test_good_gotodefinition():
  """The /gotodefinition handler finds both definitions in goto.py."""
  app = TestApp( handlers.app )
  filepath = fixture_filepath( 'goto.py' )
  # Read via a context manager so the file handle is closed deterministically
  # instead of leaking until garbage collection.
  with open( filepath ) as source_file:
    source = source_file.read()
  request_data = {
    'source': source,
    'line': 10,
    'col': 3,
    'source_path': filepath
  }

  definitions = app.post_json( '/gotodefinition',
                               request_data ).json[ 'definitions' ]

  assert_that( definitions, has_length( 2 ) )
  assert_that( definitions, has_items(
    {
      'description': 'def f',
      'line': 1,
      'in_builtin_module': False,
      'column': 4,
      'is_keyword': False,
      'module_path': filepath,
      'docstring': 'f()\n\nModule method docs\nAre '
                   'dedented, like you might expect'
    },
    {
      'description': 'class C',
      'line': 6,
      'in_builtin_module': False,
      'column': 6,
      'is_keyword': False,
      'module_path': filepath,
      'docstring': 'Class Documentation'
    } ) )
def returns_from_inner_and_outer_function_can_be_distinguished():
    """Traced return types are attributed to the correct (nested) function.

    NOTE(review): the program string layout was reconstructed from a
    whitespace-mangled source; the lineno/col_offset assertions below pin
    where each def must sit.
    """
    program = """
def answer():
    def f():
        return 42
    return float(f())

print(answer())
"""
    trace = _run_and_trace(program)
    assert_that(trace, m.has_items(
        # Outer `answer` (line 2, col 0) returns a float.
        m.has_properties({
            "location": m.has_properties({"lineno": 2, "col_offset": 0}),
            "returns": describe(float)
        }),
        # Inner `f` (line 3, col 4) returns an int.
        m.has_properties({
            "location": m.has_properties({"lineno": 3, "col_offset": 4}),
            "returns": describe(int)
        }),
    ))
def Diagnostics_FixIt_Available_test(app):
    """fixit_available is True only for diagnostics clang can auto-fix."""
    contents = ReadFile(PathToTestFile("FixIt_Clang_cpp11.cpp"))

    # C++03 with pedantic warnings so the C++11 extension diagnostic fires.
    event_data = BuildRequest(
        contents=contents,
        event_name="FileReadyToParse",
        filetype="cpp",
        compilation_flags=["-x", "c++", "-std=c++03", "-Wall", "-Wextra", "-pedantic"],
    )

    response = app.post_json("/event_notification", event_data).json

    pprint(response)

    assert_that(
        response,
        has_items(
            has_entries(
                {
                    "location": has_entries({"line_num": 16, "column_num": 3}),
                    "text": equal_to("switch condition type 'A' " "requires explicit conversion to 'int'"),
                    "fixit_available": True,
                }
            ),
            has_entries(
                {
                    "location": has_entries({"line_num": 11, "column_num": 3}),
                    "text": equal_to("explicit conversion functions are a C++11 extension"),
                    "fixit_available": False,
                }
            ),
        ),
    )
def GetCompletions_IdentifierCompleter_WorksForSpecialIdentifierChars_test(
    app ):
  """Identifiers containing '-' (CSS properties) are collected and completed."""
  contents = """
textarea {
  font-family: sans-serif;
  font-size: 12px;
}"""
  event_data = BuildRequest( contents = contents,
                             filetype = 'css',
                             event_name = 'FileReadyToParse' )
  app.post_json( '/event_notification', event_data )

  # query is 'fo'
  completion_data = BuildRequest( contents = 'fo ' + contents,
                                  filetype = 'css',
                                  column_num = 3 )
  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]

  assert_that( results,
               has_items( CompletionEntryMatcher( 'font-size', '[ID]' ),
                          CompletionEntryMatcher( 'font-family', '[ID]' ) ) )
def EventNotification_FileReadyToParse_SyntaxKeywords_SeedWithCache_test(
    ycm, *args ):
  """Syntax keywords are sent on the first parse and omitted afterwards."""
  current_buffer = VimBuffer( name = 'current_buffer',
                              filetype = 'some_filetype' )

  with patch( 'ycm.client.event_notification.EventNotification.'
              'PostDataToHandlerAsync' ) as post_data_to_handler_async:
    with MockVimBuffers( [ current_buffer ], [ current_buffer ] ):
      ycm.OnFileReadyToParse()
      assert_that(
        # Positional arguments passed to PostDataToHandlerAsync.
        post_data_to_handler_async.call_args[ 0 ],
        contains( has_entry( 'syntax_keywords', has_items( 'foo', 'bar' ) ),
                  'event_notification' ) )

      # Do not send again syntax keywords in subsequent requests.
      ycm.OnFileReadyToParse()
      assert_that(
        # Positional arguments passed to PostDataToHandlerAsync.
        post_data_to_handler_async.call_args[ 0 ],
        contains( is_not( has_key( 'syntax_keywords' ) ),
                  'event_notification' ) )
def test_parser_wapiti221_xml_parse_report(self, mock_lxml_etree_parse):
    """A HIGH ranked Wapiti 2.2.1 XML report parses into the expected vuln."""
    from .wapiti_reports_2_2_1 import report_high
    with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find',
                    return_value=[report_high]):
        Wapiti221XMLParser.__format__ = ''
        my_wapiti221 = Wapiti221XMLParser()
        report = my_wapiti221.parse_report()
        # The original `has_items(*[{...}] * 1)` was an obfuscated way of
        # asserting a single item.
        assert_that(report, has_items(
            {'ranking': HIGH,
             'name': 'Cross Site Scripting',
             'description': '\nCross-site scripting (XSS) is a type of computer security vulnerability typically found in web applications which allow code injection by malicious web users into the web pages viewed by other users. Examples of such code include HTML code and client-side scripts. '}))
def GetCompletions_ClangCompleter_WorksWithExplicitFlags_test():
  """Explicit flags enable C++ member completion without an extra conf file.

  NOTE(review): the multi-line string layout below was reconstructed from a
  whitespace-mangled source — verify line_num/column against it.
  """
  app = TestApp( handlers.app )
  # Make sure the extra conf file is not loaded for this test.
  app.post_json( '/ignore_extra_conf_file',
                 { 'filepath': PathToTestFile( '.ycm_extra_conf.py' ) } )
  contents = """
struct Foo {
  int x;
  int y;
  char c;
};

int main()
{
  Foo foo;
  foo.
}
"""

  completion_data = BuildRequest( filepath = '/foo.cpp',
                                  filetype = 'cpp',
                                  contents = contents,
                                  line_num = 11,
                                  column_num = 7,
                                  compilation_flags = ['-x', 'c++'] )

  response_data = app.post_json( '/completions', completion_data ).json
  assert_that( response_data[ 'completions' ],
               has_items( CompletionEntryMatcher( 'c' ),
                          CompletionEntryMatcher( 'x' ),
                          CompletionEntryMatcher( 'y' ) ) )
  eq_( 7, response_data[ 'completion_start_column' ] )
def GetCompletions_ClangCompleter_WorksWithExplicitFlags_test():
    """Explicit compilation flags enable semantic member completion for C++.

    NOTE(review): the multi-line string layout below was reconstructed from a
    whitespace-mangled source — verify line_num/column against it.
    """
    app = TestApp(handlers.app)
    contents = """
struct Foo {
  int x;
  int y;
  char c;
};

int main()
{
  Foo foo;
  foo.
}
"""

    # 0-based line and column!
    completion_data = BuildRequest(
        filepath="/foo.cpp",
        filetype="cpp",
        contents=contents,
        line_num=10,
        column_num=6,
        start_column=6,
        compilation_flags=["-x", "c++"],
    )

    results = app.post_json("/completions", completion_data).json
    assert_that(
        results, has_items(CompletionEntryMatcher("c"), CompletionEntryMatcher("x"), CompletionEntryMatcher("y"))
    )
def GetCompletions_TypeScriptCompleter_test():
  """Forced semantic completion returns the TypeScript class methods."""
  app = TestApp( handlers.app )
  filepath = PathToTestFile( 'test.ts' )
  # Read via a context manager so the file handle is closed deterministically
  # instead of leaking until garbage collection.
  with open( filepath ) as source_file:
    contents = source_file.read()

  # The buffer must be visited first so the TSServer knows about the file.
  event_data = BuildRequest( filepath = filepath,
                             filetype = 'typescript',
                             contents = contents,
                             event_name = 'BufferVisit' )

  app.post_json( '/event_notification', event_data )

  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'typescript',
                                  contents = contents,
                                  force_semantic = True,
                                  line_num = 11,
                                  column_num = 6 )

  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               has_items( CompletionEntryMatcher( 'methodA' ),
                          CompletionEntryMatcher( 'methodB' ),
                          CompletionEntryMatcher( 'methodC' ) ) )
def GetCompletions_CsCompleter_MultipleSolution_Works_test():
  """C# completions work for both single- and multiple-solution layouts."""
  app = TestApp( handlers.app )
  app.post_json( '/ignore_extra_conf_file',
                 { 'filepath': PathToTestFile( '.ycm_extra_conf.py' ) } )
  filepaths = [ PathToTestFile( 'testy/Program.cs' ),
                PathToTestFile( 'testy-multiple-solutions/'
                                'solution-named-like-folder/'
                                'testy/'
                                'Program.cs' ) ]
  lines = [ 10, 9 ]
  for filepath, line in zip( filepaths, lines ):
    # Read via a context manager so each file handle is closed
    # deterministically instead of leaking until garbage collection.
    with open( filepath ) as source_file:
      contents = source_file.read()
    event_data = BuildRequest( filepath = filepath,
                               filetype = 'cs',
                               contents = contents,
                               event_name = 'FileReadyToParse' )

    app.post_json( '/event_notification', event_data )
    WaitUntilOmniSharpServerReady( app, filepath )

    completion_data = BuildRequest( filepath = filepath,
                                    filetype = 'cs',
                                    contents = contents,
                                    line_num = line,
                                    column_num = 12 )
    response_data = app.post_json( '/completions', completion_data ).json
    assert_that( response_data[ 'completions' ],
                 has_items( CompletionEntryMatcher( 'CursorLeft' ),
                            CompletionEntryMatcher( 'CursorSize' ) ) )
    eq_( 12, response_data[ 'completion_start_column' ] )

    # Shut the server down so it does not outlive the test.
    StopOmniSharpServer( app, filepath )
def GetCompletions_Basic_test( app ):
  """Go semantic completions at two columns on the same line both resolve."""
  filepath = PathToTestFile( 'test.go' )
  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'go',
                                  contents = ReadFile( filepath ),
                                  force_semantic = True,
                                  line_num = 9,
                                  column_num = 9 )

  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               all_of(
                 has_items(
                   CompletionEntryMatcher( 'Llongfile', 'untyped int' ),
                   CompletionEntryMatcher( 'Logger', 'struct' ) ) ) )

  # Two columns further into the identifier, only 'Logger' should remain.
  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'go',
                                  contents = ReadFile( filepath ),
                                  force_semantic = True,
                                  line_num = 9,
                                  column_num = 11 )

  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               all_of( has_item(
                 CompletionEntryMatcher( 'Logger', 'struct' ) ) ) )
def GetCompletions_CsCompleter_PathWithSpace_test():
  """C# completions work for a project path with spaces/non-ASCII characters."""
  app = TestApp( handlers.app )
  app.post_json( '/ignore_extra_conf_file',
                 { 'filepath': PathToTestFile( '.ycm_extra_conf.py' ) } )
  filepath = PathToTestFile( 'неприличное слово/Program.cs' )
  # Read via a context manager so the file handle is closed deterministically
  # instead of leaking until garbage collection.
  with open( filepath ) as source_file:
    contents = source_file.read()
  event_data = BuildRequest( filepath = filepath,
                             filetype = 'cs',
                             contents = contents,
                             event_name = 'FileReadyToParse' )

  app.post_json( '/event_notification', event_data )
  WaitUntilOmniSharpServerReady( app, filepath )

  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'cs',
                                  contents = contents,
                                  line_num = 9,
                                  column_num = 12 )
  response_data = app.post_json( '/completions', completion_data ).json
  assert_that( response_data[ 'completions' ],
               has_items( CompletionEntryMatcher( 'CursorLeft' ),
                          CompletionEntryMatcher( 'CursorSize' ) ) )
  eq_( 12, response_data[ 'completion_start_column' ] )

  # Shut the server down so it does not outlive the test.
  StopOmniSharpServer( app, filepath )
def GetCompletions_CsCompleter_Works_test():
    """C# completions are returned once the OmniSharp server is up."""
    app = TestApp(handlers.app)
    filepath = PathToTestFile("testy/Program.cs")
    # Read via a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(filepath) as source_file:
        contents = source_file.read()
    event_data = BuildRequest(filepath=filepath, filetype="cs", contents=contents, event_name="FileReadyToParse")
    app.post_json("/event_notification", event_data)

    # We need to wait until the server has started up.
    while True:
        result = app.post_json(
            "/run_completer_command",
            BuildRequest(completer_target="filetype_default", command_arguments=["ServerRunning"], filetype="cs"),
        ).json
        if result:
            break
        time.sleep(0.2)

    completion_data = BuildRequest(
        filepath=filepath, filetype="cs", contents=contents, line_num=8, column_num=11, start_column=11
    )

    results = app.post_json("/completions", completion_data).json
    assert_that(results, has_items(CompletionEntryMatcher("CursorLeft"), CompletionEntryMatcher("CursorSize")))

    # We need to turn off the CS server so that it doesn't stick around
    app.post_json(
        "/run_completer_command",
        BuildRequest(completer_target="filetype_default", command_arguments=["StopServer"], filetype="cs"),
    )
def GetCompletions_CsCompleter_HasBothImportsAndNonImport_test():
  """A forced-semantic 'Date' query returns imported and unimported types."""
  app = TestApp( handlers.app )
  app.post_json( '/ignore_extra_conf_file',
                 { 'filepath': PathToTestFile( '.ycm_extra_conf.py' ) } )
  filepath = PathToTestFile( 'testy/ImportTest.cs' )
  # Read via a context manager so the file handle is closed deterministically
  # instead of leaking until garbage collection.
  with open( filepath ) as source_file:
    contents = source_file.read()
  event_data = BuildRequest( filepath = filepath,
                             filetype = 'cs',
                             contents = contents,
                             event_name = 'FileReadyToParse' )

  app.post_json( '/event_notification', event_data )
  WaitUntilOmniSharpServerReady( app, filepath )

  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'cs',
                                  contents = contents,
                                  line_num = 9,
                                  column_num = 12,
                                  force_semantic = True,
                                  query = 'Date' )
  response_data = app.post_json( '/completions', completion_data ).json

  assert_that( response_data[ 'completions' ],
               has_items( CompletionEntryMatcher( 'DateTime' ),
                          CompletionEntryMatcher( 'DateTimeStyles' ) ) )

  # Shut the server down so it does not outlive the test.
  StopOmniSharpServer( app, filepath )
def FixIt_Available_test( self ):
  """fixit_available is set only for diagnostics clang can auto-fix."""
  # Read via a context manager so the file handle is closed deterministically
  # instead of leaking until garbage collection.
  with open( self._PathToTestFile( 'FixIt_Clang_cpp11.cpp' ) ) as test_file:
    contents = test_file.read()

  # C++03 with pedantic warnings so the C++11 extension diagnostic fires.
  event_data = self._BuildRequest( contents = contents,
                                   event_name = 'FileReadyToParse',
                                   filetype = 'cpp',
                                   compilation_flags = [ '-x', 'c++',
                                                         '-std=c++03',
                                                         '-Wall',
                                                         '-Wextra',
                                                         '-pedantic' ] )

  response = self._app.post_json( '/event_notification', event_data ).json

  pprint( response )

  assert_that( response, has_items(
    has_entries( {
      'location': has_entries( { 'line_num': 16, 'column_num': 3 } ),
      'text': equal_to( 'switch condition type \'A\' '
                        'requires explicit conversion to \'int\'' ),
      'fixit_available': True
    } ),
    has_entries( {
      'location': has_entries( { 'line_num': 11, 'column_num': 3 } ),
      'text': equal_to(
        'explicit conversion functions are a C++11 extension' ),
      'fixit_available': False
    } ),
  ) )
def verify_pool_vnx_sg_test(self, pool):
    """Verify every attribute of the 'vnx-sg_test' pool fixture."""
    # Collection-valued attributes.
    assert_that(pool.movers_id, has_items(1, 2))
    assert_that(pool.member_volumes, has_items(105))

    # Scalar attributes paired with the exact value the fixture must expose.
    expected_attributes = [
        ('name', 'vnx-sg_test'),
        ('description', "vnx-sg_test on 000196800192"),
        ('may_contain_slices_default', False),
        ('disk_type', 'Mixed'),
        ('size', 0),
        ('used_size', 0),
        ('total_size', 2077),
        ('virtual_provisioning', True),
        ('is_homogeneous', True),
        ('template_pool', 63),
        ('stripe_count', 8),
        ('stripe_size', 256),
        ('pool_id', 63),
    ]
    for attribute, value in expected_attributes:
        assert_that(getattr(pool, attribute), equal_to(value))
def GetCompletions_Basic_test( app ):
  """Python completions include declaration locations in extra_data."""
  filepath = PathToTestFile( 'basic.py' )
  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'python',
                                  contents = ReadFile( filepath ),
                                  line_num = 7,
                                  column_num = 3 )

  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               has_items(
                 CompletionEntryMatcher( 'a', 'self.a = 1', {
                   'extra_data': has_entry(
                     'location', has_entries( {
                       'line_num': 3,
                       'column_num': 10,
                       'filepath': filepath } ) )
                 } ),
                 CompletionEntryMatcher( 'b', 'self.b = 2', {
                   'extra_data': has_entry(
                     'location', has_entries( {
                       'line_num': 4,
                       'column_num': 10,
                       'filepath': filepath } ) )
                 } )
               ) )

  # One column further, only 'a' still matches and 'b' must be filtered out.
  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'python',
                                  contents = ReadFile( filepath ),
                                  line_num = 7,
                                  column_num = 4 )

  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               all_of(
                 has_item(
                   CompletionEntryMatcher( 'a', 'self.a = 1', {
                     'extra_data': has_entry(
                       'location', has_entries( {
                         'line_num': 3,
                         'column_num': 10,
                         'filepath': filepath } ) )
                   } ) ),
                 is_not( has_item( CompletionEntryMatcher( 'b' ) ) ) ) )
def test_good_gotodefinition():
    """The /gotodefinition handler finds both definitions in goto.py."""
    app = TestApp(handlers.app)
    filepath = fixture_filepath("goto.py")
    # Read via a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(filepath) as source_file:
        source = source_file.read()
    request_data = {"source": source, "line": 10, "col": 3, "source_path": filepath}

    definitions = app.post_json("/gotodefinition", request_data).json["definitions"]
    assert_that(definitions, has_length(2))
    assert_that(
        definitions,
        has_items(
            {
                "module_path": filepath,
                "name": "f",
                "in_builtin_module": False,
                "line": 1,
                "column": 4,
                "docstring": "f()\n\nModule method docs\nAre " "dedented, like you might expect",
                "description": "def f",
                "is_keyword": False,
            },
            {
                "module_path": filepath,
                "name": "C",
                "in_builtin_module": False,
                "line": 6,
                "column": 6,
                "docstring": "Class Documentation",
                "description": "class C",
                "is_keyword": False,
            },
        ),
    )
def MultipleSolution_test( self ):
  """C# completions work for both single- and multi-solution layouts."""
  filepaths = [ self._PathToTestFile( 'testy', 'Program.cs' ),
                self._PathToTestFile( 'testy-multiple-solutions',
                                      'solution-named-like-folder',
                                      'testy',
                                      'Program.cs' ) ]
  lines = [ 10, 9 ]
  for filepath, line in zip( filepaths, lines ):
    # Read via a context manager so each file handle is closed
    # deterministically instead of leaking until garbage collection.
    with open( filepath ) as source_file:
      contents = source_file.read()
    event_data = self._BuildRequest( filepath = filepath,
                                     filetype = 'cs',
                                     contents = contents,
                                     event_name = 'FileReadyToParse' )

    self._app.post_json( '/event_notification', event_data )
    self._WaitUntilOmniSharpServerReady( filepath )

    completion_data = self._BuildRequest( filepath = filepath,
                                          filetype = 'cs',
                                          contents = contents,
                                          line_num = line,
                                          column_num = 12 )
    response_data = self._app.post_json( '/completions',
                                         completion_data ).json
    assert_that(
      response_data[ 'completions' ],
      has_items( self._CompletionEntryMatcher( 'CursorLeft' ),
                 self._CompletionEntryMatcher( 'CursorSize' ) ) )
    eq_( 12, response_data[ 'completion_start_column' ] )

    # Shut the server down so it does not outlive the test.
    self._StopOmniSharpServer( filepath )
def HasBothImportsAndNonImport_test( self ):
  """A forced-semantic 'Date' query returns imported and unimported types."""
  filepath = self._PathToTestFile( 'testy', 'ImportTest.cs' )
  # Read via a context manager so the file handle is closed deterministically
  # instead of leaking until garbage collection.
  with open( filepath ) as source_file:
    contents = source_file.read()
  event_data = self._BuildRequest( filepath = filepath,
                                   filetype = 'cs',
                                   contents = contents,
                                   event_name = 'FileReadyToParse' )

  self._app.post_json( '/event_notification', event_data )
  self._WaitUntilOmniSharpServerReady( filepath )

  completion_data = self._BuildRequest( filepath = filepath,
                                        filetype = 'cs',
                                        contents = contents,
                                        line_num = 9,
                                        column_num = 12,
                                        force_semantic = True,
                                        query = 'Date' )
  response_data = self._app.post_json( '/completions',
                                       completion_data ).json

  assert_that(
    response_data[ 'completions' ],
    has_items( self._CompletionEntryMatcher( 'DateTime' ),
               self._CompletionEntryMatcher( 'DateTimeStyles' ) ) )

  # Shut the server down so it does not outlive the test.
  self._StopOmniSharpServer( filepath )
def test_message_unmarshalling(self):
    """A payload published on the remote bus is unmarshalled locally."""
    event_name = 'unmarshaller'
    payload = {
        'data': {
            'name': event_name,
            'id': 4567,
            'value': 'something'
        },
        'name': event_name,
        'origin_uuid': '00000000-0000-0000-0000-100000000000',
        'timestamp': datetime.now().isoformat(),
    }

    with self.local_event(event_name, routing_key='other.key.#'):
        self.remote_bus.publish(event_name,
                                payload=payload,
                                routing_key='other.key.1')
        # The locally received message carries the unmarshalled data fields.
        assert_that(
            self.local_messages(event_name, 1),
            has_items(has_entries(id=4567, value='something')),
        )
def test_given_2_incalls_on_same_user_then_returns_two_items(self):
    """find_all_line_extensions_by_line_id returns one item per incall."""
    user_line_row = self.add_user_line_with_exten(exten='1000',
                                                  context='default')
    extension_row = self.add_extension(exten='1000', context='from-extern')
    second_extension_row = self.add_extension(exten='1001',
                                              context='from-extern')
    # Two incalls on the same user, one per extension.
    self.create_incall_row_for_user(user_line_row.user_id, extension_row.id)
    self.create_incall_row_for_user(user_line_row.user_id,
                                    second_extension_row.id)

    first_line_extension = LineExtension(line_id=user_line_row.line_id,
                                         extension_id=extension_row.id)
    second_line_extension = LineExtension(
        line_id=user_line_row.line_id,
        extension_id=second_extension_row.id)

    result = dao.find_all_line_extensions_by_line_id(user_line_row.line_id)

    assert_that(result, has_items(first_line_extension,
                                  second_line_extension))
def GetCompletions_MultipleSolution_test(app):
  """C# completions work for projects in differently laid-out solutions."""
  filepaths = [
    PathToTestFile('testy', 'Program.cs'),
    PathToTestFile('testy-multiple-solutions', 'solution-named-like-folder',
                   'testy', 'Program.cs')
  ]
  # The target line differs between the two projects.
  lines = [10, 9]
  for filepath, line in zip(filepaths, lines):
    with WrapOmniSharpServer(app, filepath):
      contents = ReadFile(filepath)

      completion_data = BuildRequest(filepath=filepath,
                                     filetype='cs',
                                     contents=contents,
                                     line_num=line,
                                     column_num=12)
      response_data = app.post_json('/completions', completion_data).json

      assert_that(
        response_data['completions'],
        has_items(CompletionEntryMatcher('CursorLeft'),
                  CompletionEntryMatcher('CursorSize')))
      eq_(12, response_data['completion_start_column'])
def GetCompletions_Basic_test(app):
  """Go completions carry detailed_info, menu_text and kind metadata."""
  filepath = PathToTestFile('td', 'test.go')
  completion_data = BuildRequest(filepath=filepath,
                                 filetype='go',
                                 contents=ReadFile(filepath),
                                 force_semantic=True,
                                 line_num=10,
                                 column_num=9)

  results = app.post_json('/completions',
                          completion_data).json['completions']
  assert_that(
    results,
    all_of(
      has_items(
        CompletionEntryMatcher(
          'Llongfile', 'int', {
            'detailed_info': 'Llongfile\n\n'
                             'These flags define which text to'
                             ' prefix to each log entry generated'
                             ' by the Logger.',
            'menu_text': 'Llongfile',
            'kind': 'Constant',
          }),
        CompletionEntryMatcher(
          'Logger', 'struct{...}', {
            'detailed_info': 'Logger\n\n'
                             'A Logger represents an active logging'
                             ' object that generates lines of output'
                             ' to an io.Writer.',
            'menu_text': 'Logger',
            'kind': 'Struct',
          }))))
def EventNotification_FileReadyToParse_SyntaxKeywords_SeedWithCache_test(
    ycm, *args):
  """Syntax keywords are sent on the first parse and omitted afterwards."""
  current_buffer = VimBuffer(name='current_buffer', filetype='some_filetype')

  with patch('ycm.client.event_notification.EventNotification.'
             'PostDataToHandlerAsync') as post_data_to_handler_async:
    with MockVimBuffers([current_buffer], [current_buffer]):
      ycm.OnFileReadyToParse()
      assert_that(
          # Positional arguments passed to PostDataToHandlerAsync.
          post_data_to_handler_async.call_args[0],
          contains(has_entry('syntax_keywords', has_items('foo', 'bar')),
                   'event_notification'))

      # Do not send again syntax keywords in subsequent requests.
      ycm.OnFileReadyToParse()
      assert_that(
          # Positional arguments passed to PostDataToHandlerAsync.
          post_data_to_handler_async.call_args[0],
          contains(is_not(has_key('syntax_keywords')),
                   'event_notification'))
def test_given_one_call_and_one_user_when_connect_user_then_the_two_are_talking(self):
    """connect_user originates a channel to the user's line and returns its id."""
    # An existing ringing call and the channel that will be originated.
    self.ari.set_channels(MockChannel(id='call-id'),
                          MockChannel(id='new-call-id', ))
    self.ari.set_channel_variable(
        {'new-call-id': {'XIVO_USERUUID': 'user-uuid'}})
    self.ari.set_global_variables(
        {'XIVO_CHANNELS_call-id': json.dumps({'app': 'sw',
                                              'app_instance': 'sw1',
                                              'state': 'ringing'})})
    # The user and the SIP line the originate should target.
    self.confd.set_users(MockUser(uuid='user-uuid'))
    self.confd.set_lines(MockLine(id='line-id', name='line-name',
                                  protocol='sip'))
    self.confd.set_user_lines({'user-uuid': [MockUserLine('line-id')]})
    self.ari.set_originates(MockChannel(id='new-call-id'))

    new_call = self.ctid_ng.connect_user('call-id', 'user-uuid')

    assert_that(new_call, has_entries({
        'call_id': 'new-call-id'
    }))
    # ARI must have been asked to originate towards the user's line.
    assert_that(self.ari.requests(), has_entry('requests', has_items(
        has_entries({
            'method': 'POST',
            'path': '/ari/channels',
            'query': contains_inanyorder(
                ['app', 'callcontrol'],
                ['endpoint', 'sip/line-name'],
                ['appArgs', 'sw1,dialed_from,call-id']),
        }))))
def LanguageServerCompleter_GetCompletions_List_test( app ):
  """A plain-list LSP completion response is handled like a CompletionList."""
  completer = MockCompleter()
  request_data = RequestWrap( BuildRequest() )

  # Server replies with a bare list of items rather than a CompletionList.
  completion_response = { 'result': [ { 'label': 'test' } ] }

  resolve_responses = [
    { 'result': { 'label': 'test' } },
  ]

  with patch.object( completer, '_is_completion_provider', True ):
    with patch.object( completer.GetConnection(),
                       'GetResponse',
                       side_effect = [ completion_response ] +
                                     resolve_responses ):
      # Second element of the tuple is the is-incomplete flag.
      assert_that(
        completer.ComputeCandidatesInner( request_data, 1 ),
        contains_exactly(
          has_items( has_entries( { 'insertion_text': 'test' } ) ),
          False ) )
def GetCompletions_UltiSnipsCompleter_Works_test( app ):
  """Snippets registered via BufferVisit show up as '<snip>' completions."""
  snippets = [
    { 'trigger': 'foo', 'description': 'bar' },
    { 'trigger': 'zoo', 'description': 'goo' },
  ]
  app.post_json( '/event_notification',
                 BuildRequest( event_name = 'BufferVisit',
                               ultisnips_snippets = snippets ) )

  request = BuildRequest( contents = 'oo ', column_num = 3 )
  completions = app.post_json( '/completions',
                               request ).json[ 'completions' ]

  assert_that(
    completions,
    has_items(
      CompletionEntryMatcher( 'foo', extra_menu_info = '<snip> bar' ),
      CompletionEntryMatcher( 'zoo', extra_menu_info = '<snip> goo' ) ) )
def test_create_voicemail_with_all_parameters():
    """Creating a voicemail with every field set echoes all of them back."""
    number, context = vm_helper.generate_number_and_context()
    parameters = {'name': 'full',
                  'number': number,
                  'context': context,
                  'email': '*****@*****.**',
                  'pager': '*****@*****.**',
                  'language': 'en_US',
                  'timezone': 'eu-fr',
                  'password': '******',
                  'max_messages': 10,
                  'attach_audio': True,
                  'ask_password': False,
                  'delete_messages': True,
                  'enabled': True,
                  'options': [["saycid", "yes"],
                              ["emailbody", "this\nis\ra\temail|body"]]}

    # The response must echo every submitted field, including the options
    # list (order-insensitive).
    expected = has_entries({'name': 'full',
                            'number': number,
                            'context': context,
                            'email': '*****@*****.**',
                            'pager': '*****@*****.**',
                            'language': 'en_US',
                            'timezone': 'eu-fr',
                            'password': '******',
                            'max_messages': 10,
                            'attach_audio': True,
                            'ask_password': False,
                            'delete_messages': True,
                            'enabled': True,
                            'options': has_items(
                                ["saycid", "yes"],
                                ["emailbody", "this\nis\ra\temail|body"])
                            })

    response = confd.voicemails.post(parameters)
    response.assert_created('voicemails')
    assert_that(response.item, expected)
def assert_function():
    """Assert the chat message produced the expected bus events.

    Closure over `self` and `message` from the enclosing test.
    """
    assert_that(
        self.events.accumulate(),
        has_items(
            {
                'name': 'chat_message_event',
                'origin_uuid': XIVO_UUID,
                'required_acl': 'events.chat.message.{}.{}'.format(
                    XIVO_UUID, message.to),
                'data': {
                    'alias': message.alias,
                    'to': [XIVO_UUID, message.to],
                    'from': [XIVO_UUID, self.token_user_uuid],
                    'msg': message.content,
                }
            },
            has_entry('name', 'chat_message_received'),
            has_entry('name', 'chat_message_sent'),
        ))
def GetCompletions_IdentifierCompleter_UnicodeQuery_InLine_test(app):
  """Identifier completion matches unicode identifiers against a unicode query."""
  contents = """
This is some text cøntaining
unicøde
"""
  event_data = BuildRequest(contents=contents,
                            filetype='css',
                            event_name='FileReadyToParse')
  app.post_json('/event_notification', event_data)

  # query is 'cø'
  completion_data = BuildRequest(contents='cø ' + contents,
                                 filetype='css',
                                 column_num=4)
  results = app.post_json('/completions',
                          completion_data).json['completions']

  assert_that(
    results,
    has_items(CompletionEntryMatcher('cøntaining', '[ID]'),
              CompletionEntryMatcher('unicøde', '[ID]')))
def test_create_all_parameters():
    """POSTing a SIP endpoint with every field set returns them all."""
    parameters = {
        'username': "******",
        'name': "not-the-username",
        'secret': "mysecret",
        'type': "peer",
        'host': "127.0.0.1",
        'options': ALL_OPTIONS,
    }
    response = confd.endpoints.sip.post(**parameters)

    assert_that(
        response.item,
        has_entries({
            'tenant_uuid': MAIN_TENANT,
            'username': '******',
            'name': 'not-the-username',
            'secret': 'mysecret',
            'type': 'peer',
            'host': '127.0.0.1',
            'options': has_items(*ALL_OPTIONS),
        }),
    )
def GetCompletions_DefaultToIdentifier_test(app):
    """Without a semantic trigger, completion falls back to identifiers."""
    filepath = PathToTestFile('testy', 'Program.cs')
    with WrapOmniSharpServer(app, filepath):
        request = BuildRequest(filepath=filepath,
                               filetype='cs',
                               contents=ReadFile(filepath),
                               line_num=10,
                               column_num=7)

        response_data = app.post_json('/completions', request).json
        print('Response: ', response_data)

        assert_that(
            response_data,
            has_entries({
                'completion_start_column': 4,
                'completions': has_items(
                    CompletionEntryMatcher('Console', '[ID]'), ),
                'errors': empty(),
            }))
def bus_events_received():
    # Both the "created" and "succeeded" fax events must have been
    # published, each with a matching message payload and headers.
    def fax_event(name):
        return has_entries(
            message=has_entries({
                'name': name,
                'data': has_entries({
                    'id': fax_id,
                    'context': 'user-context',
                    'extension': 'recipient-fax',
                    'user_uuid': 'some-user-id',
                    'tenant_uuid': 'my-tenant',
                }),
            }),
            headers=has_entries({
                'name': name,
                'tenant_uuid': 'my-tenant',
            }),
        )

    assert_that(
        events.accumulate(with_headers=True),
        has_items(
            fax_event('fax_outbound_user_created'),
            fax_event('fax_outbound_user_succeeded'),
        )
    )
def GetCompletions_Basic_test(app):
    """Completions at different columns filter the candidate list.

    At column 3 (no query yet) both attributes 'a' and 'b' are offered;
    at column 4 the query 'a' must keep 'a' and filter out 'b'.
    """
    filepath = PathToTestFile('basic.py')
    # Read the fixture once; the original re-read the file for the
    # second request even though the contents are identical.
    contents = ReadFile(filepath)

    def completions_at(column_num):
        # Both requests differ only in the cursor column.
        request = BuildRequest(filepath=filepath,
                               filetype='python',
                               contents=contents,
                               line_num=7,
                               column_num=column_num)
        return app.post_json('/completions', request).json['completions']

    assert_that(
        completions_at(3),
        has_items(
            CompletionEntryMatcher('a', 'self.a = 1', {
                'detailed_info': '',
                'kind': 'statement'
            }),
            CompletionEntryMatcher('b', 'self.b = 2', {
                'detailed_info': '',
                'kind': 'statement'
            })))

    assert_that(
        completions_at(4),
        all_of(
            has_item(
                CompletionEntryMatcher('a', 'self.a = 1', {
                    'detailed_info': '',
                    'kind': 'statement'
                })),
            is_not(has_item(CompletionEntryMatcher('b')))))
def check_cluster_status(self, cluster_status, cluster_node_names,
                         disabled_nodes_number=0):
    """Step to check status of RabbitMQ cluster.

    Cluster status is checked by 'rabbitmqctl cluster_status'. Example of
    its output:

        Cluster status of node rabbit@ctl03 ...
        [{nodes,[{disc,[rabbit@ctl01,rabbit@ctl02,rabbit@ctl03]}]},
         {running_nodes,[rabbit@ctl02,rabbit@ctl03]},
         {cluster_name,<<"openstack">>},
         {partitions,[]},
         {alarms,[{rabbit@ctl02,[]},{rabbit@ctl03,[]}]}]

    This step checks that:
    - the list of nodes equals the expected one (from the config file)
    - every running node appears in the list of all nodes
    - the number of running nodes equals the number of all nodes minus
      ``disabled_nodes_number``

    Args:
        cluster_status (str): output of rabbitmqctl
        cluster_node_names (list): names of cluster nodes
        disabled_nodes_number (int): number of nodes where RabbitMQ is
            stopped

    Raises:
        AssertionError: if any of the checks above fails
    """
    # NOTE(review): node lists are parsed positionally from the second
    # and third lines of the output, so the exact format shown above is
    # assumed — confirm it holds for the deployed RabbitMQ version.
    lines = cluster_status.split('\n')
    all_nodes = re.findall(r'(\w+@\w+)', lines[1])
    running_nodes = re.findall(r'(\w+@\w+)', lines[2])
    assert_that(sorted(all_nodes), is_(sorted(cluster_node_names)))
    assert_that(all_nodes, has_items(*running_nodes))
    assert_that(
        len(all_nodes) - disabled_nodes_number, is_(len(running_nodes)))
def test_Diagnostics_FixIt_Available(self, app):
    """FileReadyToParse diagnostics say whether a FixIt is available."""
    filepath = PathToTestFile('FixIt_Clang_cpp11.cpp')
    flags = ['-x', 'c++', '-std=c++03', '-Wall', '-Wextra', '-pedantic']
    event_data = BuildRequest(contents=ReadFile(filepath),
                              event_name='FileReadyToParse',
                              filepath=filepath,
                              filetype='cpp',
                              compilation_flags=flags)

    response = app.post_json('/event_notification', event_data).json
    pprint(response)

    # The switch-condition error comes with a fixit; the C++11
    # extension warning does not.
    assert_that(
        response,
        has_items(
            has_entries({
                'location': LocationMatcher(filepath, 16, 3),
                'text': equal_to("switch condition type 'A' "
                                 "requires explicit conversion to 'int'"),
                'fixit_available': True
            }),
            has_entries({
                'location': LocationMatcher(filepath, 11, 3),
                'text': equal_to(
                    'explicit conversion functions are a C++11 extension'),
                'fixit_available': False
            }),
        ))
def test_swagger(self):
    """The swagger spec describes /foo POST parameters and responses."""
    response = self.client.get("/api/swagger")
    assert_that(response.status_code, is_(equal_to(200)))

    operation = loads(response.data)["paths"]["/foo"]["post"]

    # The body parameter must reference the FooRequest schema.
    assert_that(
        operation["parameters"],
        has_items(
            has_entry("in", "body"),
            has_entry(
                "schema",
                has_entry("$ref", "#/definitions/FooRequest"),
            ),
        ),
    )
    # A 200 response referencing FooList is declared, and no 204.
    assert_that(
        operation["responses"],
        all_of(
            has_key("200"),
            is_not(has_key("204")),
            has_entry(
                "200",
                has_entry(
                    "schema",
                    has_entry("$ref", "#/definitions/FooList"),
                ),
            ),
        ),
    )
def GenericLSPCompleter_SingleDiagnostics_test( app ):
  """A parsed file yields exactly one WARNING via /receive_messages."""
  request = BuildRequest( filepath = TEST_FILE,
                          filetype = 'foo',
                          line_num = 1,
                          column_num = 1,
                          contents = TEST_FILE_CONTENT,
                          event_name = 'FileReadyToParse' )
  app.post_json( '/event_notification', request )
  WaitUntilCompleterServerReady( app, 'foo' )

  # /receive_messages takes the same request, minus the event name.
  del request[ 'event_name' ]
  response = app.post_json( '/receive_messages', request )

  expected_diagnostic = has_entries( {
    'kind': equal_to( 'WARNING' ),
    'location': LocationMatcher( TEST_FILE, 2, 1 ),
    'location_extent': RangeMatcher( TEST_FILE, ( 2, 1 ), ( 2, 4 ) ),
    'text': equal_to( 'FOO is all uppercase.' ),
    'fixit_available': False
  } )
  assert_that( response.json,
               has_items( has_entries( {
                 'diagnostics': contains_exactly( expected_diagnostic )
               } ) ) )
def test_update_policies(self):
    """A service update restores the bundled ACL of a modified policy."""
    service_name = 'service-standard-internal'
    policy_uuid = self.auth.policies.list(
        name=service_name)['items'][0]['uuid']
    # Corrupt the policy on purpose; the update must repair it.
    self.auth.policies.edit(policy_uuid,
                            name=service_name,
                            acl=['break.all.acl'])

    self._service_update()

    expected_acl = contains_inanyorder(
        'random.acl.*',
        'weird.random.#',
        'another.random.read',
    )
    assert_that(
        self.auth.policies.list()['items'],
        has_items(has_entries(name=service_name, acl=expected_acl)),
    )
def test_edit_additional_options(self):
    """Editing replaces the whole custom-options list of the endpoint."""
    iax = self.add_useriax(_options=[
        ["foo", "bar"],
        ["foo", "baz"],
        ["spam", "eggs"],
    ])

    self.session.expire_all()
    new_options = [
        ["foo", "newbar"],
        ["foo", "newbaz"],
        ["spam", "neweggs"],
    ]
    iax.options = new_options
    iax_dao.edit(iax)

    self.session.expire_all()
    assert_that(iax, has_properties(_options=has_items(*new_options)))
def IdentifierCompleter_WorksForSpecialIdentifierChars_test(self):
    # Identifiers containing '-' (CSS property names) must be collected
    # from the parsed buffer and offered as identifier completions.
    contents = """
textarea {
  font-family: sans-serif;
  font-size: 12px;
}"""
    event_data = self._BuildRequest(contents=contents,
                                    filetype='css',
                                    event_name='FileReadyToParse')
    self._app.post_json('/event_notification', event_data)

    # query is 'fo'
    completion_data = self._BuildRequest(contents='fo ' + contents,
                                         filetype='css',
                                         column_num=3)
    results = self._app.post_json('/completions',
                                  completion_data).json['completions']
    assert_that(
        results,
        has_items(self._CompletionEntryMatcher('font-size', '[ID]'),
                  self._CompletionEntryMatcher('font-family', '[ID]')))
def test_find_all_by_user_id_two_user_call_permissions(self):
    """find_all_by returns every permission associated with the user."""
    user = self.add_user()
    permissions = [self.add_call_permission(),
                   self.add_call_permission()]
    for permission in permissions:
        self.add_user_call_permission(user_id=user.id,
                                      call_permission_id=permission.id)

    result = user_call_permission_dao.find_all_by(user_id=user.id)

    # Result order is not guaranteed; match each association independently.
    assert_that(
        result,
        has_items(*(
            has_properties({
                'user_id': user.id,
                'call_permission_id': permission.id,
            })
            for permission in permissions
        )))
def DebugInfo_JvmArgs_test( app ):
  """The jdt.ls server must be launched with the lombok java agent."""
  StartJavaCompleterServerInDirectory(
    app, PathToTestFile( 'lombok_project', 'src' ) )

  filepath = PathToTestFile( 'lombok_project', 'src', 'main', 'java',
                             'com', 'ycmd', 'App.java' )
  request_data = BuildRequest( filepath = filepath, filetype = 'java' )

  debug_info = app.post_json( '/debug_info', request_data ).json
  assert_that(
    debug_info,
    has_entry( 'completer', has_entries( {
      'servers': contains_exactly( has_entries( {
        'executable': has_items( starts_with( '-javaagent:' ) ),
      } ) )
    } ) ) )
def GetCompletions_IdentifierCompleter_WorksForSpecialIdentifierChars_test():
  app = TestApp( handlers.app )
  # Identifiers containing '-' (CSS property names) must be collected
  # from the parsed buffer and offered as identifier completions.
  contents = """
textarea {
  font-family: sans-serif;
  font-size: 12px;
}"""
  event_data = BuildRequest( contents = contents,
                             filetype = 'css',
                             event_name = 'FileReadyToParse' )
  app.post_json( '/event_notification', event_data )

  # query is 'fo'
  completion_data = BuildRequest( contents = 'fo ' + contents,
                                  filetype = 'css',
                                  column_num = 3 )
  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               has_items( CompletionEntryMatcher( 'font-size' ),
                          CompletionEntryMatcher( 'font-family' ) ) )
def test_Diagnostics_MultipleMissingIncludes(self, app):
    """Each missing include produces its own ERROR diagnostic."""
    filepath = PathToTestFile('multiple_missing_includes.cc')
    event_data = BuildRequest(contents=ReadFile(filepath),
                              event_name='FileReadyToParse',
                              filepath=filepath,
                              filetype='cpp',
                              compilation_flags=['-x', 'c++'])

    response = app.post_json('/event_notification', event_data).json
    pprint(response)

    def missing_include_error(line, message):
        # Both diagnostics share kind, column and fixit availability.
        return has_entries({
            'kind': equal_to('ERROR'),
            'location': LocationMatcher(filepath, line, 10),
            'text': equal_to(message),
            'fixit_available': False
        })

    assert_that(
        response,
        has_items(
            missing_include_error(
                1, "'first_missing_include' file not found"),
            missing_include_error(
                2, "'second_missing_include' file not found"),
        ))
def test_good_gotodefinition():
    """gotodefinition returns both the function and the class definition."""
    app = TestApp(handlers.app)
    filepath = fixture_filepath('goto.py')
    # Close the fixture file deterministically instead of leaking the
    # handle (the original `open(filepath).read()` relied on GC).
    with open(filepath) as fixture_file:
        source = fixture_file.read()
    request_data = {
        'source': source,
        'line': 10,
        'col': 3,
        'source_path': filepath
    }

    definitions = app.post_json('/gotodefinition',
                                request_data).json['definitions']

    assert_that(definitions, has_length(2))
    assert_that(
        definitions,
        has_items(
            {
                'module_path': filepath,
                'name': 'f',
                'in_builtin_module': False,
                'line': 1,
                'column': 4,
                'docstring': 'f()\n\nModule method docs\nAre '
                             'dedented, like you might expect',
                'description': 'def f',
                'is_keyword': False,
            },
            {
                'module_path': filepath,
                'name': 'C',
                'in_builtin_module': False,
                'line': 6,
                'column': 6,
                'docstring': 'Class Documentation',
                'description': 'class C',
                'is_keyword': False
            }))
def test_put_record_start(self):
    """Starting call recording updates the call and emits a bus event."""
    channel_id = self.given_call_not_stasis()
    event_accumulator = self.bus.accumulator('calls.*.updated')

    self.calld_client.calls.start_record(channel_id)

    def recording_event_received():
        expected_event = has_entries(
            message=has_entries(
                name='call_updated',
                data=has_entries(call_id=channel_id,
                                 record_state='active'),
            ),
            headers=has_entries(
                name='call_updated',
                tenant_uuid=VALID_TENANT,
            ),
        )
        assert_that(
            event_accumulator.accumulate(with_headers=True),
            has_items(expected_event)
        )

    until.assert_(recording_event_received, tries=10)

    assert_that(
        self.calld_client.calls.list_calls()['items'],
        has_items(has_entries(call_id=channel_id, record_state='active'))
    )

    # Should not raise an error on second record start
    assert_that(
        calling(self.calld_client.calls.start_record).with_args(channel_id),
        not_(raises(CalldError))
    )
def test_given_voicemail_when_editing_then_updates_via_confd(self):
    # Saving the edit form must issue a PUT to confd carrying the edited
    # email, email body and advanced option.
    self.confd.add_json_response("/voicemails",
                                 {'total': 1, 'items': [self.voicemail]})
    self.confd.add_json_response("/voicemails/1", self.voicemail)
    self.confd.add_json_response("/voicemails/1", self.voicemail)
    self.confd.add_response("/voicemails/1", method="PUT", code=204)

    voicemail_page = self.browser.voicemails.edit("Edited Voicemail")
    voicemail_page.fill_form(email="*****@*****.**")
    email_tab = voicemail_page.email()
    email_tab.fill_form(emailbody="Hello world\nThis is an email\nGoodbye|")
    advanced_tab = voicemail_page.advanced()
    advanced_tab.add_option("saycid", "yes")
    voicemail_page.save()

    # The browser normalizes LF to CRLF, so the email body is expected
    # back CRLF-encoded.
    expected_voicemail = has_entries({u'ask_password': True,
                                      u'attach_audio': False,
                                      u'context': u'default',
                                      u'delete_messages': False,
                                      u'email': u"*****@*****.**",
                                      u'language': None,
                                      u'max_messages': None,
                                      u'name': u'Edited Voicemail',
                                      u'number': u'1001',
                                      u'pager': None,
                                      u'password': None,
                                      u'timezone': u'eu-fr',
                                      u'options': has_items(
                                          [u"emailbody",
                                           u"Hello world\r\nThis is an email\r\nGoodbye|"],
                                          [u"saycid", u"yes"])
                                      })

    request = self.confd.request_matching('/voicemails/1', method='PUT')
    assert_that(json.loads(request['body']), expected_voicemail)
def GetCompletions_CsCompleter_Works_test():
  """The C# completer offers members of Console once its server is up."""
  app = TestApp( handlers.app )
  filepath = PathToTestFile( 'testy/Program.cs' )
  # Read the fixture through a context manager instead of leaking the
  # file handle (the original `open( filepath ).read()` relied on GC).
  with open( filepath ) as test_file:
    contents = test_file.read()
  event_data = BuildRequest( filepath = filepath,
                             filetype = 'cs',
                             contents = contents,
                             event_name = 'FileReadyToParse' )

  app.post_json( '/event_notification', event_data )

  # We need to wait until the server has started up.
  while True:
    result = app.post_json( '/run_completer_command',
                            BuildRequest( completer_target =
                                            'filetype_default',
                                          command_arguments =
                                            ['ServerRunning'],
                                          filetype = 'cs' ) ).json
    if result:
      break
    time.sleep( 0.2 )

  try:
    completion_data = BuildRequest( filepath = filepath,
                                    filetype = 'cs',
                                    contents = contents,
                                    line_num = 8,
                                    column_num = 11,
                                    start_column = 11 )

    results = app.post_json( '/completions', completion_data ).json
    assert_that( results,
                 has_items( CompletionEntryMatcher( 'CursorLeft' ),
                            CompletionEntryMatcher( 'CursorSize' ) ) )
  finally:
    # We need to turn off the CS server so that it doesn't stick around;
    # do it even if the assertion above fails.
    app.post_json( '/run_completer_command',
                   BuildRequest( completer_target = 'filetype_default',
                                 command_arguments = ['StopServer'],
                                 filetype = 'cs' ) )