def test_apply_physical_no_filter(self):
    metrics = DirectMetrics()
    metrics.update_physical(
        object(),
        MetricUpdates(counters={MetricKey('step1', self.name1): 5,
                                MetricKey('step1', self.name3): 8}))
    metrics.update_physical(
        object(),
        MetricUpdates(counters={MetricKey('step2', self.name1): 7,
                                MetricKey('step1', self.name3): 4}))

    results = metrics.query()
    hc.assert_that(results['counters'],
                   hc.contains_inanyorder(*[
                       MetricResult(MetricKey('step1', self.name1), 0, 5),
                       MetricResult(MetricKey('step1', self.name3), 0, 12),
                       MetricResult(MetricKey('step2', self.name1), 0, 7)]))

    metrics.commit_physical(object(), MetricUpdates())
    results = metrics.query()
    hc.assert_that(results['counters'],
                   hc.contains_inanyorder(*[
                       MetricResult(MetricKey('step1', self.name1), 0, 5),
                       MetricResult(MetricKey('step1', self.name3), 0, 12),
                       MetricResult(MetricKey('step2', self.name1), 0, 7)]))
def test_get_listing(self):
    context = VOSpaceWorkingContext(LISTING_TEST_DIR)

    assert_that(context.get_listing(".cands.astrom"),
                contains_inanyorder("xxx1.cands.astrom", "xxx2.cands.astrom"))
    assert_that(context.get_listing(".reals.astrom"),
                contains_inanyorder("xxx3.reals.astrom", "xxx4.reals.astrom"))
def test_write_progress_two_simultaneous_managers(self):
    assert_that(self.progress_manager.get_done(tasks.CANDS_TASK), has_length(0))
    assert_that(self.progress_manager.get_done(tasks.REALS_TASK), has_length(0))

    processed1 = "xxx2.reals.astrom"
    self.progress_manager.lock(processed1)
    self.progress_manager.record_done(processed1)

    assert_that(self.progress_manager.get_done(tasks.CANDS_TASK), has_length(0))
    assert_that(self.progress_manager.get_done(tasks.REALS_TASK), contains(processed1))

    # Create a second simultaneous manager
    manager2 = self.create_concurrent_progress_manager()
    processed2 = "xxx3.reals.astrom"
    self.progress_manager.lock(processed2)
    manager2.record_done(processed2)

    # Make sure second manager sees both entries
    assert_that(manager2.get_done(tasks.CANDS_TASK), has_length(0))
    assert_that(manager2.get_done(tasks.REALS_TASK),
                contains_inanyorder(processed1, processed2))

    # Make sure original manager sees both entries
    assert_that(self.progress_manager.get_done(tasks.CANDS_TASK), has_length(0))
    assert_that(self.progress_manager.get_done(tasks.REALS_TASK),
                contains_inanyorder(processed1, processed2))
def test_file_found_to_be_done_not_checked_again(self):
    test_files = [self.file1, self.file2, self.file3, self.file4]
    self.directory_manager.set_listing(self.taskid, test_files)

    self.progress_manager.lock(self.file2)
    self.progress_manager.record_done(self.file2)
    self.progress_manager.unlock(self.file2)

    # We don't yet know file2 is done.
    assert_that(self.undertest.get_potential_files([]),
                contains_inanyorder(self.file1, self.file2, self.file3, self.file4))

    assert_that(self.undertest.get_workunit().get_filename(), equal_to(self.file1))

    # We have not yet discovered file2 is done because we found file1
    # right away.  However, we should remember we already returned file1.
    assert_that(self.undertest.get_potential_files([]),
                contains_inanyorder(self.file2, self.file3, self.file4))

    # Here we should discover file2 is done and skip over it
    assert_that(self.undertest.get_workunit().get_filename(), equal_to(self.file3))

    # So the next time we know not to check file2 again
    assert_that(self.undertest.get_potential_files([]),
                contains_inanyorder(self.file4))
def test_commit_logical_no_filter(self):
    metrics = DirectMetrics()
    metrics.commit_logical(
        self.bundle1,
        MetricUpdates(
            counters={MetricKey('step1', self.name1): 5,
                      MetricKey('step1', self.name2): 8},
            distributions={
                MetricKey('step1', self.name1): DistributionData(8, 2, 3, 5)}))
    metrics.commit_logical(
        self.bundle1,
        MetricUpdates(
            counters={MetricKey('step2', self.name1): 7,
                      MetricKey('step1', self.name2): 4},
            distributions={
                MetricKey('step1', self.name1): DistributionData(4, 1, 4, 4)}))

    results = metrics.query()
    hc.assert_that(
        results['counters'],
        hc.contains_inanyorder(*[
            MetricResult(MetricKey('step1', self.name2), 12, 0),
            MetricResult(MetricKey('step2', self.name1), 7, 0),
            MetricResult(MetricKey('step1', self.name1), 5, 0)]))
    hc.assert_that(
        results['distributions'],
        hc.contains_inanyorder(
            MetricResult(MetricKey('step1', self.name1),
                         DistributionResult(DistributionData(12, 3, 3, 5)),
                         DistributionResult(DistributionData(0, 0, None, None)))))
def test_given_sip_account_has_pickups_then_pickups_are_returned(self):
    sip = self.add_usersip(category='user')
    line = self.add_line(protocol='sip', protocolid=sip.id)
    user = self.add_user()
    self.add_user_line(user_id=user.id, line_id=line.id)

    pickup1 = self.add_pickup()
    pickup2 = self.add_pickup()
    pickup3 = self.add_pickup()
    pickup4 = self.add_pickup()

    self.add_pickup_member_user(pickup1, user.id, category='member')
    self.add_pickup_member_user(pickup2, user.id, category='member')
    self.add_pickup_member_user(pickup3, user.id, category='pickup')
    self.add_pickup_member_user(pickup4, user.id, category='pickup')

    results = list(asterisk_conf_dao.find_sip_user_settings())

    assert_that(results, has_length(1))

    namedcallgroup = results[0].namedcallgroup
    namedpickupgroup = results[0].namedpickupgroup
    call_groups = namedcallgroup.split(',')
    pickup_groups = namedpickupgroup.split(',')

    assert_that(call_groups, contains_inanyorder(str(pickup3.id), str(pickup4.id)))
    assert_that(pickup_groups, contains_inanyorder(str(pickup1.id), str(pickup2.id)))
def test_grid_query(self):
    grid = Grid(11.0, 12.0, 51.0, 52.0, 0.1, 0.2, self.srid)
    query = self.query_builder.grid_query(
        "<table_name>", grid, count_threshold=0,
        time_interval=TimeInterval(self.start_time, self.end_time))

    assert_that(str(query), is_(
        "SELECT TRUNC((ST_X(ST_Transform(geog::geometry, %(srid)s)) - %(xmin)s) / %(xdiv)s)::integer AS rx, "
        "TRUNC((ST_Y(ST_Transform(geog::geometry, %(srid)s)) - %(ymin)s) / %(ydiv)s)::integer AS ry, "
        "count(*) AS strike_count, max(\"timestamp\") as \"timestamp\" FROM <table_name> "
        "WHERE ST_GeomFromWKB(%(envelope)s, %(envelope_srid)s) && geog AND "
        "\"timestamp\" >= %(start_time)s AND \"timestamp\" < %(end_time)s GROUP BY rx, ry"))

    parameters = query.get_parameters()

    assert_that(parameters.keys(),
                contains_inanyorder('xmin', 'ymin', 'xdiv', 'ydiv', 'envelope',
                                    'envelope_srid', 'srid', 'start_time', 'end_time'))
    # `not contains_inanyorder(...)` would collapse the matcher to a plain bool;
    # use a negating matcher to assert the key is absent.
    assert_that(parameters.keys(), is_not(has_item('count_threshold')))

    assert_that(parameters['xmin'], is_(11.0))
    assert_that(parameters['ymin'], is_(51.0))
    assert_that(parameters['xdiv'], is_(0.1))
    assert_that(parameters['ydiv'], is_(0.2))
    assert_that(parameters['envelope'], is_(not_none()))
    assert_that(parameters['envelope_srid'], is_(self.srid))
    assert_that(parameters['start_time'], is_(self.start_time))
    assert_that(parameters['end_time'], is_(self.end_time))
    assert_that(parameters['srid'], is_(self.srid))
def test_direct_runner_metrics(self):
    class MyDoFn(beam.DoFn):
        def start_bundle(self):
            count = Metrics.counter(self.__class__, 'bundles')
            count.inc()

        def finish_bundle(self):
            count = Metrics.counter(self.__class__, 'finished_bundles')
            count.inc()

        def process(self, element):
            gauge = Metrics.gauge(self.__class__, 'latest_element')
            gauge.set(element)
            count = Metrics.counter(self.__class__, 'elements')
            count.inc()
            distro = Metrics.distribution(self.__class__, 'element_dist')
            distro.update(element)
            return [element]

    runner = DirectRunner()
    p = Pipeline(runner, options=PipelineOptions(self.default_properties))
    pcoll = (p
             | ptransform.Create([1, 2, 3, 4, 5])
             | 'Do' >> beam.ParDo(MyDoFn()))
    assert_that(pcoll, equal_to([1, 2, 3, 4, 5]))
    result = p.run()
    result.wait_until_finish()

    metrics = result.metrics().query()
    namespace = '{}.{}'.format(MyDoFn.__module__, MyDoFn.__name__)

    hc.assert_that(
        metrics['counters'],
        hc.contains_inanyorder(
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'elements')), 5, 5),
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'bundles')), 1, 1),
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'finished_bundles')), 1, 1)))
    hc.assert_that(
        metrics['distributions'],
        hc.contains_inanyorder(
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'element_dist')),
                DistributionResult(DistributionData(15, 5, 1, 5)),
                DistributionResult(DistributionData(15, 5, 1, 5)))))

    gauge_result = metrics['gauges'][0]
    hc.assert_that(
        gauge_result.key,
        hc.equal_to(MetricKey('Do', MetricName(namespace, 'latest_element'))))
    hc.assert_that(gauge_result.committed.value, hc.equal_to(5))
    hc.assert_that(gauge_result.attempted.value, hc.equal_to(5))
def test_listdir_for_task(self):
    directory = self.get_abs_path("data/testdir")

    listing1 = listdir_for_suffix(directory, tasks.get_suffix(tasks.CANDS_TASK))
    assert_that(listing1, contains_inanyorder("xxx1.cands.astrom", "xxx2.cands.astrom"))

    listing2 = listdir_for_suffix(directory, tasks.get_suffix(tasks.REALS_TASK))
    assert_that(listing2, contains_inanyorder("xxx1.reals.astrom", "xxx2.reals.astrom"))
def test_listdir_for_suffix(self):
    directory = self.get_abs_path("data/testdir")

    listing1 = listdir_for_suffix(directory, "cands.astrom")
    assert_that(listing1, contains_inanyorder("xxx1.cands.astrom", "xxx2.cands.astrom"))

    listing2 = listdir_for_suffix(directory, "reals.astrom")
    assert_that(listing2, contains_inanyorder("xxx1.reals.astrom", "xxx2.reals.astrom"))
def test_load_all_applicants(self):
    applicant_id = "9g734m0hg73bf06"
    httpretty.register_uri(
        httpretty.GET,
        "https://api.onfido.com/v1/applicants/{0}".format(applicant_id),
        body=load_fixture_string("applicant.json"),
        content_type="application/json")

    response = onfido.Applicant.retrieve(applicant_id)

    assert_that(response.id, equal_to("1030303-123123-123123"))
    assert_that(response.created_at, equal_to("2014-05-23T13:50:33Z"))
    assert_that(response.href, equal_to("/v1/applicants/1030303-123123-123123"))
    assert_that(response.title, equal_to("Mr"))
    assert_that(response.first_name, equal_to("John"))
    assert_that(response.middle_name, equal_to(None))
    assert_that(response.last_name, equal_to("Smith"))
    assert_that(response.gender, equal_to("male"))
    assert_that(response.dob, equal_to("2013-02-17"))
    assert_that(response.telephone, equal_to("02088909293"))
    assert_that(response.mobile, equal_to(None))
    assert_that(response.country, equal_to("GBR"))
    assert_that(response.id_numbers, contains_inanyorder(
        {"type": "ssn", "value": "433-54-3937"},
        {"type": "driving_license", "value": "I1234562", "state": "CA"}))
    assert_that(response.addresses, contains_inanyorder(
        {"flat_number": None, "building_name": None, "building_number": "100",
         "street": "Main Street", "sub_street": None, "state": None,
         "town": "London", "postcode": "SW4 6EH", "country": "GBR",
         "start_date": "2013-01-01", "end_date": None},
        {"flat_number": "Apt 2A", "building_name": None, "building_number": "1017",
         "street": "Oakland Ave", "sub_street": None, "town": "Piedmont",
         "state": "CA", "postcode": "94611", "country": "USA",
         "start_date": "2006-03-07", "end_date": "2012-12-31"}))
def test_split_ratio_using_labels(self):
    labels = ["a", "a", "b", "a", "b", "a", "b", "b", "a", "a"]
    dataset = self.create_dataset(labels=labels, sample_ids=range(len(labels)))

    first, second = dataset.split(0.75, using_labels=True)

    assert_that(first.get_labels(),
                contains_inanyorder("a", "a", "a", "a", "b", "b", "b"))
    assert_that(second.get_labels(), contains_inanyorder("a", "a", "b"))
def test_directory_manager_get_listing(self):
    directory = self.get_abs_path("data/testdir")
    directory_manager = LocalDirectoryWorkingContext(directory)

    listing1 = directory_manager.get_listing("cands.astrom")
    assert_that(listing1, contains_inanyorder("xxx1.cands.astrom", "xxx2.cands.astrom"))

    listing2 = directory_manager.get_listing("reals.astrom")
    assert_that(listing2, contains_inanyorder("xxx1.reals.astrom", "xxx2.reals.astrom"))

    assert_that(directory_manager.get_full_path("xxx1.cands.astrom"),
                equal_to(self.get_abs_path("data/testdir/xxx1.cands.astrom")))
def test_processed_indices(self):
    assert_that(self.undertest.get_processed_indices(self.file1), has_length(0))

    self.undertest.lock(self.file1)
    self.undertest.record_index(self.file1, 1)
    assert_that(self.undertest.get_processed_indices(self.file1),
                contains_inanyorder(1))

    self.undertest.record_index(self.file1, 2)
    assert_that(self.undertest.get_processed_indices(self.file1),
                contains_inanyorder(1, 2))

    assert_that(self.undertest.get_processed_indices(self.file2), has_length(0))
def IncludeCompletion_test( self ):
  data = self._CompletionResultsForLine( '#include <' )
  assert_that( data, contains_inanyorder( ( 'QDialog', '[File]' ),
                                          ( 'QWidget', '[File]' ),
                                          ( 'Qt', '[Dir]' ),
                                          ( 'QtGui', '[File&Dir]' ) ) )

  data = self._CompletionResultsForLine( '#include <QtGui/' )
  assert_that( data, contains_inanyorder( ( 'QDialog', '[File]' ),
                                          ( 'QWidget', '[File]' ) ) )
def test_group_query_with_collect_fields(self):
    self._save_all(
        'foo_bar',
        {'foo': 'foo', 'c': 1},
        {'foo': 'foo', 'c': 3},
        {'foo': 'bar', 'c': 2})

    results = self.engine.execute_query(
        'foo_bar', Query.create(group_by=['foo'], collect=[('c', 'sum')]))

    assert_that(results, contains_inanyorder(
        has_entries({'foo': 'bar', 'c': [2]}),
        has_entries({'foo': 'foo', 'c': contains_inanyorder(1, 3)})))
def GetCompletions_ClangCompleter_ForceSemantic_OnlyFileteredCompletions_test():
  app = TestApp( handlers.app )
  contents = """
int main()
{
  int foobar;
  int floozar;
  int gooboo;
  int bleble;

  fooar
}
"""

  # 0-based line and column!
  completion_data = BuildRequest( filepath = '/foo.cpp',
                                  filetype = 'cpp',
                                  force_semantic = True,
                                  contents = contents,
                                  line_num = 8,
                                  column_num = 7,
                                  start_column = 7,
                                  query = 'fooar',
                                  compilation_flags = ['-x', 'c++'] )

  results = app.post_json( '/completions', completion_data ).json
  assert_that( results,
               contains_inanyorder( CompletionEntryMatcher( 'foobar' ),
                                    CompletionEntryMatcher( 'floozar' ) ) )
def GetCompletions_Require_NoQuery_test( app ):
  RunTest( app, {
    'description': 'semantic completion works for simple object no query',
    'request': {
      'filetype'  : 'javascript',
      'filepath'  : PathToTestFile( 'requirejs_test.js' ),
      'line_num'  : 2,
      'column_num': 15,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': contains_inanyorder(
          CompletionEntryMatcher( 'mine_bitcoin', 'fn(how_much: ?) -> number' ),
          CompletionEntryMatcher( 'get_number', 'number' ),
          CompletionEntryMatcher( 'get_string', 'string' ),
          CompletionEntryMatcher( 'get_thing', 'fn(a: ?) -> number|string' ),
          CompletionEntryMatcher( 'toString', 'fn() -> string' ),
          CompletionEntryMatcher( 'toLocaleString', 'fn() -> string' ),
          CompletionEntryMatcher( 'valueOf', 'fn() -> number' ),
          CompletionEntryMatcher( 'hasOwnProperty', 'fn(prop: string) -> bool' ),
          CompletionEntryMatcher( 'isPrototypeOf', 'fn(obj: ?) -> bool' ),
          CompletionEntryMatcher( 'propertyIsEnumerable', 'fn(prop: string) -> bool' ),
        ),
        'errors': empty(),
      } )
    },
  } )
def GetCompletions_NoQuery_test(app):
    RunTest(app, {
        "description": "semantic completion works for simple object no query",
        "request": {
            "filetype": "javascript",
            "filepath": PathToTestFile("simple_test.js"),
            "line_num": 13,
            "column_num": 43,
        },
        "expect": {
            "response": http.client.OK,
            "data": has_entries({
                "completions": contains_inanyorder(
                    CompletionEntryMatcher("a_simple_function", "fn(param: ?) -> string"),
                    CompletionEntryMatcher("basic_type", "number"),
                    CompletionEntryMatcher("object", "object"),
                    CompletionEntryMatcher("toString", "fn() -> string"),
                    CompletionEntryMatcher("toLocaleString", "fn() -> string"),
                    CompletionEntryMatcher("valueOf", "fn() -> number"),
                    CompletionEntryMatcher("hasOwnProperty", "fn(prop: string) -> bool"),
                    CompletionEntryMatcher("isPrototypeOf", "fn(obj: ?) -> bool"),
                    CompletionEntryMatcher("propertyIsEnumerable", "fn(prop: string) -> bool"),
                ),
                "errors": empty(),
            }),
        },
    })
def GetCompletions_ReturnsDocsInCompletions_test( app ):
  # This tests that we supply docs for completions
  RunTest( app, {
    'description': 'completions supply docs',
    'request': {
      'filetype'  : 'javascript',
      'filepath'  : PathToTestFile( 'requirejs_test.js' ),
      'line_num'  : 8,
      'column_num': 15,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': contains_inanyorder(
          CompletionEntryMatcher(
            'a_function',
            'fn(bar: ?) -> {a_value: string}', {
              'detailed_info': ( 'fn(bar: ?) -> {a_value: string}\n'
                                 'This is a short documentation string' ),
            } ),
          CompletionEntryMatcher( 'options', 'options' ),
          CompletionEntryMatcher( 'toString', 'fn() -> string' ),
          CompletionEntryMatcher( 'toLocaleString', 'fn() -> string' ),
          CompletionEntryMatcher( 'valueOf', 'fn() -> number' ),
          CompletionEntryMatcher( 'hasOwnProperty', 'fn(prop: string) -> bool' ),
          CompletionEntryMatcher( 'isPrototypeOf', 'fn(obj: ?) -> bool' ),
          CompletionEntryMatcher( 'propertyIsEnumerable', 'fn(prop: string) -> bool' ),
        ),
        'errors': empty(),
      } )
    },
  } )
def GetCompletions_Require_NoQuery_test(app):
    RunTest(app, {
        "description": "semantic completion works for simple object no query",
        "request": {
            "filetype": "javascript",
            "filepath": PathToTestFile("requirejs_test.js"),
            "line_num": 2,
            "column_num": 15,
        },
        "expect": {
            "response": http.client.OK,
            "data": has_entries({
                "completions": contains_inanyorder(
                    CompletionEntryMatcher("mine_bitcoin", "fn(how_much: ?) -> number"),
                    CompletionEntryMatcher("get_number", "number"),
                    CompletionEntryMatcher("get_string", "string"),
                    CompletionEntryMatcher("get_thing", "fn(a: ?) -> number|string"),
                    CompletionEntryMatcher("toString", "fn() -> string"),
                    CompletionEntryMatcher("toLocaleString", "fn() -> string"),
                    CompletionEntryMatcher("valueOf", "fn() -> number"),
                    CompletionEntryMatcher("hasOwnProperty", "fn(prop: string) -> bool"),
                    CompletionEntryMatcher("isPrototypeOf", "fn(obj: ?) -> bool"),
                    CompletionEntryMatcher("propertyIsEnumerable", "fn(prop: string) -> bool"),
                ),
                "errors": empty(),
            }),
        },
    })
def test_query_only_display_data(self):
    source = beam.io.BigQuerySource(query='my_query')
    dd = DisplayData.create_from(source)
    expected_items = [
        DisplayDataItemMatcher('validation', False),
        DisplayDataItemMatcher('query', 'my_query')]
    hc.assert_that(dd.items, hc.contains_inanyorder(*expected_items))
def test_date_partitioned_table_name(self):
    source = beam.io.BigQuerySource('dataset.table$20030102', validate=True)
    dd = DisplayData.create_from(source)
    expected_items = [
        DisplayDataItemMatcher('validation', True),
        DisplayDataItemMatcher('table', 'dataset.table$20030102')]
    hc.assert_that(dd.items, hc.contains_inanyorder(*expected_items))
def test_project_table_display_data(self):
    sinkq = beam.io.BigQuerySink('PROJECT:dataset.table')
    dd = DisplayData.create_from(sinkq)
    expected_items = [
        DisplayDataItemMatcher('table', 'PROJECT:dataset.table'),
        DisplayDataItemMatcher('validation', False)]
    hc.assert_that(dd.items, hc.contains_inanyorder(*expected_items))
def GetCompletions_AfterRestart_test( app ):
  filepath = PathToTestFile( 'test.ts' )
  app.post_json( '/run_completer_command',
                 BuildRequest( completer_target = 'filetype_default',
                               command_arguments = [ 'RestartServer' ],
                               filetype = 'typescript',
                               filepath = filepath ) )

  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'typescript',
                                  contents = ReadFile( filepath ),
                                  force_semantic = True,
                                  line_num = 17,
                                  column_num = 6 )

  response = app.post_json( '/completions', completion_data )
  assert_that( response.json, has_entries( {
    'completions': contains_inanyorder(
      CompletionEntryMatcher( 'methodA', extra_params = {
        'menu_text': 'methodA (method) Foo.methodA(): void' } ),
      CompletionEntryMatcher( 'methodB', extra_params = {
        'menu_text': 'methodB (method) Foo.methodB(): void' } ),
      CompletionEntryMatcher( 'methodC', extra_params = {
        'menu_text': ( 'methodC (method) Foo.methodC(a: '
                       '{ foo: string; bar: number; }): void' ) } ),
    )
  } ) )
def GoToReferences_test( self ):
  filepath = self._PathToTestFile( 'test.ts' )
  contents = ReadFile( filepath )

  event_data = self._BuildRequest( filepath = filepath,
                                   filetype = 'typescript',
                                   contents = contents,
                                   event_name = 'BufferVisit' )
  self._app.post_json( '/event_notification', event_data )

  references_data = self._BuildRequest( completer_target = 'filetype_default',
                                        command_arguments = [ 'GoToReferences' ],
                                        line_num = 28,
                                        column_num = 6,
                                        contents = contents,
                                        filetype = 'typescript',
                                        filepath = filepath )

  expected = contains_inanyorder(
    has_entries( { 'description': 'var bar = new Bar();',
                   'line_num'   : 28,
                   'column_num' : 5 } ),
    has_entries( { 'description': 'bar.testMethod();',
                   'line_num'   : 29,
                   'column_num' : 1 } ) )
  actual = self._app.post_json( '/run_completer_command', references_data ).json
  assert_that( actual, expected )
def GetCompletions_NoQuery_test( app ):
  RunTest( app, {
    'description': 'semantic completion works for simple object no query',
    'request': {
      'filetype'  : 'javascript',
      'filepath'  : PathToTestFile( 'simple_test.js' ),
      'line_num'  : 13,
      'column_num': 43,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': contains_inanyorder(
          CompletionEntryMatcher( 'a_simple_function', 'fn(param: ?) -> string' ),
          CompletionEntryMatcher( 'basic_type', 'number' ),
          CompletionEntryMatcher( 'object', 'object' ),
          CompletionEntryMatcher( 'toString', 'fn() -> string' ),
          CompletionEntryMatcher( 'toLocaleString', 'fn() -> string' ),
          CompletionEntryMatcher( 'valueOf', 'fn() -> number' ),
          CompletionEntryMatcher( 'hasOwnProperty', 'fn(prop: string) -> bool' ),
          CompletionEntryMatcher( 'isPrototypeOf', 'fn(obj: ?) -> bool' ),
          CompletionEntryMatcher( 'propertyIsEnumerable', 'fn(prop: string) -> bool' ),
        ),
        'errors': empty(),
      } )
    },
  } )
def GetCompletions_MaxDetailedCompletion_test( app ):
  RunTest( app, {
    'expect': {
      'data': has_entries( {
        'completions': all_of(
          contains_inanyorder(
            CompletionEntryMatcher( 'methodA' ),
            CompletionEntryMatcher( 'methodB' ),
            CompletionEntryMatcher( 'methodC' ),
          ),
          is_not( any_of(
            has_item( CompletionEntryMatcher( 'methodA', extra_params = {
              'menu_text': 'methodA (method) Foo.methodA(): void' } ) ),
            has_item( CompletionEntryMatcher( 'methodB', extra_params = {
              'menu_text': 'methodB (method) Foo.methodB(): void' } ) ),
            has_item( CompletionEntryMatcher( 'methodC', extra_params = {
              'menu_text': ( 'methodC (method) Foo.methodC(a: '
                             '{ foo: string; bar: number; }): void' ) } ) )
          ) )
        )
      } )
    }
  } )
def GetCompletions_Unicode_InFile_test(app):
    RunTest(app, {
        "description": "completions work with unicode chars in the file",
        "request": {
            "filetype": "javascript",
            "filepath": PathToTestFile("unicode.js"),
            "line_num": 3,
            "column_num": 16,
        },
        "expect": {
            "response": http.client.OK,
            "data": has_entries({
                "completions": contains_inanyorder(
                    CompletionEntryMatcher("charAt", "fn(i: number) -> string"),
                    CompletionEntryMatcher("charCodeAt", "fn(i: number) -> number"),
                ),
                "completion_start_column": 13,
                "errors": empty(),
            }),
        },
    })
def GoToReferences_test( self ):
  self._RunTest( {
    'description': 'GoToReferences works within file',
    'request': {
      'command': 'GoToReferences',
      'line_num': 17,
      'column_num': 29,
      'filepath': self._PathToTestFile( 'coollib', 'cool_object.js' ),
    },
    'expect': {
      'response': httplib.OK,
      'data': contains_inanyorder(
        has_entries( {
          'filepath': self._PathToTestFile( 'coollib', 'cool_object.js' ),
          'line_num': 17,
          'column_num': 29,
        } ),
        has_entries( {
          'filepath': self._PathToTestFile( 'coollib', 'cool_object.js' ),
          'line_num': 12,
          'column_num': 9,
        } )
      )
    }
  } )
def test_group_and_collect_with_false_values(self):
    self._save_all(
        'foo_bar',
        {'foo': 'one', 'bar': False},
        {'foo': 'two', 'bar': True},
        {'foo': 'two', 'bar': True},
        {'foo': 'one', 'bar': False})

    results = self.engine.execute_query(
        'foo_bar', Query.create(group_by=['foo'], collect=[('bar', 'sum')]))

    assert_that(
        results,
        contains_inanyorder(has_entries({'bar': [False, False]}),
                            has_entries({'bar': [True, True]})))
def test_list_participants_with_two_participants(self):
    calld = self.make_calld()
    conference_id = CONFERENCE1_ID
    self.confd.set_conferences(
        MockConference(id=conference_id, name='conference'),
    )
    self.given_call_in_conference(CONFERENCE1_EXTENSION, caller_id_name='participant1')
    self.given_call_in_conference(CONFERENCE1_EXTENSION, caller_id_name='participant2')

    participants = calld.conferences.list_participants(conference_id)

    assert_that(participants, has_entries({
        'total': 2,
        'items': contains_inanyorder(
            has_entry('caller_id_name', 'participant1'),
            has_entry('caller_id_name', 'participant2'),
        )
    }))
def GetCompletions_UnicodeIdentifier_test(app):
  filepath = PathToTestFile(DEFAULT_PROJECT_DIR, 'src', 'com', 'youcompleteme',
                            'Test.java')
  RunTest(app, {
    'description': 'Completion works for unicode identifier',
    'request': {
      'filetype': 'java',
      'filepath': filepath,
      'line_num': 16,
      'column_num': 35,
      'force_semantic': True
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries({
        'completion_start_column': 35,
        'completions': contains_inanyorder(*WithObjectMethods(
          CompletionEntryMatcher('a_test', 'Test.TéstClass', {
            'kind': 'Field',
            'detailed_info': 'a_test : int\n\n',
          }),
          CompletionEntryMatcher('testywesty', 'Test.TéstClass', {
            'kind': 'Field',
          }),
        )),
        'errors': empty(),
      })
    },
  })
def test_that_format_results_marks_favorites(self):
    result1 = self.SourceResult(
        {'id': 1,
         'firstname': 'Alice',
         'lastname': 'AAA',
         'telephoneNumber': '5555555555'},
        self.xivo_id, None, 1, UUID1, None)
    result2 = self.SourceResult(
        {'id': 2,
         'firstname': 'Bob',
         'lastname': 'BBB',
         'telephoneNumber': '5555556666'},
        self.xivo_id, 'agent_id', 2, UUID2, 'endpoint_id')
    display = [
        DisplayColumn('Firstname', None, 'Unknown', 'firstname'),
        DisplayColumn('Lastname', None, '', 'lastname'),
        DisplayColumn('Number', 'office_number', None, 'telephoneNumber'),
        DisplayColumn('Favorite', 'favorite', None, None),
    ]
    formatter = _ResultFormatter(display)

    result = formatter.format_results(
        [result1, result2],
        {'my_source': ['2'], 'my_other_source': ['1', '2', '3']})

    assert_that(
        result,
        has_entry(
            'results',
            contains_inanyorder(
                has_entry('column_values',
                          contains('Alice', 'AAA', '5555555555', False)),
                has_entry('column_values',
                          contains('Bob', 'BBB', '5555556666', True)))))
def test_multiple_numbers(self):
    google_contact = {
        'gd$phoneNumber': [
            {
                'rel': 'http://schemas.google.com/g/2005#mobile',
                'uri': 'tel:+1-555-123-4567',
                '$t': '+1 555-123-4567',
            },
            {
                'rel': 'http://schemas.google.com/g/2005#home',
                'uri': 'tel:+1-555-123-9876',
                '$t': '+1 5551239876',
            },
            {
                'label': 'custom',
                '$t': '(555) 123-1111',
                'uri': 'tel:+1-555-123-1111',
            },
        ],
    }

    formatted_contact = self.formatter.format(google_contact)

    assert_that(
        formatted_contact,
        has_entries(
            numbers_by_label=has_entries(
                mobile='+15551234567',
                home='+15551239876',
                custom='5551231111',
            ),
            numbers=contains_inanyorder(
                '+15551239876',
                '5551231111',
                '+15551234567',
            ),
        ))
def KeywordsFromSyntaxListOutput_PythonSyntax_test():
  expected_keywords = (
    'bytearray', 'IndexError', 'all', 'help', 'vars', 'SyntaxError', 'global', 'elif',
    'unicode', 'sorted', 'memoryview', 'isinstance', 'except', 'nonlocal', 'NameError',
    'finally', 'BytesWarning', 'dict', 'IOError', 'pass', 'oct', 'bin', 'SystemExit',
    'return', 'StandardError', 'format', 'TabError', 'break', 'next', 'not',
    'UnicodeDecodeError', 'False', 'RuntimeWarning', 'list', 'iter', 'try', 'reload',
    'Warning', 'round', 'dir', 'cmp', 'set', 'bytes', 'UnicodeTranslateError', 'intern',
    'issubclass', 'yield', 'Ellipsis', 'hash', 'locals', 'BufferError', 'slice', 'for',
    'FloatingPointError', 'sum', 'VMSError', 'getattr', 'abs', 'print', 'import', 'True',
    'FutureWarning', 'ImportWarning', 'None', 'EOFError', 'len', 'frozenset', 'ord',
    'super', 'raise', 'TypeError', 'KeyboardInterrupt', 'UserWarning', 'filter', 'range',
    'staticmethod', 'SystemError', 'or', 'BaseException', 'pow', 'RuntimeError', 'float',
    'MemoryError', 'StopIteration', 'globals', 'divmod', 'enumerate', 'apply',
    'LookupError', 'open', 'basestring', 'from', 'UnicodeError', 'zip', 'hex', 'long',
    'IndentationError', 'int', 'chr', '__import__', 'type', 'Exception', 'continue',
    'tuple', 'reduce', 'reversed', 'else', 'assert', 'UnicodeEncodeError', 'input',
    'with', 'hasattr', 'delattr', 'setattr', 'raw_input', 'PendingDeprecationWarning',
    'compile', 'ArithmeticError', 'while', 'del', 'str', 'property', 'def', 'and',
    'GeneratorExit', 'ImportError', 'xrange', 'is', 'EnvironmentError', 'KeyError',
    'coerce', 'SyntaxWarning', 'file', 'in', 'unichr', 'ascii', 'any', 'as', 'if',
    'OSError', 'DeprecationWarning', 'min', 'UnicodeWarning', 'execfile', 'id',
    'complex', 'bool', 'ValueError', 'NotImplemented', 'map', 'exec', 'buffer', 'max',
    'class', 'object', 'repr', 'callable', 'ZeroDivisionError', 'eval', '__debug__',
    'ReferenceError', 'AssertionError', 'classmethod', 'UnboundLocalError',
    'NotImplementedError', 'lambda', 'AttributeError', 'OverflowError', 'WindowsError' )

  assert_that(
    syntax_parse._KeywordsFromSyntaxListOutput(
      ContentsOfTestFile( 'python_syntax' ) ),
    contains_inanyorder( *expected_keywords ) )
def GetCompletions_FilteredNoResults_Fallback_test(app):
  # no errors because the semantic completer returned results, but they
  # were filtered out by the query, so this is considered working OK
  # (whereas no completions from the semantic engine is considered an
  # error)

  # TESTCASE5 (general_fallback/lang_cpp.cc)
  RunTest(app, {
    'description': '. on struct returns IDs after query=do_',
    'request': {
      'filetype': 'c',
      'filepath': PathToTestFile('general_fallback', 'lang_c.c'),
      'line_num': 71,
      'column_num': 18,
      'force_semantic': False,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries({
        'completions': contains_inanyorder(
          # do_ is an identifier because it is already in the file when we
          # load it
          CompletionEntryMatcher('do_', '[ID]'),
          CompletionEntryMatcher('do_something', '[ID]'),
          CompletionEntryMatcher('do_another_thing', '[ID]'),
          CompletionEntryMatcher('DO_SOMETHING_TO', '[ID]'),
          CompletionEntryMatcher('DO_SOMETHING_VIA', '[ID]')),
        'errors': empty()
      })
    },
  })
def GetCompletions_ClangCLDriverFlag_SimpleCompletion_test(app):
  RunTest(app, {
    'description': 'basic completion with --driver-mode=cl',
    'request': {
      'filetype': 'cpp',
      'filepath': PathToTestFile('driver_mode_cl', 'flag', 'driver_mode_cl.cpp'),
      'line_num': 8,
      'column_num': 18,
      'force_semantic': True,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries({
        'completion_start_column': 3,
        'completions': contains_inanyorder(
          CompletionEntryMatcher('driver_mode_cl_include_func',
                                 'void\n"driver_mode_cl_include.h"'),
          CompletionEntryMatcher('driver_mode_cl_include_int',
                                 'int\n"driver_mode_cl_include.h"'),
        ),
        'errors': empty(),
      })
    }
  })
def Subcommands_FixIt_test(app):
  filepath = PathToTestFile('test.js')
  RunTest(app, {
    'description': 'FixIt works on a non-existing method',
    'request': {
      'command': 'FixIt',
      'line_num': 32,
      'column_num': 19,
      'filepath': filepath,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries({
        'fixits': contains_inanyorder(
          has_entries({
            'text': "Declare method 'nonExistingMethod'",
            'chunks': contains(
              ChunkMatcher(
                matches_regexp(
                  '^\r?\n'
                  ' nonExistingMethod\\(\\) {\r?\n'
                  ' throw new Error\\("Method not implemented."\\);\r?\n'
                  ' }$',
                ),
                LocationMatcher(filepath, 22, 12),
                LocationMatcher(filepath, 22, 12))),
            'location': LocationMatcher(filepath, 32, 19)
          }))
      })
    }
  })
def test_list_lines(self):
    self._set_cache(lines=[
        {'id': 1, 'name': s.name_1, 'protocol': 'sip', 'tenant_uuid': s.tenant_uuid},
        {'id': 2, 'name': s.name_2, 'protocol': 'sccp', 'tenant_uuid': s.tenant_uuid},
        {'id': 3, 'name': s.interface, 'protocol': 'custom', 'tenant_uuid': s.tenant_uuid},
        {'id': 4, 'protocol': 'sip', 'name': 'ignored', 'tenant_uuid': s.other_tenant_uuid},
    ])

    result = self.client.list_lines(s.tenant_uuid)

    assert_that(
        result,
        contains_inanyorder(
            has_entries(id=1),
            has_entries(id=2),
            has_entries(id=3),
        ))
def Subcommands_RefactorRename_SimpleUnicode_test( app ):
  RunTest( app, {
    'description': 'RefactorRename works with Unicode characters',
    'request': {
      'command': 'RefactorRename',
      'arguments': [ 'ø' ],
      'line_num': 14,
      'column_num': 3,
      'filepath': PathToTestFile( 'unicode.ts' ),
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'fixits': contains_exactly( has_entries( {
          'chunks': contains_inanyorder(
            ChunkMatcher( 'ø',
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 14, 3 ),
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 14, 5 ) ),
            ChunkMatcher( 'ø',
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 20, 27 ),
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 20, 29 ) ),
            ChunkMatcher( 'ø',
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 23, 5 ),
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 23, 7 ) ),
            ChunkMatcher( 'ø',
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 27, 17 ),
                          LocationMatcher( PathToTestFile( 'unicode.ts' ), 27, 19 ) ),
          ),
          'location': LocationMatcher( PathToTestFile( 'unicode.ts' ), 14, 3 )
        } ) )
      } )
    }
  } )
def test_column_field_can_be_marked_as_internal():
    Base = declarative_base()

    class AuthorRecord(Base):
        __tablename__ = "author"

        c_id = Column(Integer, primary_key=True)
        c_name = Column(Unicode, nullable=False)

    class Author(SqlAlchemyObjectType):
        __model__ = AuthorRecord

        @staticmethod
        def __primary_key__():
            return [AuthorRecord.c_name]

        id = column_field(AuthorRecord.c_id, internal=True)
        name = column_field(AuthorRecord.c_name)

    class Root(RootType):
        authors = many(lambda: select(Author))

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    result = executor(Root)(
        """{ authors { id } }""",
        context=QueryContext(session=Session(engine)))

    assert_that(
        result,
        is_invalid_result(errors=contains_inanyorder(
            has_string(starts_with('Cannot query field "id"')),
        )),
    )
def GetCompletions_WithFixIt_test( app ):
  filepath = ProjectPath( 'TestFactory.java' )
  RunTest( app, {
    'description': 'semantic completion with when additional textEdit',
    'request': {
      'filetype'  : 'java',
      'filepath'  : filepath,
      'line_num'  : 19,
      'column_num': 25,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completion_start_column': 22,
        'completions': contains_inanyorder(
          CompletionEntryMatcher( 'CUTHBERT', 'com.test.wobble.Wibble', {
            'kind': 'EnumMember',
            'extra_data': has_entries( {
              'fixits': contains( has_entries( {
                'chunks': contains(
                  ChunkMatcher( 'Wibble',
                                LocationMatcher( filepath, 19, 15 ),
                                LocationMatcher( filepath, 19, 21 ) ),
                  # OK, so it inserts the import
                  ChunkMatcher( '\n\nimport com.test.wobble.Wibble;\n\n',
                                LocationMatcher( filepath, 1, 18 ),
                                LocationMatcher( filepath, 3, 1 ) ),
                ),
              } ) ),
            } ),
          } ),
        ),
        'errors': empty(),
      } )
    },
  } )
def GetCompletions_AutoImport_test( app ):
  filepath = PathToTestFile( 'test.ts' )
  RunTest( app, {
    'description': 'Symbol from external module can be completed and '
                   'its completion contains fixits to automatically import it',
    'request': {
      'line_num': 39,
      'column_num': 5,
      'filepath': filepath,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': has_item( has_entries( {
          'insertion_text': 'Bår',
          'extra_menu_info': 'class Bår',
          'detailed_info': 'class Bår',
          'kind': 'class',
          'extra_data': has_entries( {
            'fixits': contains_inanyorder(
              has_entries( {
                'text': 'Import \'Bår\' from module "./unicode"',
                'chunks': contains_exactly(
                  ChunkMatcher(
                    matches_regexp( '^import { Bår } from "./unicode";\r?\n' ),
                    LocationMatcher( filepath, 1, 1 ),
                    LocationMatcher( filepath, 1, 1 ) )
                ),
                'location': LocationMatcher( filepath, 39, 5 )
              } )
            )
          } )
        } ) )
      } )
    }
  } )
def test_list(self, a, b, c, user1_uuid, user2_uuid):
    def build_list_matcher(*names):
        return [has_entries(name=name, address=base.ADDRESS_NULL) for name in names]

    result = self._tenant_dao.list_()
    expected = build_list_matcher('foo c', 'bar b', 'baz a', 'master')
    assert_that(result, contains_inanyorder(*expected))

    result = self._tenant_dao.list_(tenant_uuids=[a, b])
    expected = build_list_matcher('bar b', 'baz a')
    assert_that(result, contains_inanyorder(*expected))

    result = self._tenant_dao.list_()
    expected = build_list_matcher('foo c', 'bar b', 'baz a', 'master')
    assert_that(result, contains_inanyorder(*expected))

    result = self._tenant_dao.list_(search='ba')
    expected = build_list_matcher('bar b', 'baz a')
    assert_that(result, contains_inanyorder(*expected))

    result = self._tenant_dao.list_(search='yahoo')
    expected = build_list_matcher('bar b', 'foo c')
    assert_that(result, contains_inanyorder(*expected))

    result = self._tenant_dao.list_(domain_name='outlook.fr')
    expected = build_list_matcher('foo c')
    assert_that(result, contains_inanyorder(*expected))

    result = self._tenant_dao.list_(order='name', direction='desc')
    expected = build_list_matcher('master', 'foo c', 'baz a', 'bar b')
    assert_that(result, contains_exactly(*expected))

    result = self._tenant_dao.list_(limit=1, order='name', direction='asc')
    expected = build_list_matcher('bar b')
    assert_that(result, contains_exactly(*expected))

    result = self._tenant_dao.list_(offset=1, order='name', direction='asc')
    expected = build_list_matcher('baz a', 'foo c', 'master')
    assert_that(result, contains_exactly(*expected))
def test_order_by(self):
    sip = self.add_endpoint_sip()
    template_1 = self.add_endpoint_sip(template=True)
    template_2 = self.add_endpoint_sip(template=True)
    template_3 = self.add_endpoint_sip(template=True)

    sip.templates = [template_2, template_3, template_1]
    self.session.flush()

    self.session.expire_all()
    assert_that(sip.templates, contains(
        template_2,
        template_3,
        template_1,
    ))

    templates = self.session.query(EndpointSIPTemplate).all()
    assert_that(
        templates,
        contains_inanyorder(
            has_properties(parent_uuid=template_2.uuid, priority=0),
            has_properties(parent_uuid=template_3.uuid, priority=1),
            has_properties(parent_uuid=template_1.uuid, priority=2),
        ))
def test_getter(self):
    user1 = self.add_user()
    user2 = self.add_user()
    group = self.add_group()
    self.add_queue_member(
        queue_name=group.name,
        category='group',
        usertype='user',
        userid=user1.id,
    )
    self.add_queue_member(
        queue_name=group.name,
        category='group',
        usertype='user',
        userid=user2.id,
    )
    call_pickup = self.add_pickup()
    pickup_member = self.add_pickup_member(
        pickupid=call_pickup.id,
        membertype='group',
        memberid=group.id,
    )

    assert_that(pickup_member.users_from_group, contains_inanyorder(user1, user2))
def Subcommands_GoToImplementation_test( app ):
  RunTest( app, {
    'description': 'GoToImplementation works',
    'request': {
      'command': 'GoToImplementation',
      'line_num': 6,
      'column_num': 11,
      'filepath': PathToTestFile( 'signatures.ts' ),
    },
    'expect': {
      'response': requests.codes.ok,
      'data': contains_inanyorder(
        has_entries( {
          'description': ' return {',
          'line_num'   : 12,
          'column_num' : 10,
          'filepath'   : PathToTestFile( 'signatures.ts' )
        } ),
        has_entries( {
          'description': 'class SomeClass implements ReturnValue {',
          'line_num'   : 35,
          'column_num' : 7,
          'filepath'   : PathToTestFile( 'signatures.ts' )
        } ),
      )
    }
  } )
def test_sink_display_data(self):
    file_name = 'some_parquet_sink'
    sink = _create_parquet_sink(
        file_name,
        self.SCHEMA,
        'none',
        1024 * 1024,
        1000,
        False,
        '.end',
        0,
        None,
        'application/x-parquet')
    dd = DisplayData.create_from(sink)
    expected_items = [
        DisplayDataItemMatcher('schema', str(self.SCHEMA)),
        DisplayDataItemMatcher(
            'file_pattern',
            'some_parquet_sink-%(shard_num)05d-of-%(num_shards)05d.end'),
        DisplayDataItemMatcher('codec', 'none'),
        DisplayDataItemMatcher('row_group_buffer_size', str(1024 * 1024)),
        DisplayDataItemMatcher('compression', 'uncompressed')
    ]
    hc.assert_that(dd.items, hc.contains_inanyorder(*expected_items))
def test_remove_acl_template(self, policy):
    assert_http_error(404, self.client.policies.remove_acl_template, UNKNOWN_UUID, '#')

    with self.client_in_subtenant() as (client, _, __):
        assert_http_error(404, client.policies.remove_acl_template, policy['uuid'], '#')

        policy_in_subtenant = client.policies.new(name='in sub-tenant', acl_templates=['#'])
        self.client.policies.remove_acl_template(policy_in_subtenant['uuid'], '#')
        assert_that(
            client.policies.get(policy_in_subtenant['uuid']),
            has_entries(uuid=policy_in_subtenant['uuid'], acl_templates=empty()),
        )

    self.client.policies.remove_acl_template(policy['uuid'], 'ctid-ng.#')

    response = self.client.policies.get(policy['uuid'])
    assert_that(
        response,
        has_entries(acl_templates=contains_inanyorder('dird.me.#')))
def test_two_suite_params(reports_for):
    reports = reports_for(test_A="""
    'suite_A'

    def test():
        pass
    """, test_B="""
    'suite_B'

    def test():
        pass""")

    assert_that(
        reports,
        contains_inanyorder(
            has_properties({'{}name': 'test_A', '{}description': 'suite_A'}),
            has_properties({'{}name': 'test_B', '{}description': 'suite_B'}),
        ))
def test_sink_display_data(self):
    file_name = 'some_avro_sink'
    sink = AvroSink(
        file_name, self.SCHEMA, 'null', '.end', 0, None, 'application/x-avro')
    dd = DisplayData.create_from(sink)
    expected_items = [
        DisplayDataItemMatcher('schema', str(self.SCHEMA)),
        DisplayDataItemMatcher(
            'file_pattern',
            'some_avro_sink-%(shard_num)05d-of-%(num_shards)05d.end'),
        DisplayDataItemMatcher('codec', 'null'),
        DisplayDataItemMatcher('compression', 'uncompressed')]
    hc.assert_that(dd.items, hc.contains_inanyorder(*expected_items))
def Diagnostics_MaximumDiagnosticsNumberExceeded_test( app ):
  filepath = PathToTestFile( 'test.ts' )
  contents = ReadFile( filepath )

  event_data = BuildRequest( filepath = filepath,
                             filetype = 'typescript',
                             contents = contents,
                             event_name = 'BufferVisit' )
  app.post_json( '/event_notification', event_data )

  event_data = BuildRequest( filepath = filepath,
                             filetype = 'typescript',
                             contents = contents,
                             event_name = 'FileReadyToParse' )

  assert_that(
    app.post_json( '/event_notification', event_data ).json,
    contains_inanyorder(
      has_entries( {
        'kind': 'ERROR',
        'text': "Property 'm' does not exist on type 'Foo'.",
        'location': LocationMatcher( filepath, 17, 5 ),
        'location_extent': RangeMatcher( filepath, ( 17, 5 ), ( 17, 6 ) ),
        'ranges': contains( RangeMatcher( filepath, ( 17, 5 ), ( 17, 6 ) ) ),
        'fixit_available': True
      } ),
      has_entries( {
        'kind': 'ERROR',
        'text': 'Maximum number of diagnostics exceeded.',
        'location': LocationMatcher( filepath, 1, 1 ),
        'location_extent': RangeMatcher( filepath, ( 1, 1 ), ( 1, 1 ) ),
        'ranges': contains( RangeMatcher( filepath, ( 1, 1 ), ( 1, 1 ) ) ),
        'fixit_available': False
      } ),
    )
  )
def test_email_updates_as_user(self, foobar):
    assert_http_error(404, self.client.users.update_emails, UNKNOWN_UUID, [])
    assert_http_error(400, self.client.users.update_emails, foobar['uuid'], [ONE, ONE])

    email_uuid = foobar['emails'][0]['uuid']

    result = self.client.users.update_emails(foobar['uuid'], [ONE, THREE])
    assert_that(
        result,
        contains_inanyorder(
            has_entries(uuid=email_uuid, **ONE),
            has_entries(
                uuid=uuid_(),
                address=THREE['address'],
                main=THREE['main'],
                confirmed=False,  # Confirmed is ignored when modifying as a user
            ),
        ),
    )

    result = self.client.users.update_emails(foobar['uuid'], [])
    assert_that(result, empty())
def test_edit_all_fields(self):
    transport = self.add_transport(
        name='transport',
        options=[
            ['bind', '0.0.0.0'],
            ['protocol', 'wss'],
            ['symmetric_transport', 'yes'],
            ['local_net', '192.168.0.0/16'],
        ],
    )

    self.session.expire_all()
    transport.name = 'other_transport'
    transport.options = [
        ['bind', '0.0.0.0'],
        ['protocol', 'udp'],
        ['cos', '1'],
        ['local_net', '192.168.0.0/16'],
        ['local_net', '10.1.42.0/24'],
    ]

    dao.edit(transport)

    self.session.expire_all()
    assert_that(
        transport,
        has_properties(
            name='other_transport',
            options=contains_inanyorder(
                contains('bind', '0.0.0.0'),
                contains('protocol', 'udp'),
                contains('cos', '1'),
                contains('local_net', '192.168.0.0/16'),
                contains('local_net', '10.1.42.0/24'),
            ),
        ))
def test_template_association(self, uuid):
    self._policy_dao.associate_policy_template(uuid, '#')
    assert_that(
        self.get_policy(uuid),
        has_entries(acl_templates=contains_inanyorder('#')),
    )

    assert_that(
        calling(self._policy_dao.associate_policy_template).with_args(uuid, '#'),
        raises(exceptions.DuplicateTemplateException),
    )

    self._policy_dao.dissociate_policy_template(uuid, '#')
    assert_that(
        self.get_policy(uuid),
        has_entries(acl_templates=empty()),
    )

    assert_that(
        calling(self._policy_dao.associate_policy_template).with_args('unknown', '#'),
        raises(exceptions.UnknownPolicyException),
    )

    assert_that(self._policy_dao.dissociate_policy_template('unknown', '#'), equal_to(0))
def test_game_move_2(self):
    """Tests getting the moves for a knight that can move."""
    username = '******'  # From fixtures
    game_id = 1
    from_ = 'B1'
    url = '/chess/user/' + username + '/game/' + str(game_id) + '/move/' + from_

    c = Client()
    response = c.get(url)
    content = json.loads(response.content)
    expected = json.loads(u"""{
        "from": "B1",
        "to": [
            {"capture": false, "square": "A3"},
            {"capture": false, "square": "C3"}
        ]
    }""")

    assert_that(response.status_code, equal_to(HTTP_200_OK))
    assert_that(content["from"], equal_to(expected["from"]))
    assert_that(content["to"], contains_inanyorder(*expected["to"]))
def test_that_results_can_be_ordered_by_an_unknown_column_with_no_effect(self):
    self.contact_crud.list.return_value = self._contacts

    result = self.service.list_contact(
        s.tenant_uuid,
        s.phonebook_id,
        search=s.search,
        order='number',
        direction='desc',
    )

    self.contact_crud.list.assert_called_once_with(
        s.tenant_uuid, s.phonebook_id, search=s.search
    )
    assert_that(
        result,
        contains_inanyorder(
            self._manolo,
            self._antonin,
            self._annabelle,
            self._gary_bob,  # no number
            self._simon,
        ),
    )
def test_create_all_parameters():
    parameters = {
        'name': 'MyQueue',
        'label': 'Label',
        'data_quality': True,
        'dtmf_hangup_callee_enabled': True,
        'dtmf_hangup_caller_enabled': True,
        'dtmf_transfer_callee_enabled': True,
        'dtmf_transfer_caller_enabled': True,
        'dtmf_record_callee_enabled': True,
        'dtmf_record_caller_enabled': True,
        'retry_on_timeout': False,
        'ring_on_hold': True,
        'announce_hold_time_on_entry': True,
        'ignore_forward': True,
        'wait_time_threshold': 1,
        'wait_time_destination': {'type': 'none'},
        'wait_ratio_threshold': 1,
        'wait_ratio_destination': {'type': 'none'},
        'options': ALL_OPTIONS,
        'caller_id_mode': 'prepend',
        'caller_id_name': 'QUEUE-',
        'timeout': 42,
        'music_on_hold': 'default',
        'preprocess_subroutine': 'subroutine',
        'enabled': False,
    }

    response = confd.queues.post(**parameters)
    response.assert_created('queues')

    options = parameters.pop('options')
    assert_that(response.item, has_entries(parameters))
    assert_that(response.item['options'], contains_inanyorder(*options))

    confd.queues(response.item['id']).delete().assert_deleted()
def GetCompletions_WithQuery_test( app ):
  RunTest( app, {
    'description': 'semantic completion works for builtin types (with query)',
    'request': {
      'filetype'  : 'java',
      'filepath'  : ProjectPath( 'TestFactory.java' ),
      'line_num'  : 27,
      'column_num': 15,
    },
    'expect': {
      'response': requests.codes.ok,
      'data': has_entries( {
        'completions': contains_inanyorder(
          CompletionEntryMatcher( 'test', 'TestFactory.Bar', { 'kind': 'Field' } ),
          CompletionEntryMatcher( 'testString', 'TestFactory.Bar', { 'kind': 'Field' } )
        ),
        'errors': empty(),
      } )
    },
  } )