def test_should_know_if_specific_user_can_start_process(self):
    prospective_creator = User.objects.create()
    expect(self.process.is_startable_by_user(prospective_creator)).is_true()
    self.starter.is_self_assignable = False
    self.starter.save()
    expect(self.process.is_startable_by_user(prospective_creator)).is_false()

def test_should_have_process_step_metadata_on_form(self):
    first_transition = ProcessStep.objects.create(name="Add_report")
    expect(first_transition.json_schema()).has_subdict({
        'title': "Add_report",
        'type': 'object',
    })

def test_should_create_its_directory_if_it_does_not_exist(self, tempdir):
    destination = join(tempdir.path, 'foo', 'bar', 'baz')
    data_interface = DataInterface(destination, dict(foo='bar'))
    expect(exists(destination)) == False
    data_interface.dump()
    expect(exists(destination)) == True

def test_should_raise_if_user_tries_to_start_process_in_role_which_is_not_self_assignable(self):
    self.starter.is_self_assignable = False
    self.starter.save()
    creator = User.objects.create()
    from django.core.exceptions import PermissionDenied
    expect(lambda: self.process.create_instance(creator=creator)).to_raise(PermissionDenied)

def test_should_error_if_nameservers_are_not_authoritative(self):
    self.on_command('dig +short NS example.com').provide_output("""\
b.iana-servers.net.
a.iana-servers.net.""")
    self.on_command('dig +short SOA example.com @a.iana-servers.net').provide_output("anything")
    self.on_command('dig +short SOA example.com @b.iana-servers.net').provide_output("")
    expect(check_soas_equal_for_domain('example.com')) == (
        NAGIOS.CRITICAL,
        'Nameserver(s) [\'b.iana-servers.net\'] did not return SOA record for domain "example.com"')

def test_should_return_false_if_soas_differ(self):
    self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de.""")
    self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("not equal")
    self.on_command('dig +short SOA yeepa.de @nsb1.schlundtech.de').provide_output("to this")
    expect(check_soas_equal_for_domain('yeepa.de')) == (
        NAGIOS.CRITICAL,
        'Nameservers do not agree for domain "yeepa.de" [\'not equal\', \'to this\']')

def test_should_not_provide_password(self):
    dump = MySQLDump(directory='.', options=dict(mysql_user='******', mysql_password=''), sh=self.sh)
    dump.dump()
    args, kwargs = self.sh.mysqldump.call_args
    expect(kwargs).has_subdict(
        password=False,
    )

def test_should_allow_to_configure_warning_level_for_number_of_webservers(self):
    self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de.""")
    self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("equal")
    self.on_command('dig +short SOA yeepa.de @nsb1.schlundtech.de').provide_output("equal")
    expect(check_soas_equal_for_domain('yeepa.de', warning_minimum_nameservers=3)) \
        == (NAGIOS.WARNING,
            'Expected at least 3 nameservers for domain "yeepa.de", but only found 2 - '
            "['nsc1.schlundtech.de', 'nsb1.schlundtech.de']")

def test_should_compare_soas_from_all_web_servers(self):
    self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de.""")
    self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("""\
nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600""")
    self.on_command('dig +short SOA yeepa.de @nsb1.schlundtech.de').provide_output("""\
nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600""")
    expect(check_soas_equal_for_domain('yeepa.de')) == (
        NAGIOS.OK,
        'nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600')

def test_should_provide_command_before_postgres_dump(self):
    options = dict(postgres_username='******', dump_command_prefix='ssh fnord')
    dump = PostgreSQLDump(directory='/', options=options, sh=self.sh)
    expect(self.sh.Command.called) == False
    dump.dump()
    self.sh.Command('ssh').bake('fnord').pg_dumpall.assert_called_once_with(
        clean=True,
        username='******',
        _out='/dump.sql',
    )

def test_should_restore_copy(self, tempdir):
    production_directory = join(tempdir.path, 'production')
    backup_dir = join(tempdir.path, 'backup')
    restore = CopyDirectory(backup_dir, options=dict(
        source=production_directory
    ))
    os.makedirs(join(backup_dir, 'important_file'))
    expect(exists(join(production_directory, 'important_file'))) == False
    restore.restore()
    expect(exists(join(production_directory, 'important_file'))) == True

def test_get_nameservers_for_domain(self):
    self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de
nsa1.schlundtech.de.
nsd1.schlundtech.de.""")
    nameservers = nameservers_for_domain('yeepa.de')
    expect(nameservers) == [
        'nsc1.schlundtech.de', 'nsb1.schlundtech.de',
        'nsa1.schlundtech.de', 'nsd1.schlundtech.de']

def test_should_provide_command_before_mysqldump(self):
    dump = MySQLDump(directory='/', options=dict(
        mysql_user='******',
        mysql_password='******',
        dump_command_prefix='ssh fnord',
    ), sh=self.sh)
    expect(self.sh.Command.called) == False
    dump.dump()
    self.sh.Command('ssh').bake('fnord').mysqldump.assert_called_once_with(
        '--all-databases', '--complete-insert',
        user='******', password='******',
        _out='/dump.sql',
    )

def test_should_hard_link_directory_to_copy(self, tempdir):
    production_directory = join(tempdir.path, 'production')
    backup_directory = join(tempdir.path, 'backup')
    source_file = join(production_directory, 'important_file')
    os.makedirs(production_directory)
    touch(source_file)
    dump = CopyDirectory(directory=backup_directory, options=dict(
        source=production_directory
    ))
    backup_file = join(backup_directory, 'important_file')
    expect(exists(backup_file)) == False
    expect(os.stat(source_file).st_nlink) == 1
    dump.dump()
    expect(exists(backup_file)) == True
    expect(os.stat(source_file).st_nlink) == 2
    expect(os.stat(backup_file).st_nlink) == 2

def test_will_remove_files_not_in_backup_in_redumpster_managed_directories(self, tempdir):
    # Dangerous this test is, as it actually deletes files from the disk.
    CopyDirectory._default_rsync_args = self.original_default_rsync_args
    with change_working_directory_to(tempdir.path):
        production_directory = 'production'
        backup_directory = 'backup'
        os.makedirs(production_directory)
        source_file = join(production_directory, 'important_file')
        touch(join(tempdir.path, production_directory, '.redumpster_managed'))
        dump = CopyDirectory(directory=backup_directory, options=dict(
            source=production_directory
        ))
        dump.dump()
        touch(source_file)
        dump.restore()
        expect(os.path.exists(source_file)).is_false()

def test_should_restore(self):
    dump = PostgreSQLDump(directory='.', options=dict(postgres_username='******'), sh=self.sh)
    expect(self.sh.psql.called) == False
    dump.restore()
    expect(self.sh.psql.called) == True
    args, kwargs = self.sh.psql.call_args
    expect(kwargs).has_subdict(
        file="./dump.sql",
        username='******',
    )
    expect(args).contains('postgres')  # needs to start from the postgres db

def test_should_know_type_and_title_of_fields(self):
    device_description = FieldDefinition.objects.create(
        descript="Model Code",
        fieldtype=1,
    )
    expect(device_description.json_schema()) == {
        'title': 'Model Code',
        'type': 'string',
        'format': 'textarea',
    }
    error_description = FieldDefinition.objects.create(
        descript="Failure or error description",
        fieldtype=1,
    )
    expect(error_description.json_schema()) == {
        'title': 'Failure or error description',
        'type': 'string',
        'format': 'textarea',
    }

def test_should_respect_ordering_of_fields(self):
    first_transition = ProcessStep.objects.create(name="Add_report")
    error_description = FieldDefinition.objects.create(
        name='error_description',
        descript="Failure or error description",
        fieldtype=1,
    )
    device_description = FieldDefinition.objects.create(
        name='device_description',
        descript="Model Code",
        fieldtype=1,
    )
    first_transition_error_description = FieldPerstep.objects.create(
        step=first_transition,
        field_definition=error_description,
        order=2,
    )
    first_transition_device_description = FieldPerstep.objects.create(
        step=first_transition,
        field_definition=device_description,
        order=1,
    )
    expect(first_transition_error_description.json_schema()).has_subdict(propertyOrder=2)
    expect(first_transition_device_description.json_schema()).has_subdict(propertyOrder=1)
    schema = first_transition.json_schema()
    expect(schema).has_subdict(
        properties=dict(
            error_description=first_transition_error_description.json_schema(),
            device_description=first_transition_device_description.json_schema(),
        ),
        defaultProperties=['error_description', 'device_description']
    )

def test_ensures_hard_link_path_is_always_absolute(self, tempdir):
    with change_working_directory_to(tempdir.path):
        production_directory = 'production'
        backup_directory = 'backup'
        os.makedirs(production_directory)
        source_file = join(production_directory, 'important_file')
        touch(source_file)
        dump = CopyDirectory(directory=backup_directory, options=dict(
            source=production_directory
        ))
        source_file = abspath(source_file)
        backup_file = abspath(join(backup_directory, 'important_file'))
        expect(exists(backup_file)) == False
        expect(os.stat(source_file).st_nlink) == 1
        dump.dump()
        expect(exists(backup_file)) == True
        expect(os.stat(source_file).st_nlink) == 2
        expect(os.stat(backup_file).st_nlink) == 2

def test_should_add_role_instance_with_self_if_necessary_on_instance_creation(self):
    RoleInstance.objects.filter(pycuser=self.reporter).delete()
    expect(RoleInstance.objects.filter(pycuser=self.reporter).count()) == 0
    instance = self.process.create_instance(creator=self.reporter)
    expect(RoleInstance.objects.filter(pycuser=self.reporter).count()) == 1
    # doesn't create a second one
    instance = self.process.create_instance(creator=self.reporter)
    expect(RoleInstance.objects.filter(pycuser=self.reporter).count()) == 1

def test_should_instantiate_multiple_interfaces(self):
    config = dict(
        foo=dict(
            interface_name='noop',
        ),
        bar=dict(
            interface_name='noop',
        )
    )
    interfaces = interfaces_from_config(config, 'fnord')
    expect(interfaces).has_length(2)
    expect(interfaces[0]).isinstance(NoOp)
    directories = sorted(i.directory for i in interfaces)
    expect(directories[0].endswith('fnord/bar')).equals(True)
    expect(directories[1].endswith('fnord/foo')).equals(True)

def test_will_overwrite_existing_data_in_redumpster_managed_directories(self, tempdir):
    with change_working_directory_to(tempdir.path):
        production_directory = 'production'
        backup_directory = 'backup'
        os.makedirs(production_directory)
        source_file = join(production_directory, 'important_file')
        touch(source_file)
        touch(join(tempdir.path, production_directory, '.redumpster_managed'))
        dump = CopyDirectory(directory=backup_directory, options=dict(
            source=production_directory
        ))
        dump.dump()
        os.remove(source_file)  # ensure we don't write into the hardlinked file
        with open(source_file, 'w') as f:
            f.write('this is going to be overwritten by restore')
        dump.restore()
        with open(source_file) as f:
            expect(f.read()) == ''

def test_should_call_dump(self):
    dump = MySQLDump(directory='.', options=dict(mysql_user='******', mysql_password='******'), sh=self.sh)
    expect(self.sh.mysqldump.called) == False
    dump.dump()
    expect(self.sh.mysqldump.called) == True
    args, kwargs = self.sh.mysqldump.call_args
    expect(kwargs).has_subdict(
        password='******',
        user='******',
    )

def test_should_dump(self):
    dump = PostgreSQLDump(directory='.', options=dict(postgres_username='******'), sh=self.sh)
    expect(self.sh.pg_dumpall.called) == False
    dump.dump()
    expect(self.sh.pg_dumpall.called) == True
    args, kwargs = self.sh.pg_dumpall.call_args
    expect(kwargs).has_subdict(
        clean=True,
        username='******',
    )

def test_should_restore(self):
    dump = MySQLDump(directory='.', options=dict(mysql_user='******', mysql_password='******'), sh=self.sh)
    expect(self.sh.mysql.called) == False
    dump.restore()
    expect(self.sh.mysql.called) == True
    args, kwargs = self.sh.mysql.call_args
    expect(kwargs).has_subdict(
        execute="source ./dump.sql",
        password='******',
        user='******',
    )

def test_should_error_when_accessing_missing_attribute(self):
    class Foo():
        pass

    expect(lambda: _(Foo().missing)).to_raise(AttributeError)

def test_should_make_data_interfaces(self):
    data_interface = DataInterface.make_data_interface(
        'dir', 'noop', 'fnord', dict(foo='bar'))
    expect(data_interface).isinstance(NoOp)
    expect(data_interface.directory) == 'dir/fnord'
    expect(data_interface.config['options']) == dict(foo='bar', name='')

def test_should_produce_callable_on_unary_operator(self):
    expect(_([3, 5]).map(- _.each)._) == (-3, -5)
    expect(_([3, 5]).map(~ _.each)._) == (-4, -6)

def test_should_produce_itemgetter_on_item_access(self):
    expect(_([['foo'], ['bar']]).map(_.each[0])._) == ('foo', 'bar')

def test_lib_and_wrap_have_useful_repr(self):
    expect(repr(_.lib)).matches('virtual root module')
    expect(repr(_.each)).matches('lambda generator')

def test_should_explicitly_unwrap(self):
    foo = 1
    expect(_(foo).unwrap).is_(foo)

def test_str_and_repr_work(self):
    expect(str(_((1,2)))) == 'fluentpy.wrap((1, 2))'
    expect(repr(_((1,2)))) == 'fluentpy.wrap((1, 2))'

def test_function_expressions_return_unwrapped_objects(self):
    class Foo(object):
        bar = 'baz'

    expect(_.each.bar(Foo())).is_('baz')
    expect((_.each + 3)(4)).is_(7)
    expect(_.each['foo'](dict(foo='bar'))).is_('bar')

def test_can_access_original_module(self):
    import types
    expect(_.module).instanceof(types.ModuleType)

def test_pprint(self):
    out = io.StringIO()
    _([1,2,3]).pprint(stream=out)
    expect(out.getvalue()) == '[1, 2, 3]\n'

def test_isinstance_issubclass(self):
    expect(_('foo').isinstance(str)._) == True
    expect(_('foo').isinstance(int)._) == False
    expect(_(str).issubclass(object)._) == True
    expect(_(str).issubclass(str)._) == True
    expect(_(str).issubclass(int)._) == False

def test_hasattr_getattr_setattr_delattr(self):
    expect(_((1,2)).hasattr('len')._).is_false()
    expect(_('foo').getattr('__len__')()._) == 3

    class Attr(object):
        def __init__(self):
            self.foo = 'bar'

    expect(_(Attr()).setattr('foo', 'baz').self.foo._) == 'baz'
    expect(_(Attr()).delattr('foo').unwrap) == None
    expect(_(Attr()).delattr('foo').self.unwrap).isinstance(Attr)
    expect(_(Attr()).delattr('foo').self.vars()._) == {}

def test_should_remember_call_chain(self):
    def foo():
        return 'bar'
    expect(_(foo)().unwrap) == 'bar'
    expect(_(foo)().previous.unwrap) == foo

def test_import_submodule_that_is_also_a_symbol_in_the_parent_module(self):
    expect(_.lib.os.name._) == os.name
    expect(_.lib.os.path.join._) == os.path.join

def test_wrapped_objects_will_wrap_every_action_to_them(self):
    expect(_('foo').upper()).is_instance(_.Wrapper)
    expect(_([1,2,3])[1]).is_instance(_.Wrapper)
    expect(_(lambda: 'foo')()).is_instance(_.Wrapper)
    expect(_(dict(foo='bar')).foo).is_instance(_.Wrapper)

def test_classes_have_useful_docstrings(self):
    expect(_.Wrapper.__doc__).matches(r'Universal wrapper')
    expect(_.Callable.__doc__).matches(r'Higher order')
    expect(_.Iterable.__doc__).matches(r'Add iterator methods to any iterable')
    expect(_.Mapping.__doc__).matches(r'Index into dicts')
    expect(_.Text.__doc__).matches(r'regex methods')
    expect(_.Set.__doc__).matches(r'Mostly like Iterable')

def _test_should_allow_creating_callables_without_call(self):
    # This is likely not possible to attain due to the shortcoming that .foo already
    # needs to create the attrgetter, and we cannot distinguish a call to it from the
    # calls that map() etc. do.
    expect(_.each.foo) == attrgetter('foo')
    expect(_.each.foo(_, 'bar', 'baz')) == methodcaller('foo').curry(_, 'bar', 'baz')
    expect(_.call.foo('bar', 'baz')) == methodcaller('foo').curry(_, 'bar', 'baz')

        (i.high, d.hard): (.5, .3, .2),
    })
    l = letter = Distribution.dependent(
        ('bad', 'glowing'), {
            g.good: (.1, .9),
            g.ok: (.4, .6),
            g.bad: (.99, .01),
        })
    n = network = Student()
    # print(n.i)
    # print(n.i.low)
    # print(n.s)
    expect(n.intelligence[n.i.high]) == .3
    expect(n.difficulty[n.d.easy]) == .6
    expect(n.grade[n.g.ok, n.i.high, n.d.easy]) == .08
    expect(n.letter[n.l.bad, n.g.ok]) == .4
    # print(n.intelligence.low, n.i[n.i.low])
    # print(n.difficulty.easy, n.d[n.d.easy])
    #
    # print(n.sat.bad, n.intelligence.low, n.sat[n.s.bad, n.i.low])
    # print(n.intelligence.low, n.difficulty.easy, n.grade.good, n.g[n.i.low, n.d.easy, n.g.good])
    print(n.intelligence.low, n.difficulty.easy, n.grade.good, n.g[n.i.low, n.g.good, n.d.easy])
    #
    # print(n.letter.bad, n.grade.good, n.l[n.l.bad, n.g.good])
    expect(n.probability_of_event(n.i.high, n.d.easy, n.g.ok, n.l.bad, n.s.good)) == 0.004608

def test_special_proxies_have_useful_docstrings(self):
    expect(_.lib.__doc__).matches('Imports as expressions')
    expect(_.each.__doc__).matches('functions from expressions')

def test_song():
    expected = dedent("""
        This is the house that Jack built.

        This is the malt
        That lay in the house that Jack built.

        This is the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the maiden all forlorn,
        That milked the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the man all tattered and torn,
        That kissed the maiden all forlorn,
        That milked the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the priest all shaven and shorn,
        That married the man all tattered and torn,
        That kissed the maiden all forlorn,
        That milked the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the rooster that crow'd in the morn,
        That waked the priest all shaven and shorn,
        That married the man all tattered and torn,
        That kissed the maiden all forlorn,
        That milked the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the farmer sowing his corn,
        That kept the rooster that crow'd in the morn,
        That waked the priest all shaven and shorn,
        That married the man all tattered and torn,
        That kissed the maiden all forlorn,
        That milked the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.

        This is the horse and the hound and the horn,
        That belong to the farmer sowing his corn,
        That kept the rooster that crow'd in the morn,
        That waked the priest all shaven and shorn,
        That married the man all tattered and torn,
        That kissed the maiden all forlorn,
        That milked the cow with the crumpled horn,
        That tossed the dog,
        That worried the cat,
        That killed the rat,
        That ate the malt
        That lay in the house that Jack built.""")
    expect(song()).to_equal(expected)

def test_can_import_public_symbols(self):
    from fluentpy import lib, each, _ as _f, Wrapper
    expect(lib.sys._) == sys
    expect(_f(3)).is_instance(Wrapper)
    expect((each + 3)(4)) == 7

def test_should_behave_as_if_each_was_wrapped(self):
    expect(_.each.first(dict(first='foo'))) == 'foo'
    expect(_([dict(first='foo')]).map(_.each.first)._) == ('foo',)

def test_call_module_from_shell(self):
    from subprocess import check_output
    output = check_output(
        ['python', '-m', 'fluentpy', "lib.sys.stdin.read().split('\\n').imap(each.call.upper()).map(print)"],
        input=b'foo\nbar\nbaz')
    expect(output) == b'FOO\nBAR\nBAZ\n'

def test_ensure_default_args_to_rsync_are_safe(self):
    interface = CopyDirectory(directory='/foo', options=dict(source='/bar'))
    default_arguments = interface._default_rsync_args()
    for key in default_arguments.keys():
        expect(key).does_not.contain('delete')

def test_should_ease_chaining_off_methods_that_return_none(self):
    expect(_([3,2,1]).sort().unwrap) == None
    expect(_([3,2,1]).sort()._) == None
    expect(_([3,2,1]).sort().previous.previous._) == [1,2,3]
    expect(_([3,2,1]).sort().self.unwrap) == [1,2,3]

    expect(_([2,3,1]).sort().self.sort(reverse=True)._) == None
    expect(_([2,3,1]).sort().self.sort(reverse=True).previous.previous._) == [3,2,1]  # sorted, because in place
    expect(_([2,3,1]).sort().self.sort(reverse=True).self._) == [3,2,1]

    class Attr(object):
        foo = 'bar'
    expect(_(Attr()).setattr('foo', 'baz').self.foo._) == 'baz'

def test_should_produce_callable_on_binary_operator(self):
    expect(_(['foo', 'bar']).map(_.each == 'foo')._) == (True, False)
    expect(_([3, 5]).map(_.each + 3)._) == (6, 8)
    expect(_([3, 5]).map(_.each < 4)._) == (True, False)

def test_should_not_explode_if_process_cannot_be_started_at_all(self):
    # should only happen for unfinished processes
    self.start.delete()
    prospective_creator = User.objects.create()
    expect(self.process.is_startable_by_user(prospective_creator)).is_false()

def test_should_produce_attrgetter_on_attribute_access(self):
    class Foo(object):
        bar = 'baz'

    expect(_([Foo(), Foo()]).map(_.each.bar)._) == ('baz', 'baz')

def test_smalltalk_like_behaviour_is_not_broken_by_proxy(self):
    class UnfortunateNames(object):
        def previous(self, *args):
            self.args = args

    expect(_(UnfortunateNames()).proxy.previous('foo').self.args._) == ('foo',)

def test_should_filter_interfaces_by_tags(self):
    from collections import OrderedDict  # easier to test if iteration order isn't randomly different…
    config = OrderedDict()
    config['first'] = dict(interface_name='noop', tags=['foo', 'bar'])
    config['second'] = dict(interface_name='noop', tags=['bar'])
    config['third'] = dict(interface_name='noop', tags=['foo'])

    interfaces = interfaces_from_config(config, 'unused_directory', tag='foo')
    expect(interfaces).has_length(2)
    expect(interfaces[0].backup_name) == 'first'
    expect(interfaces[1].backup_name) == 'third'

    interfaces = interfaces_from_config(config, 'unused_directory', tag='bar')
    expect(interfaces).has_length(2)
    expect(interfaces[0].backup_name) == 'first'
    expect(interfaces[1].backup_name) == 'second'

    # ensure other parsing of tags works too
    config = OrderedDict()
    config['first'] = dict(interface_name='noop', tags='foo, bar')
    config['second'] = dict(interface_name='noop', tags='bar')
    config['third'] = dict(interface_name='noop', tags='foo')

    interfaces = interfaces_from_config(config, 'unused_directory', tag='foo')
    expect(interfaces).has_length(2)
    expect(interfaces[0].backup_name) == 'first'
    expect(interfaces[1].backup_name) == 'third'

    interfaces = interfaces_from_config(config, 'unused_directory', tag='bar')
    expect(interfaces).has_length(2)
    expect(interfaces[0].backup_name) == 'first'
    expect(interfaces[1].backup_name) == 'second'

def test_chain_is_not_broken_by_proxy_usage(self):
    class UnfortunateNames(object):
        def previous(self, *args):
            return args

    expect(_(UnfortunateNames()).proxy.previous('foo').previous.previous._).is_instance(UnfortunateNames)

def test_should_raise_on_missing_data_interfaces(self):
    expect(lambda: DataInterface.make_data_interface(None, None, None, None)).raises(UnknownDataInterfaceError)

def test_should_wrap_according_to_returned_type(self):
    expect(_('foo')).is_instance(_.Text)
    expect(_([])).is_instance(_.Iterable)
    expect(_(iter([]))).is_instance(_.Iterable)
    expect(_({})).is_instance(_.Mapping)
    expect(_({1})).is_instance(_.Set)
    expect(_(lambda: None)).is_instance(_.Callable)

    class CallMe(object):
        def __call__(self):
            pass

    expect(_(CallMe())).is_instance(_.Callable)
    expect(_(object())).is_instance(_.Wrapper)

def test_dir_vars(self):
    expect(_(object()).dir()._).contains('__class__', '__init__', '__eq__')

    class Foo(object):
        pass

    foo = Foo()
    foo.bar = 'baz'
    expect(_(foo).vars()._) == {'bar': 'baz'}

def test_wrap_has_useful_docstring(self):
    expect(_.__doc__).matches(r'_\(_\)\.dir\(\)\.print\(\)')
    expect(_.__doc__).matches(r'https://github.com/dwt/fluent')