# Successful opens with default and explicit codecs flatin = os.path.join(T.path_to_Files, 'en-common-ltn.txt') fbts = utilities.path_to_stream(flatin) assert fbts.basename() == 'en-common-ltn' assert b'ISO-8859-1' == fbts.codec().name() fbts = None futf8 = os.path.join(T.path_to_Files, 'en-common-utf.txt') fbts = utilities.path_to_stream(futf8) assert b'UTF-8' == fbts.codec().name() fbts = None fbts = utilities.path_to_stream(futf8, encoding='KOI8-R') assert b'KOI8-R' == fbts.codec().name() # Unsuccessful opens fbts = utilities.path_to_stream(file_nexiste) assert fbts is None assert T.check_log('Request for nonexistent input file', logging.ERROR) fbts = utilities.path_to_stream(file_unreadable) assert fbts is None assert T.check_log('Error 5 (Permission denied) opening file unreadable', logging.ERROR) # Existing file dialog - file dialogs require an App and a parent window # or python will crash T.make_app() T.make_main() T.main.show() fbts = utilities.ask_existing_file('PRESS CANCEL', parent=T.main, starting_path=T.path_to_Files) assert fbts is None fbts = utilities.ask_existing_file('SELECT unreadable.txt', parent=T.main,
'en_US': test_dicts, 'en_GB': test_dicts, 'fr_FR': test_dicts, 'de_DE': test_extras } tag_list = dictionaries.get_tag_list() for (tag, path) in expect_tag_list.items(): assert tag in tag_list assert tag_list[tag] == expect_tag_list[tag] # cause T.path_to_Files to have mismatched foobar.dic dic_path = os.path.join(T.path_to_Files, 'foobar.dic') f = open(dic_path, 'w') f.close() tag_list = dictionaries.get_tag_list(T.path_to_Files) assert T.check_log(".dic but not", logging.ERROR) os.remove(dic_path) # clean up Files # cause T.path_to_Files to have mismatched foobar.aff aff_path = os.path.join(T.path_to_Files, 'foobar.aff') f = open(aff_path, 'w') f.close() tag_list = dictionaries.get_tag_list(T.path_to_Files) assert T.check_log(".aff but not", logging.ERROR) os.remove(aff_path) # clean up Files # "skipping" should appear if we check a path twice tag_list = dictionaries.get_tag_list(test_dicts) assert T.check_log("Skipping", logging.INFO) # Check the spellcheck object: bad input makes not is_valid # and all words are ok
# Test the chardata census: load minimal text, refresh, read the census
# back as metadata, then exercise the error paths of get_tuple and the
# metadata reader. (Reconstructed: newlines were stripped in original.)
# load the em with minimal data
em.setPlainText('ABBCCC')
# do a refresh which loads the census
cd.refresh()
# read it out as metadata.
jpat_1 = '{{"{0}":{1}}}'
expect = jpat_1.format(C.MD_CC, '[["A",1],["B",2],["C",3]]')
assert check_section(mm, C.MD_CC, expect)
assert 3 == cd.char_count()
assert 'C' == cd.get_char(2)
assert ('B', 2) == cd.get_tuple(1)
# Check error handling in get_tuple
etxt = 'Invalid chardata index'
assert ('?', 0) == cd.get_tuple(-4)
assert T.check_log(etxt, logging.ERROR)
assert ('?', 0) == cd.get_tuple(99)
assert T.check_log(etxt, logging.ERROR)
# check invalid section value
# note any call to chardata reader clears the census
etxt = 'CHARCENSUS metadata must be a list, ignoring all'
expect = jpat_1.format(C.MD_CC, '{"A":2}')  # a dict, not a list
load_section(mm, expect)
assert T.check_log(etxt, logging.ERROR)
assert 0 == cd.char_count()
# check bad values from metadata
etxt = 'Ignoring invalid CHARCENSUS chararacter '
expect = jpat_1.format(C.MD_CC, '[{"X":2}]')  # item not a list
load_section(mm, expect)
assert T.check_log(etxt, logging.ERROR)
] } jd = {C.MD_FR: rbd} load_section(mdmgr, json.dumps(jd)) xd = get_section_value(mdmgr, C.MD_FR) assert 1 == len(xd) assert C.MD_FR in xd xbd = xd[C.MD_FR] for (k, v) in xbd.items(): assert rbd[int(k)] == v # Check errors by loading bad stuff and checking the log errmsg = 'FIND_RB metadata is not a dict value' jd = {C.MD_FR: [0, 'not a dict']} load_section(mdmgr, json.dumps(jd)) assert T.check_log(errmsg, logging.ERROR) errmsg = 'Ignoring invalid FIND_RB' # non-integer button number jd = {C.MD_FR: {'X': ['list', 'of', 'strings']}} load_section(mdmgr, json.dumps(jd)) assert T.check_log(errmsg, logging.ERROR) # list too long jd = {C.MD_FR: {0: ['1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B']}} load_section(mdmgr, json.dumps(jd)) assert T.check_log(errmsg, logging.ERROR) # not an iterable jd = {C.MD_FR: {0: 9}} load_section(mdmgr, json.dumps(jd)) assert T.check_log(errmsg, logging.ERROR) # not a string jd = {C.MD_FR: {0: ['str', 9]}}
# Test the chardata census: load minimal text, refresh, read the census
# back as metadata, then exercise the error paths of get_tuple and the
# metadata reader. (Reconstructed: newlines were stripped in original.)
# load the em with minimal data
em.setPlainText('ABBCCC')
# do a refresh which loads the census
cd.refresh()
# read it out as metadata.
jpat_1 = '{{"{0}":{1}}}'
expect = jpat_1.format(C.MD_CC, '[["A",1],["B",2],["C",3]]')
assert check_section(mm, C.MD_CC, expect)
assert 3 == cd.char_count()
assert 'C' == cd.get_char(2)
assert ('B', 2) == cd.get_tuple(1)
# Check error handling in get_tuple
etxt = 'Invalid chardata index'
assert ('?', 0) == cd.get_tuple(-4)
assert T.check_log(etxt, logging.ERROR)
assert ('?', 0) == cd.get_tuple(99)
assert T.check_log(etxt, logging.ERROR)
# check invalid section value
# note any call to chardata reader clears the census
etxt = 'CHARCENSUS metadata must be a list, ignoring all'
expect = jpat_1.format(C.MD_CC, '{"A":2}')  # a dict, not a list
load_section(mm, expect)
assert T.check_log(etxt, logging.ERROR)
assert 0 == cd.char_count()
# check bad values from metadata
etxt = 'Ignoring invalid CHARCENSUS chararacter '
expect = jpat_1.format(C.MD_CC, '[{"X":2}]')  # item not a list
load_section(mm, expect)
assert T.check_log(etxt, logging.ERROR)
'String mit unicode ∑∞' ] } jd = { C.MD_FR : rbd } load_section(mdmgr,json.dumps(jd)) xd = get_section_value(mdmgr, C.MD_FR) assert 1 == len(xd) assert C.MD_FR in xd xbd = xd[C.MD_FR] for (k,v) in xbd.items(): assert rbd[int(k)] == v # Check errors by loading bad stuff and checking the log errmsg = 'FIND_RB metadata is not a dict value' jd = { C.MD_FR : [0,'not a dict'] } load_section(mdmgr,json.dumps(jd)) assert T.check_log(errmsg,logging.ERROR) errmsg = 'Ignoring invalid FIND_RB' # non-integer button number jd = { C.MD_FR : { 'X' : ['list','of','strings'] } } load_section(mdmgr,json.dumps(jd)) assert T.check_log(errmsg,logging.ERROR) # list too long jd = { C.MD_FR : { 0 : ['1','2','3','4','5','6','7','8','9','A','B'] } } load_section(mdmgr,json.dumps(jd)) assert T.check_log(errmsg,logging.ERROR) # not an iterable jd = { C.MD_FR : { 0 : 9} } load_section(mdmgr,json.dumps(jd)) assert T.check_log(errmsg,logging.ERROR) # not a string jd = { C.MD_FR : { 0 : ['str',9]} }
# Test utilities.path_to_stream: successful opens with default and
# explicit codecs, then failure cases, then the existing-file dialog.
# (Reconstructed: original chunk had all newlines stripped onto one line.)
flatin = os.path.join(T.path_to_Files, 'en-common-ltn.txt')
fbts = utilities.path_to_stream(flatin)
assert fbts.basename() == 'en-common-ltn'
assert b'ISO-8859-1' == fbts.codec().name()
fbts = None  # release the stream before reopening
futf8 = os.path.join(T.path_to_Files, 'en-common-utf.txt')
fbts = utilities.path_to_stream(futf8)
assert b'UTF-8' == fbts.codec().name()
fbts = None
# an explicit encoding= argument overrides codec detection
fbts = utilities.path_to_stream(futf8, encoding='KOI8-R')
assert b'KOI8-R' == fbts.codec().name()
# Unsuccessful opens
fbts = utilities.path_to_stream(file_nexiste)
assert fbts is None
assert T.check_log('Request for nonexistent input file', logging.ERROR)
fbts = utilities.path_to_stream(file_unreadable)
assert fbts is None
assert T.check_log('Error 5 (Permission denied) opening file unreadable', logging.ERROR)
# Existing file dialog - file dialogs require an App and a parent window
# or python will crash
T.make_app()
T.make_main()
T.main.show()
fbts = utilities.ask_existing_file('PRESS CANCEL', parent=T.main,
                                   starting_path=T.path_to_Files)
assert fbts is None
fbts = utilities.ask_existing_file('SELECT unreadable.txt', parent=T.main,
                                   starting_path=T.path_to_Files)
assert fbts is None
fbts = utilities.ask_existing_file('SELECT en-common-ltn.txt', parent=T.main,
                                   starting_path=T.path_to_Files)
assert fbts.basename() == 'en-common-ltn'
assert b'ISO-8859-1' == fbts.codec().name()
# Verify get_tag_list returns every expected tag with the right path,
# then drive the mismatched .dic/.aff error paths.
# (Reconstructed: original chunk had all newlines stripped onto one line.)
# There are 3 dicts in Tests/Files/extras/dicts: en_US, en_GB, fr_FR
# There are 4 in Tests/Files/extras, those 3 plus de_DE
expect_tag_list = {
    'en_US': test_dicts,
    'en_GB': test_dicts,
    'fr_FR': test_dicts,
    'de_DE': test_extras
}
tag_list = dictionaries.get_tag_list()
for (tag, path) in expect_tag_list.items():
    assert tag in tag_list
    assert tag_list[tag] == expect_tag_list[tag]
# cause T.path_to_Files to have mismatched foobar.dic
dic_path = os.path.join(T.path_to_Files, 'foobar.dic')
f = open(dic_path, 'w')
f.close()
tag_list = dictionaries.get_tag_list(T.path_to_Files)
assert T.check_log(".dic but not", logging.ERROR)
os.remove(dic_path)  # clean up Files
# cause T.path_to_Files to have mismatched foobar.aff
aff_path = os.path.join(T.path_to_Files, 'foobar.aff')
f = open(aff_path, 'w')
f.close()
tag_list = dictionaries.get_tag_list(T.path_to_Files)
assert T.check_log(".aff but not", logging.ERROR)
os.remove(aff_path)  # clean up Files
# "skipping" should appear if we check a path twice
tag_list = dictionaries.get_tag_list(test_dicts)
assert T.check_log("Skipping", logging.INFO)
# Check the spellcheck object: bad input makes not is_valid
# and all words are ok
# Verify fonts.shutdown persists the font choices to settings, then test
# fonts.scale zoom stepping with its min/max clamping, and the manual
# font-chooser dialog. (Reconstructed: newlines were stripped in original.)
fonts.shutdown(T.settings)
assert T.settings.value('fonts/general_family') == palqf.family()
assert T.settings.value('fonts/mono_family') == couqf.family()
assert T.settings.value('fonts/general_size') == palqf.pointSize()
assert T.settings.value('fonts/mono_size') == couqf.pointSize()
T.settings.clear()
# check scale
ps = genqf.pointSize()
genqf = fonts.scale(C.CTL_SHFT_EQUAL, genqf)  # zoom in: +1 point
assert (ps + 1) == genqf.pointSize()
genqf = fonts.scale(C.CTL_MINUS, genqf)  # zoom out: back to start
assert ps == genqf.pointSize()
# at the minimum size a further zoom-out is rejected and logged
genqf.setPointSize(fonts.POINT_SIZE_MINIMUM)
genqf = fonts.scale(C.CTL_MINUS, genqf)
assert genqf.pointSize() == fonts.POINT_SIZE_MINIMUM
assert T.check_log('rejecting zoom', logging.ERROR)
# at the maximum size a further zoom-in is rejected and logged
genqf.setPointSize(fonts.POINT_SIZE_MAXIMUM)
genqf = fonts.scale(C.CTL_SHFT_EQUAL, genqf)
assert genqf.pointSize() == fonts.POINT_SIZE_MAXIMUM
assert T.check_log('rejecting zoom', logging.ERROR)
# a non-zoom key is ignored and logged
genqf = fonts.scale(C.CTL_LEFT, genqf)
assert genqf.pointSize() == fonts.POINT_SIZE_MAXIMUM
assert T.check_log('ignoring non-zoom key', logging.ERROR)
# manual: check font dialog
# cancel the first one
genqf = fonts.choose_font(True)
assert genqf is None
# accept the second
genqf = fonts.choose_font(True)
assert genqf is not None
import logging
import constants as C
import paths

# Test paths.check_path and paths.initialize default / configured extras
# and dicts paths. (Reconstructed: newlines were stripped in original.)
T.settings.clear()
# test readable folder
test_path = T.path_to_Files
assert paths.check_path(test_path)
# nonexistent file
assert not paths.check_path(os.path.join(test_path, 'arglebargle'))
# file that has the read perms turned off
assert not paths.check_path(os.path.join(test_path, 'unreadable.aff'))
paths.initialize(T.settings)
# with null settings, extras defaults to cwd = test_path
assert T.check_log('initial extras path is ' + test_path, logging.INFO)
assert paths.get_extras_path() == test_path
assert paths.get_dicts_path() == ''
# check assuming bookloupe is installed
if C.PLATFORM_IS_WIN:
    assert paths.get_loupe_path() == ''  # TODO FIX WHEN KNOWN
else:
    assert paths.get_loupe_path() == '/usr/local/bin/bookloupe'
# point settings to an extras, expect dicts to follow
test_extras = os.path.join(T.path_to_Files, 'extras')
T.settings.setValue("paths/extras_path", test_extras)
paths.initialize(T.settings)
assert T.check_log('initial extras path is ' + test_extras, logging.INFO)
assert paths.get_extras_path() == test_extras
test_dicts = os.path.join(test_extras, 'dicts')
assert paths.get_dicts_path() == test_dicts
# Verify fonts.shutdown persists the font choices to settings, then test
# fonts.scale zoom stepping with its min/max clamping, and the manual
# font-chooser dialog. (Reconstructed: newlines were stripped in original.)
fonts.shutdown(T.settings)
assert T.settings.value('fonts/general_family') == palqf.family()
assert T.settings.value('fonts/mono_family') == couqf.family()
assert T.settings.value('fonts/general_size') == palqf.pointSize()
assert T.settings.value('fonts/mono_size') == couqf.pointSize()
T.settings.clear()
# check scale
ps = genqf.pointSize()
genqf = fonts.scale(C.CTL_SHFT_EQUAL, genqf)  # zoom in: +1 point
assert (ps + 1) == genqf.pointSize()
genqf = fonts.scale(C.CTL_MINUS, genqf)  # zoom out: back to start
assert ps == genqf.pointSize()
# at the minimum size a further zoom-out is rejected and logged
genqf.setPointSize(fonts.POINT_SIZE_MINIMUM)
genqf = fonts.scale(C.CTL_MINUS, genqf)
assert genqf.pointSize() == fonts.POINT_SIZE_MINIMUM
assert T.check_log('rejecting zoom', logging.ERROR)
# at the maximum size a further zoom-in is rejected and logged
genqf.setPointSize(fonts.POINT_SIZE_MAXIMUM)
genqf = fonts.scale(C.CTL_SHFT_EQUAL, genqf)
assert genqf.pointSize() == fonts.POINT_SIZE_MAXIMUM
assert T.check_log('rejecting zoom', logging.ERROR)
# a non-zoom key is ignored and logged
genqf = fonts.scale(C.CTL_LEFT, genqf)
assert genqf.pointSize() == fonts.POINT_SIZE_MAXIMUM
assert T.check_log('ignoring non-zoom key', logging.ERROR)
# manual: check font dialog
# cancel the first one
genqf = fonts.choose_font(True)
assert genqf is None
# accept the second
genqf = fonts.choose_font(True)
assert genqf is not None
import logging
import constants as C
import paths

# Test paths.check_path and paths.initialize default / configured extras
# and dicts paths. (Reconstructed: newlines were stripped in original.)
T.settings.clear()
# test readable folder
test_path = T.path_to_Files
assert paths.check_path(test_path)
# nonexistent file
assert not paths.check_path(os.path.join(test_path, 'arglebargle'))
# file that has the read perms turned off
assert not paths.check_path(os.path.join(test_path, 'unreadable.aff'))
paths.initialize(T.settings)
# with null settings, extras defaults to cwd = test_path
assert T.check_log('initial extras path is ' + test_path, logging.INFO)
assert paths.get_extras_path() == test_path
assert paths.get_dicts_path() == ''
# check assuming bookloupe is installed
if C.PLATFORM_IS_WIN:
    assert paths.get_loupe_path() == ''  # TODO FIX WHEN KNOWN
else:
    assert paths.get_loupe_path() == '/usr/local/bin/bookloupe'
# point settings to an extras, expect dicts to follow
test_extras = os.path.join(T.path_to_Files, 'extras')
T.settings.setValue("paths/extras_path", test_extras)
paths.initialize(T.settings)
assert T.check_log('initial extras path is ' + test_extras, logging.INFO)
assert paths.get_extras_path() == test_extras
test_dicts = os.path.join(test_extras, 'dicts')
assert paths.get_dicts_path() == test_dicts