def test_no_auto_update(self):
    """Update: no update check"""
    # Start with an empty cache so earlier runs can't leak state in
    Workflow().reset()
    self.assertIsNone(self.wf.cached_data('__workflow_update_status'))
    wf = Workflow()
    mock = WorkflowMock(['script', 'workflow:noautoupdate'])
    with mock:
        # Accessing `args` triggers magic-argument processing
        wf.args
    self.assertFalse(wf.settings.get('__workflow_autoupdate'))
    self.assertIsNone(self.wf.cached_data('__workflow_update_status'))
    mock = WorkflowMock()
    with mock:
        wf = Workflow(update_settings={
            'github_slug': TEST_REPO_SLUG,
            'version': RELEASE_CURRENT
        })
    # Auto-update disabled above, so still no cached status
    self.assertIsNone(self.wf.cached_data('__workflow_update_status'))
def test_folding(infopl):
    """Magic: folding"""
    # (magic argument, expected value of the folding setting)
    cases = [
        ('workflow:foldingdefault', None),
        ('workflow:foldingon', True),
        ('workflow:foldingdefault', None),
        ('workflow:foldingoff', False),
    ]
    for magic, expected in cases:
        with WorkflowMock(['script', magic]):
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert wf.settings.get('__workflow_diacritic_folding') is expected
    wf.reset()
def test_install_update(self):
    """Update: installs update"""
    # Make sure there's no cached update data
    wf = Workflow()
    wf.reset()
    # Verify that there's no update available
    self.assertIsNone(wf.cached_data('__workflow_update_status'))
    self.assertFalse(update.install_update(TEST_REPO_SLUG, RELEASE_LATEST))
    # Get new update data
    self.assertTrue(update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT))
    # Verify new workflow is downloaded and installed
    c = WorkflowMock()
    with c:
        self.assertTrue(
            update.install_update(TEST_REPO_SLUG, RELEASE_CURRENT))
    # assertEqual, not the deprecated assertEquals alias (removed in
    # Python 3.12)
    self.assertEqual(c.cmd[0], 'open')
    self.assertTrue(c.cmd[1].endswith('.alfredworkflow'))
    self.assertFalse(
        wf.cached_data('__workflow_update_status')['available'])
def test_check_update(httpserver, infopl, alfred4):
    """Check update"""
    key = "__workflow_latest_version"
    # (response data, alfred version, prereleases flag, expected result)
    cases = [
        (RELEASES_JSON, None, False, True),
        (RELEASES_JSON, "3", False, True),
        (RELEASES_4PLUS_JSON, None, False, True),
        (RELEASES_4PLUS_JSON, "3", False, False),
        (RELEASES_4PLUS_JSON, "3", True, False),
    ]
    for data, alfred, pre, expected in cases:
        wf = Workflow()
        wf.reset()
        with fakeresponse(httpserver, data, HTTP_HEADERS_JSON):
            v = update.check_update(TEST_REPO, RELEASE_CURRENT, pre, alfred)
            assert v == expected, "unexpected update status"
            status = wf.cached_data(key)
            assert status is not None
            assert status["available"] == expected
            assert wf.update_available == expected
            if expected:
                # other data may not be set if available is False
                v = update.check_update(TEST_REPO, status["version"],
                                        pre, alfred)
                assert v is False
def test_check_update(httpserver, infopl, alfred4):
    """Check update"""
    key = '__workflow_latest_version'
    # Each row: response payload, alfred version, prereleases, expected
    for data, alfred, pre, wanted in (
            (RELEASES_JSON, None, False, True),
            (RELEASES_JSON, '3', False, True),
            (RELEASES_4PLUS_JSON, None, False, True),
            (RELEASES_4PLUS_JSON, '3', False, False),
            (RELEASES_4PLUS_JSON, '3', True, False)):
        wf = Workflow()
        wf.reset()
        with fakeresponse(httpserver, data, HTTP_HEADERS_JSON):
            result = update.check_update(TEST_REPO, RELEASE_CURRENT,
                                         pre, alfred)
            assert result == wanted, "unexpected update status"
            status = wf.cached_data(key)
            assert status is not None
            assert status['available'] == wanted
            assert wf.update_available == wanted
            if wanted:
                # other data may not be set if available is False
                result = update.check_update(TEST_REPO, status['version'],
                                             pre, alfred)
                assert result is False
def test_install_update(httpserver, info):
    """Update is installed"""
    # Start from a clean cache
    wf = Workflow()
    wf.reset()
    assert wf.cached_data('__workflow_update_status') is None
    with fakeresponse(httpserver, DATA_JSON, HTTP_HEADERS_JSON):
        # Nothing to install when already on the latest release
        assert update.install_update(TEST_REPO_SLUG, RELEASE_LATEST) is False
        # Fetch fresh update status
        assert update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT) is True
        # The new workflow file should be downloaded and opened
        with WorkflowMock() as mock:
            assert update.install_update(TEST_REPO_SLUG,
                                         RELEASE_CURRENT) is True
        assert mock.cmd[0] == 'open'
        assert mock.cmd[1].endswith('.alfredworkflow')
        assert wf.cached_data(
            '__workflow_update_status')['available'] is False
def test_no_auto_update(self):
    """Update: no update check"""
    # Make sure there's no cached update data
    wf = Workflow()
    wf.reset()
    # assertIsNone gives a clearer failure message than
    # assertTrue(... is None)
    self.assertIsNone(self.wf.cached_data('__workflow_update_status'))
    wf = Workflow()
    c = WorkflowMock(['script', 'workflow:noautoupdate'])
    with c:
        # Accessing `args` triggers magic-argument processing
        wf.args
    self.assertFalse(wf.settings.get('__workflow_autoupdate'))
    self.assertIsNone(self.wf.cached_data('__workflow_update_status'))
    c = WorkflowMock()
    with c:
        wf = Workflow(update_settings={'github_slug': TEST_REPO_SLUG,
                                       'version': RELEASE_CURRENT})
    self.assertIsNone(self.wf.cached_data('__workflow_update_status'))
def test_install_update(httpserver, info):
    """Update is installed"""
    # Clear any cached data and verify the cache is empty
    wf = Workflow()
    wf.reset()
    assert wf.cached_data('__workflow_update_status') is None
    with fakeresponse(httpserver, DATA_JSON, HTTP_HEADERS_JSON):
        # Already on the latest release -> nothing installed
        assert update.install_update(TEST_REPO_SLUG, RELEASE_LATEST) is False
        # Populate the update-status cache
        assert update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT) is True
        # Installing should download the workflow and `open` it
        mock = WorkflowMock()
        with mock:
            assert update.install_update(TEST_REPO_SLUG,
                                         RELEASE_CURRENT) is True
        assert mock.cmd[0] == 'open'
        assert mock.cmd[1].endswith('.alfredworkflow')
        assert wf.cached_data('__workflow_update_status')['available'] is False
def test_folding(infopl):
    """Magic: folding"""
    # Run each folding magic argument and check the resulting setting
    for magic, expected in (
            ("workflow:foldingdefault", None),
            ("workflow:foldingon", True),
            ("workflow:foldingdefault", None),
            ("workflow:foldingoff", False)):
        with WorkflowMock(["script", magic]):
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert wf.settings.get("__workflow_diacritic_folding") is expected
    wf.reset()
def test_install_update(self):
    """Update: installs update"""
    # Make sure there's no cached update data
    wf = Workflow()
    wf.reset()
    # Verify that there's no update available
    self.assertIsNone(wf.cached_data('__workflow_update_status'))
    self.assertFalse(update.install_update(TEST_REPO_SLUG, RELEASE_LATEST))
    # Get new update data
    self.assertTrue(update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT))
    # Verify new workflow is downloaded and installed
    c = WorkflowMock()
    with c:
        self.assertTrue(update.install_update(TEST_REPO_SLUG,
                                              RELEASE_CURRENT))
    # assertEqual, not the deprecated assertEquals alias (removed in
    # Python 3.12)
    self.assertEqual(c.cmd[0], 'open')
    self.assertTrue(c.cmd[1].endswith('.alfredworkflow'))
    self.assertFalse(wf.cached_data(
        '__workflow_update_status')['available'])
class TestDownload(unittest.TestCase):
    """Tests for the ebook download command (`download.main`)."""

    def setUp(self):
        # Fresh, empty workflow for every test
        self.wf = Workflow()
        self.wf.reset()
        # Route the download module's logging through the workflow logger
        download.log = self.wf.logger

    def tearDown(self):
        # Remove any data/cache/settings the test created
        self.wf.reset()

    def test_download(self):
        """Download an ebook end-to-end and verify the file was written."""
        # NOTE(review): this hits a live URL, so the test requires network
        # access and depends on the remote file still existing — confirm
        download_link = 'http://filepi.com/i/RSpHA1T'
        ebook_id = '1529159300'
        ebook_title = 'Expert Oracle and Java Security'
        args = 'program --download-from-itebooks {} {} {}'.format(
            download_link, ebook_id, ebook_title).split()
        with patch.object(sys, 'argv', args):
            ret = download.main(self.wf)
        # The file is expected in the default download folder, named
        # after the ebook title
        download_folder = os.path.expanduser(itebooks.default_download_folder)
        file_name = 'Expert Oracle and Java Security.pdf'
        file_path = os.path.join(download_folder, file_name)
        self.assertEqual(ret, 0)
        self.assertTrue(os.path.exists(file_path))
        # Removes the downloaded file
        try:
            os.remove(file_path)
        except OSError:
            pass
def test_cachedir(infopl):
    """Magic: open cachedir"""
    with WorkflowMock(["script", "workflow:opencache"]) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The magic argument should `open` the cache directory
        assert mock.cmd == ["open", wf.cachedir]
    wf.reset()
def test_openlog(infopl):
    """Magic: open logfile"""
    with WorkflowMock(["script", "workflow:openlog"]) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The magic argument should `open` the log file
        assert mock.cmd == ["open", wf.logfile]
    wf.reset()
def test_workflowdir(infopl):
    """Magic: open workflowdir"""
    with WorkflowMock(['script', 'workflow:openworkflow']) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The magic argument should `open` the workflow directory
        assert mock.cmd == ['open', wf.workflowdir]
    wf.reset()
def test_open_term(infopl):
    """Magic: open Terminal"""
    with WorkflowMock(["script", "workflow:openterm"]) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # Should open the workflow directory in Terminal.app
        assert mock.cmd == ["open", "-a", "Terminal", wf.workflowdir]
    wf.reset()
def test_openhelp_no_url(infopl):
    """Magic: no help URL"""
    with WorkflowMock(['script', 'workflow:help']) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # Without a help_url, nothing should be opened
        assert not mock.cmd
    wf.reset()
def test_openlog(infopl):
    """Magic: open logfile"""
    with WorkflowMock(['script', 'workflow:openlog']) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The magic argument should `open` the log file
        assert mock.cmd == ['open', wf.logfile]
    wf.reset()
def test_open_term(infopl):
    """Magic: open Terminal"""
    with WorkflowMock(['script', 'workflow:openterm']) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # Should open the workflow directory in Terminal.app
        assert mock.cmd == ['open', '-a', 'Terminal', wf.workflowdir]
    wf.reset()
def test_cachedir(infopl):
    """Magic: open cachedir"""
    with WorkflowMock(['script', 'workflow:opencache']) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The magic argument should `open` the cache directory
        assert mock.cmd == ['open', wf.cachedir]
    wf.reset()
def test_workflow_update_methods(self):
    """Workflow update methods"""

    def fake(wf):
        return Workflow().reset()

    # Initialise with outdated version
    wf = Workflow(
        update_settings={
            'github_slug': 'deanishe/alfred-workflow-dummy',
            'version': 'v2.0',
            'frequency': 1,
        })
    wf.run(fake)
    # Check won't have completed yet
    self.assertFalse(wf.update_available)
    # wait for background update check
    self.assertTrue(is_running('__workflow_update_check'))
    while is_running('__workflow_update_check'):
        time.sleep(0.05)
    time.sleep(1)
    # There *is* a newer version in the repo
    self.assertTrue(wf.update_available)
    # Mock out subprocess and check the correct command is run
    c = WorkflowMock()
    with c:
        self.assertTrue(wf.start_update())
        # wf.logger.debug('start_update : {}'.format(c.cmd))
    # assertEqual, not the deprecated assertEquals alias (removed in
    # Python 3.12)
    self.assertEqual(c.cmd[0], '/usr/bin/python')
    self.assertEqual(c.cmd[2], '__workflow_update_install')
    # Grab the updated release data, then reset the cache
    update_info = wf.cached_data('__workflow_update_status')
    wf.reset()
    # Initialise with latest available release
    wf = Workflow(
        update_settings={
            'github_slug': 'deanishe/alfred-workflow-dummy',
            'version': update_info['version'],
        })
    wf.run(fake)
    # Wait for background update check
    self.assertTrue(is_running('__workflow_update_check'))
    while is_running('__workflow_update_check'):
        time.sleep(0.05)
    # Remote version is same as the one we passed to Workflow
    self.assertFalse(wf.update_available)
    self.assertFalse(wf.start_update())
def test_workflow_update_methods(self):
    """Workflow update methods"""

    def fake(wf):
        return Workflow().reset()

    # Initialise with outdated version
    wf = Workflow(update_settings={
        'github_slug': 'deanishe/alfred-workflow-dummy',
        'version': 'v2.0',
        'frequency': 1,
    })
    wf.run(fake)
    # Check won't have completed yet
    self.assertFalse(wf.update_available)
    # wait for background update check
    self.assertTrue(is_running('__workflow_update_check'))
    while is_running('__workflow_update_check'):
        time.sleep(0.05)
    time.sleep(1)
    # There *is* a newer version in the repo
    self.assertTrue(wf.update_available)
    # Mock out subprocess and check the correct command is run
    c = WorkflowMock()
    with c:
        self.assertTrue(wf.start_update())
        # wf.logger.debug('start_update : {}'.format(c.cmd))
    # assertEqual, not the deprecated assertEquals alias (removed in
    # Python 3.12)
    self.assertEqual(c.cmd[0], '/usr/bin/python')
    self.assertEqual(c.cmd[2], '__workflow_update_install')
    # Grab the updated release data, then reset the cache
    update_info = wf.cached_data('__workflow_update_status')
    wf.reset()
    # Initialise with latest available release
    wf = Workflow(update_settings={
        'github_slug': 'deanishe/alfred-workflow-dummy',
        'version': update_info['version'],
    })
    wf.run(fake)
    # Wait for background update check
    self.assertTrue(is_running('__workflow_update_check'))
    while is_running('__workflow_update_check'):
        time.sleep(0.05)
    # Remote version is same as the one we passed to Workflow
    self.assertFalse(wf.update_available)
    self.assertFalse(wf.start_update())
def test_list_magic(infopl):
    """Magic: list magic"""
    # TODO: Verify output somehow
    with WorkflowMock(['script', 'workflow:magic']) as mock:
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        # Listing magic arguments runs no external command
        assert not mock.cmd
    wf.reset()
def test_openhelp(infopl):
    """Magic: open help URL"""
    help_url = 'http://www.deanishe.net/alfred-workflow/'
    with WorkflowMock(['script', 'workflow:help']) as mock:
        wf = Workflow(help_url=help_url)
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The configured help URL should be opened
        assert mock.cmd == ['open', help_url]
    wf.reset()
def test_openhelp(infopl):
    """Magic: open help URL"""
    help_url = "http://www.deanishe.net/alfred-workflow/"
    with WorkflowMock(["script", "workflow:help"]) as mock:
        wf = Workflow(help_url=help_url)
        # Accessing `args` triggers magic-argument processing
        wf.args
        # The configured help URL should be opened
        assert mock.cmd == ["open", help_url]
    wf.reset()
def test_delete_cache(infopl):
    """Magic: delete cache"""
    with WorkflowMock(['script', 'workflow:delcache']):
        wf = Workflow()
        testpath = wf.cachefile('file.test')
        # Open in text mode: the payload is a str, and writing a str to a
        # binary-mode ('wb') file raises TypeError on Python 3
        with open(testpath, 'w') as fp:
            fp.write('test!')
        assert os.path.exists(testpath)
        # Process magic arguments
        wf.args
        assert not os.path.exists(testpath)
    wf.reset()
def test_delete_cache(infopl):
    """Magic: delete cache"""
    with WorkflowMock(["script", "workflow:delcache"]):
        wf = Workflow()
        path = wf.cachefile("file.test")
        # Drop a file into the cache so there is something to delete
        with open(path, "w") as fp:
            fp.write("test!")
        assert os.path.exists(path)
        # Accessing `args` triggers magic-argument processing
        wf.args
        assert not os.path.exists(path)
    wf.reset()
def test_update_nondefault_serialiser(httpserver, info):
    """Check update works when a custom serialiser is set on Workflow

    https://github.com/deanishe/alfred-workflow/issues/113
    """
    wf = Workflow()
    wf.cache_serializer = 'json'
    wf.reset()
    with fakeresponse(httpserver, DATA_JSON, HTTP_HEADERS_JSON):
        # The update check must work with the JSON cache serialiser too
        available = update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT)
        assert available is True
        assert wf.update_available is True
def test_update_nondefault_serialiser(httpserver, infopl, alfred4):
    """Check update works when a custom serialiser is set on Workflow

    https://github.com/deanishe/alfred-workflow/issues/113
    """
    wf = Workflow()
    wf.cache_serializer = 'json'
    wf.reset()
    with fakeresponse(httpserver, RELEASES_JSON, HTTP_HEADERS_JSON):
        # The update check must work with the JSON cache serialiser too
        available = update.check_update(TEST_REPO, RELEASE_CURRENT)
        assert available is True
        assert wf.update_available is True
def test_check_update(httpserver, info):
    """Check update"""
    wf = Workflow()
    wf.reset()
    with fakeresponse(httpserver, DATA_JSON, HTTP_HEADERS_JSON):
        # First check populates the cache and reports an update
        assert update.check_update(TEST_REPO_SLUG, RELEASE_CURRENT) is True
        status = wf.cached_data('__workflow_update_status')
        assert status is not None
        assert wf.update_available is True
        # Checking against the newest version reports no update
        assert update.check_update(TEST_REPO_SLUG,
                                   status['version']) is False
def test_create_directories(alfred4, tempdir):
    """Workflow creates directories."""
    data = os.path.join(tempdir, "data")
    cache = os.path.join(tempdir, "cache")
    # Neither directory exists yet
    for p in (data, cache):
        assert not os.path.exists(p)
    with env(alfred_workflow_data=data, alfred_workflow_cache=cache):
        wf = Workflow()
        # Accessing the properties creates the directories
        assert wf.datadir == data
        assert wf.cachedir == cache
        for p in (data, cache):
            assert os.path.exists(p)
        wf.reset()
def ctx(args=None, update_settings=None, clear=True):
    """Context manager that provides a Workflow and WorkflowMock."""
    update_settings = update_settings or UPDATE_SETTINGS
    argv = args or []
    if argv:
        # Add placeholder for ARGV[0]
        argv = ['script'] + argv
    create_info_plist()
    with WorkflowMock(argv) as mock:
        wf = Workflow(update_settings=update_settings)
        yield wf, mock
    if clear:
        wf.reset()
    delete_info_plist()
def test_create_directories(alfred4, tempdir):
    """Workflow creates directories."""
    data = os.path.join(tempdir, 'data')
    cache = os.path.join(tempdir, 'cache')
    # Neither directory exists yet
    assert not os.path.exists(data)
    assert not os.path.exists(cache)
    with env(alfred_workflow_data=data, alfred_workflow_cache=cache):
        wf = Workflow()
        # Accessing the properties creates the directories
        assert wf.datadir == data
        assert os.path.exists(data)
        assert wf.cachedir == cache
        assert os.path.exists(cache)
        wf.reset()
def ctx(args=None, update_settings=None, clear=True):
    """Yield a Workflow and (possibly dummy) WorkflowMock."""
    update_settings = update_settings or UPDATE_SETTINGS
    args = args or []
    mock = dummy()
    if args:
        # Add placeholder for ARGV[0]
        args = ['script'] + args
    create_info_plist()
    if args:
        mock = WorkflowMock(args)
    with mock:
        wf = Workflow(update_settings=update_settings)
        yield wf, mock
    if clear:
        wf.reset()
    delete_info_plist()
def test_delete_settings(infopl):
    """Magic: delete settings"""
    with WorkflowMock(['script', 'workflow:delsettings']):
        wf = Workflow()
        wf.settings['key'] = 'value'
        assert os.path.exists(wf.settings_path)
        # A second Workflow sees the persisted setting
        other = Workflow()
        assert other.settings['key'] == 'value'
        # Accessing `args` triggers magic-argument processing
        wf.args
        # After deletion, a fresh Workflow no longer has the key
        fresh = Workflow()
        assert 'key' not in fresh.settings
    wf.reset()
def test_delete_settings(infopl):
    """Magic: delete settings"""
    with WorkflowMock(["script", "workflow:delsettings"]):
        wf = Workflow()
        wf.settings["key"] = "value"
        assert os.path.exists(wf.settings_path)
        # A second Workflow sees the persisted setting
        other = Workflow()
        assert other.settings["key"] == "value"
        # Accessing `args` triggers magic-argument processing
        wf.args
        # After deletion, a fresh Workflow no longer has the key
        fresh = Workflow()
        assert "key" not in fresh.settings
    wf.reset()
def test_prereleases(infopl):
    """Magic: prereleases"""
    # Turn prereleases on, then off, via magic arguments
    for magic, expected in (('workflow:prereleases', True),
                            ('workflow:noprereleases', False)):
        with WorkflowMock(['script', magic]):
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert wf.settings.get('__workflow_prereleases') is expected
            assert wf.prereleases is expected
        wf.reset()
def test_update_settings_override_magic_prereleases(infopl):
    """Magic: pre-release updates can be overridden by `update_settings`"""
    settings = {'prereleases': True}
    with WorkflowMock(['script', 'workflow:prereleases']):
        wf = Workflow(update_settings=settings)
        # Accessing `args` triggers magic-argument processing
        wf.args
        assert wf.settings.get('__workflow_prereleases') is True
        assert wf.prereleases is True
    wf.reset()
    with WorkflowMock(['script', 'workflow:noprereleases']):
        wf = Workflow(update_settings=settings)
        # Accessing `args` triggers magic-argument processing
        wf.args
        assert wf.settings.get('__workflow_prereleases') is False
        # `update_settings` wins over the magic argument
        assert wf.prereleases is True
    wf.reset()
def test_directories(alfred4):
    """Workflow directories"""
    # Directories from the Alfred 4 environment
    wf = Workflow()
    assert wf.datadir == ENV_V4.get('alfred_workflow_data')
    assert os.path.exists(wf.datadir)
    assert wf.cachedir == ENV_V4.get('alfred_workflow_cache')
    assert os.path.exists(wf.cachedir)
    wf.reset()
    # defaults
    with env(alfred_workflow_data=None, alfred_workflow_cache=None):
        wf = Workflow()
        assert wf.datadir == ENV_V2.get('alfred_workflow_data')
        assert os.path.exists(wf.datadir)
        assert wf.cachedir == ENV_V2.get('alfred_workflow_cache')
        assert os.path.exists(wf.cachedir)
        wf.reset()
def test_directories(alfred4):
    """Workflow directories"""
    # Directories from the Alfred 4 environment
    wf = Workflow()
    assert wf.datadir == ENV_V4.get("alfred_workflow_data")
    assert os.path.exists(wf.datadir)
    assert wf.cachedir == ENV_V4.get("alfred_workflow_cache")
    assert os.path.exists(wf.cachedir)
    wf.reset()
    # defaults
    with env(alfred_workflow_data=None, alfred_workflow_cache=None):
        wf = Workflow()
        assert wf.datadir == ENV_V2.get("alfred_workflow_data")
        assert os.path.exists(wf.datadir)
        assert wf.cachedir == ENV_V2.get("alfred_workflow_cache")
        assert os.path.exists(wf.cachedir)
        wf.reset()
def test_version_magic(infopl2):
    """Magic: version magic (Alfred 2)"""
    vstr = '1.9.7'
    # Version from version file
    with env(alfred_workflow_version=None):
        # Versioned
        with WorkflowMock(['script', 'workflow:version']) as mock:
            with VersionFile(vstr):
                wf = Workflow()
                # Accessing `args` triggers magic-argument processing
                wf.args
                assert not mock.cmd
            wf.reset()
        # Unversioned
        with WorkflowMock(['script', 'workflow:version']) as mock:
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert not mock.cmd
            wf.reset()
    # Version from environment variable
    with env(alfred_workflow_version=vstr):
        with WorkflowMock(['script', 'workflow:version']) as mock:
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert not mock.cmd
            wf.reset()
def test_version_magic(infopl):
    """Magic: version magic"""
    # TODO: Verify output somehow
    vstr = '1.9.7'
    # Version from file(s)
    with env(alfred_workflow_version=None):
        # Version file
        with WorkflowMock(['script', 'workflow:version']) as mock:
            with VersionFile(vstr):
                wf = Workflow()
                # Accessing `args` triggers magic-argument processing
                wf.args
                assert not mock.cmd
            wf.reset()
        # info.plist
        with WorkflowMock(['script', 'workflow:version']) as mock:
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert not mock.cmd
            wf.reset()
    # Environment variable
    with env(alfred_workflow_version=vstr):
        with WorkflowMock(['script', 'workflow:version']) as mock:
            wf = Workflow()
            # Accessing `args` triggers magic-argument processing
            wf.args
            assert not mock.cmd
            wf.reset()
def test_no_auto_update(info):
    """No update check"""
    # Cache must be empty to start with
    wf = Workflow()
    wf.reset()
    assert wf.cached_data('__workflow_update_status') is None
    with WorkflowMock(['script', 'workflow:noautoupdate']):
        wf = Workflow()
        # Accessing `args` triggers magic-argument processing
        wf.args
        assert wf.settings.get('__workflow_autoupdate') is False
        assert wf.cached_data('__workflow_update_status') is None
    with WorkflowMock():
        wf = Workflow(update_settings={
            'github_slug': TEST_REPO_SLUG,
            'version': RELEASE_CURRENT
        })
        # Auto-update disabled, so no check was performed
        assert wf.cached_data('__workflow_update_status') is None
def test_reset(infopl):
    """Magic: reset"""
    with WorkflowMock(['script', 'workflow:reset']):
        wf = Workflow()
        wf.settings['key'] = 'value'
        datatest = wf.datafile('data.test')
        cachetest = wf.cachefile('cache.test')
        settings_path = wf.datafile('settings.json')
        for p in (datatest, cachetest):
            # Open in text mode: the payload is a str, and writing a str
            # to a binary-mode ('wb') file raises TypeError on Python 3
            with open(p, 'w') as file_obj:
                file_obj.write('test!')
        for p in (datatest, cachetest, settings_path):
            assert os.path.exists(p)
        # Process magic arguments
        wf.args
        for p in (datatest, cachetest, settings_path):
            assert not os.path.exists(p)
    wf.reset()
def test_install_update(httpserver, infopl, alfred4):
    """Update is installed."""
    key = '__workflow_latest_version'
    # Start from an empty cache
    wf = Workflow()
    wf.reset()
    assert wf.cached_data(key) is None
    with fakeresponse(httpserver, RELEASES_JSON, HTTP_HEADERS_JSON):
        # No update because no update status has been cached
        assert update.install_update() is False
        # Check for updates
        assert update.check_update(TEST_REPO, RELEASE_CURRENT) is True
        # Verify new workflow is downloaded and installed
        with WorkflowMock() as mock:
            assert update.install_update() is True
        assert mock.cmd[0] == 'open'
        assert re.search(r'\.alfred(\d+)?workflow$', mock.cmd[1])
        # Installing clears the "available" flag
        status = wf.cached_data(key)
        assert status['available'] is False
        assert status['download'] is None
        assert status['version'] is None
        # Flip available bit, but leave rest invalid
        status['available'] = True
        wf.cache_data(key, status)
        with WorkflowMock():
            # Mangled status data must not trigger an install
            assert update.install_update() is False
class TestEbooksAPI(unittest.TestCase):
    """Tests for the it-ebooks search API helpers in `itebooks`.

    NOTE(review): these tests call the live API, so they require
    network access and a responsive remote service.
    """

    def setUp(self):
        # Fresh workflow per test; point the module-level hooks at it
        self.wf = Workflow()
        self.wf.reset()
        itebooks.log = self.wf.logger
        itebooks.wf = self.wf

    def test_search_ebooks(self):
        """API search response contains the expected fields."""
        ebooks_json = itebooks.do_search('java')
        # Validate basic response for the api
        self.assertIsNotNone(ebooks_json)
        self.assertIn('Error', ebooks_json)
        self.assertIn('Time', ebooks_json)
        self.assertIn('Total', ebooks_json)
        self.assertIn('Page', ebooks_json)
        self.assertIn('Books', ebooks_json)
        self.assertTrue(len(ebooks_json['Books']) > 0)
        # Validate ebook info from the api
        ebook = ebooks_json['Books'][0]
        self.assertIn('Description', ebook)
        self.assertIn('ID', ebook)
        self.assertIn('Image', ebook)
        self.assertIn('SubTitle', ebook)
        self.assertIn('Title', ebook)
        self.assertIn('isbn', ebook)

    def test_fetch_many_results(self):
        """Generator yields exactly the requested number of results."""
        ebooks_generator = itebooks.search_ebooks('java', 30, self.wf)
        search_result = []
        for ebooks in ebooks_generator:
            search_result.extend(ebooks)
        self.assertIsNotNone(search_result)
        self.assertEqual(len(search_result), 30, 'Wrong fetched result')

    def test_fetch_few_results(self):
        """Query with few matches yields fewer results than requested."""
        ebooks_generator = itebooks.search_ebooks('Elastic Search', 30,
                                                  self.wf)
        search_result = []
        for ebooks in ebooks_generator:
            search_result.extend(ebooks)
        self.assertIsNotNone(search_result)
        self.assertLess(len(search_result), 10, 'Wrong fetched results')
        self.assertGreater(len(search_result), 0, 'Wrong fetched results')

    def test_get_ebook_info(self):
        """Single-book lookup returns a dict with all expected fields."""
        ebook_info = itebooks.get_ebook_info(1529159300)
        self.assertIsNotNone(ebook_info)
        self.assertIsInstance(ebook_info, dict, 'Not a dictionary')
        self.assertIn('Author', ebook_info)
        self.assertIn('Description', ebook_info)
        self.assertIn('Download', ebook_info)
        self.assertIn('Error', ebook_info)
        self.assertIn('ID', ebook_info)
        self.assertIn('ISBN', ebook_info)
        self.assertIn('Image', ebook_info)
        self.assertIn('Page', ebook_info)
        self.assertIn('SubTitle', ebook_info)
        self.assertIn('Time', ebook_info)
        self.assertIn('Title', ebook_info)
        self.assertIn('Year', ebook_info)
        # API reports success as the string '0'
        self.assertEqual(ebook_info['Error'], u'0')

    def test_ebook_not_found(self):
        """Unknown ID produces a single 'Book not found!' feedback item."""
        ebook_info = itebooks.get_ebook_info(-1)
        self.assertIsNotNone(ebook_info)
        self.assertEqual(len(self.wf._items), 1)
        self.assertEqual(self.wf._items[0].subtitle, 'Book not found!')
class TestEbook(unittest.TestCase):
    """Tests for the `itebooks.main` script entry point."""

    def setUp(self):
        # Fresh, empty workflow for every test
        self.wf = Workflow()
        self.wf.reset()
        itebooks.log = self.wf.logger

    def tearDown(self):
        # Remove any data/cache/settings the test created
        self.wf.reset()

    def test_copy_download_link(self):
        """--copy-download-link exits successfully."""
        with patch.object(sys, 'argv',
                          'program --copy-download-link 1529159300'.split()):
            ret = itebooks.main(self.wf)
        self.assertEqual(ret, 0)

    def test_open_in_browser(self):
        """--open-ebook-browser exits successfully and emits no items."""
        with patch.object(sys, 'argv',
                          'program --open-ebook-browser 1529159300'.split()):
            ret = itebooks.main(self.wf)
        self.assertEqual(ret, 0)
        self.assertEqual(len(self.wf._items), 0)

    def test_search_many_words(self):
        """Multi-word query returns at least one feedback item."""
        with patch.object(sys, 'argv', 'program Elastic Search'.split()):
            ret = itebooks.main(self.wf)
        self.assertEqual(ret, 0)
        self.assertGreater(len(self.wf._items), 0)

    def test_search_with_no_result(self):
        """Nonsense query yields a single 'No books found' item."""
        with patch.object(sys, 'argv',
                          'program Zupao in the mountains'.split()):
            ret = itebooks.main(self.wf)
        self.assertEqual(ret, 1)
        self.assertEqual(len(self.wf._items), 1)
        return_msg = self.wf._items[0].title
        self.assertTrue(
            return_msg.find('No books found for the query') != -1,
            'Should return No books found for the query, returned {}'.format(return_msg))

    def test_not_enough_query_character(self):
        """Query below the minimum length shows a prompt item."""
        with patch.object(sys, 'argv', 'program ja'.split()):
            ret = itebooks.main(self.wf)
        ret_items = self.wf._items
        ret_expected_msg = 'Please, type at least {} characters to start the search...'.format(itebooks.min_characters)
        # NOTE(review): assertTrue treats the second argument as a failure
        # message, not an expected value — ret and len(ret_items) are not
        # actually compared to 1 here; confirm and consider assertEqual
        self.assertTrue(ret, 1)
        self.assertTrue(len(ret_items), 1)
        self.assertTrue(ret_items[0].title == ret_expected_msg)

    def test_show_download_progress(self):
        """--status-download lists one item per stored download."""
        downloading_books = {}
        # (id, title, status) triples for fake in-progress downloads
        books = [('1234', 'Effective Java', 'DOWNLOADING'),
                 ('1235', 'Effective Akka', 'FINISHED'),
                 ('1236', 'Effective POG', 'DOWNLOADING'),
                 ('1237', 'Effective O Primo Basilio', 'DOWNLOADING'),
                 ('1238', 'Os caras que estao falando', 'FINISHED')]
        for book in books:
            # NOTE(review): 'title' receives the ID (book[0]) and
            # 'book_id' the title (book[1]) — these look swapped; confirm
            # against what the download module expects
            downloading_books[book[0]] = {'title': book[0],
                                          'book_id': book[1],
                                          'status': book[2]}
        self.wf.store_data(download.STORED_DOWNLOADING_BOOKS,
                           downloading_books)
        with patch.object(sys, 'argv', 'program --status-download'.split()):
            ret = itebooks.main(self.wf)
        ret_items = self.wf._items
        self.assertEqual(ret, 0)
        self.assertEqual(len(ret_items), 5)

    def test_show_download_progress_no_results(self):
        """--status-download with no stored downloads shows one notice."""
        with patch.object(sys, 'argv', 'program --status-download'.split()):
            ret = itebooks.main(self.wf)
        ret_items = self.wf._items
        self.assertEqual(ret, 1)
        self.assertEqual(len(ret_items), 1)
        self.assertEqual(ret_items[0].title,
                         'No downloads running at moment.')