def test_maildir(self):
    "Sends mail to maildir"
    with TemporaryMaildir() as maildir:
        # Point delivery at the temporary maildir instead of SMTP/sendmail.
        maildir_cfg = """\
[DEFAULT]
to = [email protected]
email-protocol = maildir
maildir-path = {maildir_path}
maildir-mailbox = {maildir_mailbox}
""".format(maildir_path=maildir.path,
           maildir_mailbox=maildir.inbox_name)
        with ExecContext(maildir_cfg) as ctx:
            # Tell the test HTTP server to serve the next canned response.
            self.httpd_queue.put("next")
            ctx.call(
                "add", 'test',
                'http://127.0.0.1:{port}/gmane/feed.rss'.format(
                    port=self.httpd_port))
            ctx.call("run")
            # quick check to make sure right number of messages sent
            # and subjects are right
            msgs = maildir.inbox.values()  # type: List[mailbox.MaildirMessage]
            self.assertEqual(len(msgs), 5)
            # Exactly one message for the thread starter...
            self.assertEqual(
                len([
                    msg for msg in msgs
                    if msg["subject"] == "split massive package into modules"
                ]), 1)
            # ...and four replies in the other thread.
            self.assertEqual(
                len([
                    msg for msg in msgs
                    if msg["subject"] ==
                    "Re: new maintainer and mailing list for rss2email"
                ]), 4)
def test_user_agent_sub_fixed(self):
    "Badly substituted user agent from v3.11 is corrected"
    # Feeds added with version 3.11 ended up with a badly substituted
    # user agent string written into their configs.  Such entries should
    # be removed and the unsubstituted value written instead.  Note: the
    # repair only happens for the default 3.11 user agent -- a custom
    # value could mean anything, so it must be left untouched.
    cfg = """[DEFAULT]
to = [email protected]
[feed.test]
url = https://example.com/feed.xml
user-agent = rss2email/3.11 (https://github.com/rss2email/rss2email)
"""
    with ExecContext(cfg) as ctx:
        # Adding a feed forces the config file to be rewritten.
        ctx.call("add", "other", "https://example.com/other.xml")
        with ctx.cfg_path.open("r") as cfg_file:
            cfg_lines = cfg_file.readlines()
        start = cfg_lines.index("[feed.test]\n")
        # From the old feed's section onward (which includes the feed we
        # just added) no user-agent line may remain.
        for cfg_line in cfg_lines[start:]:
            self.assertNotIn("user-agent", cfg_line)
def test_delay(self):
    "Waits before fetching repeatedly from the same server"
    wait_time = 0.3
    delay_cfg = """[DEFAULT]
to = [email protected]
same-server-fetch-interval = {}
""".format(wait_time)
    num_requests = 3
    # The helper web server records request timing and reports back on
    # the queue whether r2e respected the configured interval.
    queue = multiprocessing.Queue()
    server = multiprocessing.Process(
        target=webserver_for_test_fetch,
        args=(queue, num_requests, wait_time))
    server.start()
    port = queue.get()
    with ExecContext(delay_cfg) as ctx:
        base_url = 'http://127.0.0.1:{port}/disqus/feed.rss'.format(port=port)
        for idx in range(num_requests):
            ctx.call("add", 'test{i}'.format(i=idx), base_url)
        ctx.call("run", "--no-send")
    verdict = queue.get()
    if verdict == "too fast":
        raise Exception("r2e did not delay long enough!")
def test_opml_export(self):
    """Exporting OPML to a file writes the expected bytes."""
    with ExecContext(self.cfg) as ctx:
        ctx.call("add", self.feed_name, self.feed_url)
        ctx.call("opmlexport", str(ctx.opml_path))
        # The export must create the file and reproduce the reference
        # OPML content exactly.
        self.assertTrue(ctx.opml_path.is_file())
        self.assertEqual(self.opml_content, ctx.opml_path.read_bytes())
def test_verbose_setting_info(self):
    "Verbose setting set to info in configuration should be respected"
    cfg = "[DEFAULT]\nverbose = info\n"
    with ExecContext(cfg) as ctx:
        result = ctx.call("run", "--no-send")
        # At "info" verbosity no debug-level records may reach stderr.
        self.assertNotIn('[DEBUG]', result.stderr)
def test_opml_import(self):
    """Importing OPML registers the feed in the data file."""
    with ExecContext(self.cfg) as ctx:
        ctx.opml_path.write_bytes(self.opml_content)
        ctx.call("opmlimport", str(ctx.opml_path))
        # The imported feed must show up in the JSON data file.
        with ctx.data_path.open('r') as data_file:
            state = json.load(data_file)
        self.assertEqual(state["feeds"][0]["name"], self.feed_name)
def test_opml_export_without_arg(self):
    """Smoke test: opmlexport without a file argument succeeds."""
    # Checking stdout would be stronger, but a zero exit status with and
    # without feeds is enough to guard against regressions for now.
    with ExecContext(self.cfg) as ctx:
        self.assertEqual(ctx.call("opmlexport").returncode, 0)
        ctx.call("add", self.feed_name, self.feed_url)
        self.assertEqual(ctx.call("opmlexport").returncode, 0)
def test_fetch_parallel(self):
    """Reads/writes to data file are sequenced correctly for multiple instances"""
    # Fix: the docstring above used to sit AFTER the skipTest guard, which
    # made it a no-op expression statement instead of the method docstring.
    if not UNIX:
        self.skipTest("No locking on Windows.")
    num_processes = 5
    process_cfg = """[DEFAULT]
to = [email protected]
"""
    # All r2e instances will output here
    input_fd, output_fd = _os.pipe()
    with ExecContext(process_cfg) as ctx:
        # We don't need to add any feeds - we are testing that the copy
        # and replace dance on the data file is sequenced correctly. r2e
        # always does the copy/replace, it must be sequenced correctly or
        # some processes will exit with a failure since their temp data
        # file was moved out from under them. Proper locking prevents that.
        command = [
            sys.executable, r2e_path, "-VVVVV", "-c",
            str(ctx.cfg_path), "-d",
            str(ctx.data_path), "run", "--no-send"
        ]
        processes = [
            subprocess.Popen(command,
                             stdout=output_fd,
                             stderr=output_fd,
                             close_fds=True) for _ in range(num_processes)
        ]
        # Close our copy of the write end so the pipe reader sees EOF
        # once all children have exited.
        _os.close(output_fd)
        # Bad locking will cause the victim process to exit with failure.
        for p in processes:
            p.wait()
        self.assertTrue(all(p.returncode == 0 for p in processes))
        # We check that each time the lock was acquired, the previous process
        # had finished writing to the data file. i.e. no process ever reads
        # the data file while another has it open.
        previous_line = None
        finish_precedes_acquire = True
        with _io.open(input_fd, 'r', buffering=1) as log:
            for line in log:
                if "acquired lock" in line and previous_line is not None:
                    finish_precedes_acquire = (
                        finish_precedes_acquire
                        and "save feed data" in previous_line)
                previous_line = line
        self.assertTrue(finish_precedes_acquire)
def test_no_send(self):
    """Entries fetched with --no-send are recorded but never mailed later."""
    config = self._config({"reply-changes": True})
    with ExecContext(config) as ctx:
        # Three fetches: the middle one runs with --no-send, so only the
        # first and the last feed versions should produce mail.
        for fixture, extra_args in (("feed1.xml", ()),
                                    ("feed2.xml", ("--no-send",)),
                                    ("feed3.xml", ())):
            shutil.copyfile("data/nodejs/" + fixture, str(self.feed_path))
            ctx.call("run", *extra_args)
        messages = self.maildir.inbox_messages(
        )  # type: List[mailbox.MaildirMessage]
        self.assertEqual(2, len(messages))
        # First message starts the thread; the second replies to it.
        self.assertIsNone(messages[0]['In-Reply-To'])
        self.assertEqual(messages[0]['Message-ID'],
                         messages[1]['In-Reply-To'])
def test_user_agent_substitutions(self):
    "User agent with substitutions done is not written to config"
    # Previously, if e.g. "r2e __VERSION__" was in the top level
    # user-agent config var, the substituted version (e.g. "r2e 3.11")
    # was written to the per-feed configs because substitution happened
    # at config load time.  The un-substituted form must be what gets
    # written out.
    cfg = """[DEFAULT]
to = [email protected]
user-agent = rss2email __VERSION__
"""
    with ExecContext(cfg) as ctx:
        ctx.call("add", "test", "https://example.com/feed.xml")
        # The old bug would leave "user-agent = rss2email 3.11" in the
        # feed-specific section; user-agent must not appear there at all.
        with ctx.cfg_path.open("r") as cfg_file:
            cfg_lines = cfg_file.readlines()
        start = cfg_lines.index("[feed.test]\n")
        for cfg_line in cfg_lines[start:]:
            self.assertNotIn("user-agent", cfg_line)
def _test_sendmail(self, exitcode, shouldlog, verbose='error'):
    """Run a fetch through a stub sendmail and check error logging.

    exitcode -- exit status the temporary sendmail stub reports
    shouldlog -- whether the sendmail failure message is expected in
                 r2e's stderr at the given verbosity
    verbose -- value for the config's ``verbose`` setting
    """
    with TemporarySendmail(exitcode) as sendmail:
        # Route delivery through the stub instead of a real MTA.
        cfg = """\
[DEFAULT]
to = [email protected]
sendmail = {sendmail}
sendmail_config = {sendmail_config}
verbose = {verbose}
""".format(sendmail=sendmail.bin,
           sendmail_config=sendmail.config,
           verbose=verbose)
        with ExecContext(cfg) as ctx:
            # Tell the test HTTP server to serve the next canned response.
            self.httpd_queue.put("next")
            ctx.call(
                "add", 'test',
                'http://127.0.0.1:{port}/gmane/feed.rss'.format(
                    port=self.httpd_port))
            p = ctx.call("run")
            # Presence (or absence) of the stub's failure message in
            # stderr depends on the configured verbosity.
            assertion = self.assertIn if shouldlog else self.assertNotIn
            assertion("Sendmail failing for reasons...", p.stderr)
def test_only_new(self):
    "Add and fetch contents"
    # NOTE: no trailing newline in this config literal, preserved from
    # the original test.
    cfg = "[DEFAULT]\nto = [email protected]"
    port_queue = multiprocessing.Queue()
    server = multiprocessing.Process(
        target=webserver_for_test_if_fetch, args=(port_queue, 10))
    server.start()
    port = port_queue.get()
    with ExecContext(cfg) as ctx:
        feed_url = 'http://127.0.0.1:{port}/disqus/feed.rss'.format(port=port)
        ctx.call("add", '--only-new', 'test', feed_url)
        # Adding the feed must create the data file...
        self.assertTrue(_os.path.exists(ctx.data_path))
        with ctx.data_path.open('r') as data_file:
            state = json.load(data_file)
        # ...and with --only-new the current entries are marked as seen.
        self.assertIn("seen", state["feeds"][0])
        self.assertEqual(port_queue.get(), "done")
def _call(self, config: str):
    """Run r2e twice, swapping in the second feed fixture between runs."""
    with ExecContext(config) as ctx:
        for fixture in ("data/nodejs/feed1.xml", "data/nodejs/feed2.xml"):
            shutil.copyfile(fixture, str(self.feed_path))
            ctx.call("run")