def setUp(self):
    """Build a DotScrapyPersistence instance with Popen and env vars mocked.

    subprocess.Popen is replaced with a mock whose process reports success
    (``wait() == 0``) and no output, and the Scrapy job environment
    variables are injected via ``mock.patch.dict``. All patches are undone
    through ``addCleanup`` so no state leaks between tests — the original
    code started the Popen patch without ever stopping it and mutated
    ``os.environ`` permanently.
    """
    # Fake process object: communicate() yields no output, wait() succeeds.
    self.mocked_proc = mock.MagicMock()
    self.mocked_proc.communicate.return_value = ([], None)
    self.mocked_proc.wait.return_value = 0
    self.mocked_popen = mock.Mock()
    self.mocked_popen.return_value = self.mocked_proc
    self.patch = mock.patch("subprocess.Popen", self.mocked_popen)
    crawler_mock = mock.Mock()
    crawler_mock.settings = Settings(
        {
            "DOTSCRAPY_ENABLED": True,
            "ADDONS_S3_BUCKET": "test-bucket",
            "ADDONS_AWS_ACCESS_KEY_ID": "access-key",
            "ADDONS_AWS_SECRET_ACCESS_KEY": "secret-key",
            "ADDONS_AWS_USERNAME": "******",
        }
    )
    # patch.dict restores the pre-test environment on cleanup, unlike a
    # plain os.environ.update() which would pollute later tests.
    env_patch = mock.patch.dict(
        os.environ,
        {
            "SCRAPY_JOB": "123/45/67",
            "SCRAPY_PROJECT_ID": "123",
            "SCRAPY_SPIDER": "testspider",
            "HOME": "/home/user",
            "DOTSCRAPY_DIR": "/tmp/.scrapy",
        },
    )
    env_patch.start()
    self.addCleanup(env_patch.stop)
    self.patch.start()
    # Guarantee the Popen patch is stopped even if a test errors out.
    self.addCleanup(self.patch.stop)
    self.instance = DotScrapyPersistence.from_crawler(crawler_mock)
 def test_from_crawler(self):
     crawler_mock = mock.Mock()
     crawler_mock.settings = Settings()
     self.assertRaises(NotConfigured, DotScrapyPersistence.from_crawler, crawler_mock)
     # add needed settings for from_crawler()
     crawler_mock.settings.set("DOTSCRAPY_ENABLED", True)
     crawler_mock.settings.set("ADDONS_S3_BUCKET", "s3-test-bucket")
     instance = DotScrapyPersistence.from_crawler(crawler_mock)
     assert isinstance(instance, DotScrapyPersistence)
 def test_from_crawler(self):
     crawler_mock = mock.Mock()
     crawler_mock.settings = Settings()
     self.assertRaises(NotConfigured, DotScrapyPersistence.from_crawler,
                       crawler_mock)
     # add needed settings for from_crawler()
     crawler_mock.settings.set('DOTSCRAPY_ENABLED', True)
     crawler_mock.settings.set('ADDONS_S3_BUCKET', 's3-test-bucket')
     instance = DotScrapyPersistence.from_crawler(crawler_mock)
     assert isinstance(instance, DotScrapyPersistence)
 def setUp(self):
     self.mocked_proc = mock.MagicMock()
     self.mocked_proc.communicate.return_value = ([], None)
     self.mocked_proc.wait.return_value = 0
     self.mocked_popen = mock.Mock()
     self.mocked_popen.return_value = self.mocked_proc
     self.patch = mock.patch('subprocess.Popen', self.mocked_popen)
     crawler_mock = mock.Mock()
     crawler_mock.settings = Settings({
         'DOTSCRAPY_ENABLED': True,
         'ADDONS_S3_BUCKET': 'test-bucket',
         'ADDONS_AWS_ACCESS_KEY_ID': 'access-key',
         'ADDONS_AWS_SECRET_ACCESS_KEY': 'secret-key',
         'ADDONS_AWS_USERNAME': '******',
     })
     os.environ.update({
         'SCRAPY_JOB': '123/45/67',
         'SCRAPY_PROJECT_ID': '123',
         'SCRAPY_SPIDER': 'testspider',
         'HOME': '/home/user',
         'DOTSCRAPY_DIR': '/tmp/.scrapy',
     })
     self.patch.start()
     self.instance = DotScrapyPersistence.from_crawler(crawler_mock)
 def setUp(self):
     self.mocked_proc = mock.MagicMock()
     self.mocked_proc.communicate.return_value = ([], None)
     self.mocked_proc.wait.return_value = 0
     self.mocked_popen = mock.Mock()
     self.mocked_popen.return_value = self.mocked_proc
     self.patch = mock.patch('subprocess.Popen', self.mocked_popen)
     crawler_mock = mock.Mock()
     crawler_mock.settings = Settings({
         'DOTSCRAPY_ENABLED': True,
         'ADDONS_S3_BUCKET': 'test-bucket',
         'ADDONS_AWS_ACCESS_KEY_ID': 'access-key',
         'ADDONS_AWS_SECRET_ACCESS_KEY': 'secret-key',
         'ADDONS_AWS_USERNAME': '******',
     })
     os.environ.update({
         'SCRAPY_JOB': '123/45/67',
         'SCRAPY_PROJECT_ID': '123',
         'SCRAPY_SPIDER': 'testspider',
         'HOME': '/home/user',
         'DOTSCRAPY_DIR': '/tmp/.scrapy',
     })
     self.patch.start()
     self.instance = DotScrapyPersistence.from_crawler(crawler_mock)