Code Example #1
    def setUp(self):
        import os

        # Amazon mock
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
        self.pill.attach(
            self.session,
            os.path.abspath(os.path.join(os.path.dirname(__file__), 'pill')))
        self.pill.playback()

        # Clean all tables
        for table in reversed(self.db.metadata.sorted_tables):
            self.db.session.execute(table.delete())
        self.db.session.commit()
        self.db.session.expunge_all()

        # Loading fixtures
        from api.accounts.fixtures import UserData
        if UserData not in self.datasets:
            self.datasets = list(self.datasets)
            self.datasets.append(UserData)

        try:
            self.load_fixtures(*self.datasets)
        except Exception as exc:
            logging.warning('Problem with loading fixture %s: %s',
                            self.datasets, exc)
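
Note: the 'pill' directory attached above holds pre-recorded AWS responses in the placebo naming scheme ("<service>.<OperationName>_<n>.json", e.g. elasticmapreduce.DescribeJobFlows_1.json in the later examples), and pill.playback() replays them in order instead of calling AWS. Below is a minimal sketch of how such recordings could be captured in the first place, assuming StreamPill keeps placebo's Pill interface where record() is the counterpart of playback(); the client call, region and target directory are illustrative only.

import os
import boto3

session = boto3.session.Session()
pill = StreamPill(debug=False)
pill.attach(session, os.path.abspath('pill'))  # directory where the JSON recordings live
pill.record()                                  # assumed record() mode: save real responses for later playback

ec2 = session.client('ec2', region_name='us-east-1')
ec2.describe_instances()  # would be stored as ec2.DescribeInstances_1.json under 'pill/'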
Code Example #2
    def setUp(self):
        super(AmazonEC2HelperTests, self).setUp()
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
        self.credentials = {
            'token': 'token',
            'secret': 'secret',
            'region': 'region'
        }
Code Example #3
class PigXMLPlanTest(unittest.TestCase):
    PIG_DS = 'cloudml.importhandler.datasources.PigDataSource'

    def setUp(self):
        super(PigXMLPlanTest, self).setUp()
        self.pill = StreamPill(debug=True)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session

    @patch('subprocess.Popen')
    @patch('time.sleep', return_value=None)
    def test_pig_datasource(self, sleep_mock, sqoop_mock):
        # Amazon mock
        self.pill.attach(
            self.session,
            os.path.abspath(
                os.path.join(os.path.dirname(__file__),
                             'placebo_responses/importhandler/pigxml')))
        self.pill.playback()

        self._plan = ExtractionPlan(
            os.path.join(BASEDIR, 'extractorxml',
                         'pig-train-import-handler.xml'))

        # Sqoop import subprocess mock
        process_mock = Mock()
        attrs = {'wait.return_value': 0, 'stdout.readlines.return_value': []}
        process_mock.configure_mock(**attrs)
        sqoop_mock.return_value = process_mock

        with patch('psycopg2.extras.DictCursor.execute'):
            with patch('psycopg2.connect'):
                self._extractor = ImportHandler(self._plan, PARAMS)

        pig_ds = self._extractor.plan.datasources['pig']
        # Checking iterator
        row = self._extractor.next()
        self.assertEqual(row['opening_id'], 57)
Code Example #4
class AmazonEMRHelperTests(TestCase):
    """ Tests of AmazonEMRHelper class. """

    PILL_RESPONSES_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'pill/emr/'))

    def setUp(self):
        super(AmazonEMRHelperTests, self).setUp()
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
        self.credentials = {
            'token': 'token',
            'secret': 'secret',
            'region': 'region'
        }

    def create_app(self):
        return app

    def test_terminate_jobflow(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'terminate'))
        self.pill.playback()

        helper = AmazonEMRHelper(**self.credentials)
        self.assertFalse(helper.terminate_jobflow('job_flow_id'))

    def test_describe_jobflow(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'describe'))
        self.pill.playback()

        helper = AmazonEMRHelper(**self.credentials)
        result = helper.describe_jobflow('job_flow_id')
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('ExecutionStatusDetail' in result)
        self.assertTrue('State' in result['ExecutionStatusDetail'])
Code Example #5
File: datasource_tests.py    Project: pybender/cloudml
    def setUp(self):
        super(PigDataSourceTests, self).setUp()
        self.pill = StreamPill(debug=True)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
Code Example #6
File: datasource_tests.py    Project: pybender/cloudml
class PigDataSourceTests(unittest.TestCase):
    PLACEBO_RESPONSES_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__),
                     'placebo_responses/datasource/'))

    def setUp(self):
        super(PigDataSourceTests, self).setUp()
        self.pill = StreamPill(debug=True)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session

    @patch('time.sleep', return_value=None)
    def test_get_iter_existing_job(self, sleep_mock):
        # Amazon mock
        self.pill.attach(
            self.session,
            os.path.join(self.PLACEBO_RESPONSES_DIR, 'get_iter_existing_job'))
        self.pill.playback()
        pig_import = 'cloudml.importhandler.datasources.PigDataSource'

        ds = PigDataSource(DataSourcesTest.PIG)

        # test correct case with existing job
        # (DescribeJobFlows_1: 2 steps exist before adding a new one)
        # (DescribeJobFlows_2: RUNNING, RUNNING)
        # (DescribeJobFlows_3: WAITING, COMPLETED)
        ds.jobid = "1234"
        with patch("{}.get_result".format(pig_import), MagicMock()):
            with patch("{}._process_running_state".format(pig_import)) \
                    as run_handler:
                with patch("{}._process_waiting_state".format(pig_import)) \
                        as wait_handler:
                    ds._get_iter('query here', 'query target')
                    # step_number is 3
                    run_handler.assert_called_with(ANY, 'RUNNING', 3)
                    wait_handler.assert_called_with(ANY, 'COMPLETED', 3)

    @patch('time.sleep', return_value=None)
    def test_get_iter_create_job(self, sleep_mock):
        # Amazon mock
        self.pill.attach(
            self.session,
            os.path.join(self.PLACEBO_RESPONSES_DIR, 'get_iter_create_job'))
        self.pill.playback()
        pig_import = 'cloudml.importhandler.datasources.PigDataSource'

        ds = PigDataSource(DataSourcesTest.PIG)
        ds.jobid = None
        with patch("{}.get_result".format(pig_import), MagicMock()):
            with patch("{}._process_completed_state".format(pig_import)) as \
                    complete_handler:
                ds._get_iter('query here', 'query target')
                self.assertEqual("234", ds.jobid)
                # step_number is 1
                complete_handler.assert_called_with(ANY, 'COMPLETED', 1)

    @patch('time.sleep', return_value=None)
    def test_get_iter_check_statuses(self, sleep_mock):
        # Amazon mock
        self.pill.attach(
            self.session,
            os.path.join(self.PLACEBO_RESPONSES_DIR, 'get_iter_statuses'))
        self.pill.playback()
        pig_import = 'cloudml.importhandler.datasources.PigDataSource'

        ds = PigDataSource(DataSourcesTest.PIG)

        self.assertRaises(ProcessException, ds._get_iter, 'query here')

        _store_query_to_s3 = MagicMock(return_value="s3://bucket/script.jar")
        clear_output_folder = MagicMock()
        _run_steps_on_existing_jobflow = MagicMock(return_value=1)
        get_result = MagicMock()
        _get_log = MagicMock(return_value="Some log")

        ds.jobid = "234"
        with patch("{}._store_query_to_s3".format(pig_import),
                   _store_query_to_s3):
            with patch("{}.clear_output_folder".format(pig_import),
                       clear_output_folder):
                with patch("{}.get_result".format(pig_import), get_result):
                    with patch("{}._run_steps_on_existing_jobflow".format(
                            pig_import, _run_steps_on_existing_jobflow)):
                        with patch("{}._get_log".format(pig_import), _get_log):
                            # test failed case with new job
                            # (DescribeJobFlows_1: FAILED, FAILED)
                            self.assertRaises(ImportHandlerException,
                                              ds._get_iter, "query here",
                                              "query target")

                            # test failed case with new job
                            # (DescribeJobFlows_2: COMPLETED, FAILED)
                            self.assertRaises(ImportHandlerException,
                                              ds._get_iter, "query here",
                                              "query target")

                            # test failed case with new job
                            # (DescribeJobFlows_3: WAITING, FAILED)
                            self.assertRaises(ImportHandlerException,
                                              ds._get_iter, "query here",
                                              "query target")

                            # unexpected status check
                            # (DescribeJobFlows_4: COMPLETED, UNEXPECTED)
                            with patch("{}._process_completed_state".format(
                                    pig_import)) as complete_handler:
                                ds._get_iter('query here', 'query target')
                                complete_handler.assert_called_with(
                                    ANY, 'UNEXPECTED', 1)

                            # unexpected and completed status check
                            # (DescribeJobFlows_5: UNEXPECTED, UNEXPECTED)
                            # (DescribeJobFlows_6: WAITING, PENDING)
                            # (DescribeJobFlows_7: COMPLETED, COMPLETED)
                            with patch("{}._process_waiting_state".format(
                                    pig_import)) as waiting_handler:
                                with patch("{}._process_completed_state".
                                           format(pig_import)) as \
                                        complete_handler:
                                    ds._get_iter('query here', 'query target')
                                    waiting_handler.assert_called_with(
                                        ANY, 'PENDING', 1)
                                    complete_handler.assert_called_with(
                                        ANY, 'COMPLETED', 1)

                            # running and completed status check
                            # (DescribeJobFlows_8: RUNNING, RUNNING)
                            # (DescribeJobFlows_9: WAITING, COMPLETED)
                            with patch("{}._process_running_state".format(
                                    pig_import)) as run_handler:
                                with patch("{}._process_waiting_state".format(
                                        pig_import)) as wait_handler:
                                    ds._get_iter('query here', 'query target')
                                    run_handler.assert_called_with(
                                        ANY, 'RUNNING', 1)
                                    wait_handler.assert_called_with(
                                        ANY, 'COMPLETED', 1)

                            # DescribeJobFlows_10 - corrupted response
                            # (no ExecutionStatusDetail)
                            self.assertRaises(ImportHandlerException,
                                              ds._get_iter, "query here",
                                              "query target")

                            # DescribeJobFlows_11 - corrupted response
                            # (no State)
                            self.assertRaises(ImportHandlerException,
                                              ds._get_iter, "query here",
                                              "query target")

    def test_generate_download_url(self):
        # Amazon mock
        self.pill.attach(
            self.session,
            os.path.join(self.PLACEBO_RESPONSES_DIR, 'download_url'))
        self.pill.playback()

        ds = PigDataSource(DataSourcesTest.PIG)
        url = ds.generate_download_url(step=0, log_type='stdout')
        self.assertTrue(url)

    def test_get_pig_step(self):
        # Amazon mock
        self.pill.attach(
            self.session,
            os.path.join(self.PLACEBO_RESPONSES_DIR, 'get_pig_step'))
        self.pill.playback()

        ds = PigDataSource(DataSourcesTest.PIG)
        pig_step = ds.get_pig_step('query')
        self.assertTrue(pig_step)

    def test_get_result_job(self):
        response_path = os.path.join(
            self.PLACEBO_RESPONSES_DIR,
            'get_iter_existing_job/elasticmapreduce.DescribeJobFlows_1.json')
        with open(response_path, 'r') as response_file:
            res = json.loads(response_file.read())

        ds = PigDataSource(DataSourcesTest.PIG)

        # job has been found
        job = ds._get_result_job(res['data'], "1234")
        self.assertTrue(job)
        self.assertEqual("1234", job["JobFlowId"])

        # no job with this id
        self.assertRaises(ImportHandlerException, ds._get_result_job,
                          res['data'], "1235")

        # error response
        self.assertRaises(ImportHandlerException, ds._get_result_job,
                          {"Error": "error"}, "1234")
Code Example #7
class AmazonEC2HelperTests(TestCase):
    """ Tests of AmazonEC2Helper class. """

    PILL_RESPONSES_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'pill/ec2/'))

    def setUp(self):
        super(AmazonEC2HelperTests, self).setUp()
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
        self.credentials = {
            'token': 'token',
            'secret': 'secret',
            'region': 'region'
        }

    def create_app(self):
        return app

    def test_terminate_instance(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'terminate'))
        self.pill.playback()

        helper = AmazonEC2Helper(**self.credentials)
        self.assertTrue(helper.terminate_instance('instance_id'))

    def test_request_spot_instance(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'request_spot'))
        self.pill.playback()

        helper = AmazonEC2Helper(**self.credentials)
        result = helper.request_spot_instance()
        self.assertTrue(result)
        self.assertTrue('SpotInstanceRequestId' in result)

    def test_get_instance(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'get_instance'))
        self.pill.playback()

        helper = AmazonEC2Helper(**self.credentials)
        self.assertTrue(helper.get_instance('instance_id'))

    def test_get_request_spot_instance(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'get_request'))
        self.pill.playback()

        helper = AmazonEC2Helper(**self.credentials)
        result = helper.get_request_spot_instance('request_id')
        self.assertTrue(result)
        self.assertTrue('SpotInstanceRequestId' in result)

    def test_cancel_request_spot_instance(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'cancel'))
        self.pill.playback()

        helper = AmazonEC2Helper(**self.credentials)
        result = helper.cancel_request_spot_instance('request_id')
        self.assertTrue(result)
        self.assertTrue('SpotInstanceRequestId' in result)
Code Example #8
class AmazonS3HelperTests(TestCase):
    """ Tests of AmazonS3Helper class. """

    PILL_RESPONSES_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'pill/s3/'))

    def setUp(self):
        super(AmazonS3HelperTests, self).setUp()
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
        self.credentials = {
            'token': 'token',
            'secret': 'secret',
            'bucket_name': 'bucket_name'
        }

    def test_get_download_url(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'download_url'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        self.assertTrue(helper.get_download_url('test', 3600))
        self.assertRaises(ValueError, helper.get_download_url, 'test', 'time')

    def test_list_keys(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'list_keys'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        # ListObjects_1
        res = helper.list_keys('prefix')
        self.assertEqual(set(['a', 'b', 'c']), set([k['Key'] for k in res]))
        # ListObjects_2
        self.assertEqual([], helper.list_keys('another_prefix'))
        self.assertRaises(ParamValidationError, helper.list_keys, None)

    def test_load_key(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'load_key'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        # GetObject_1
        res = helper.load_key('name')
        self.assertTrue(isinstance(res, basestring))

        # GetObject_2
        res = helper.load_key('name', with_metadata=True)
        self.assertTrue(isinstance(res, dict))
        self.assertTrue(isinstance(res['Body'], StreamingBody))
        self.assertEqual(res['Metadata']['Name'], 'name')

        # GetObject_3-6
        self.assertRaises(AmazonS3ObjectNotFound, helper.load_key, 'any')

    def test_put_file(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'put'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        with patch("boto3.s3.transfer.S3Transfer._multipart_upload") as mu:
            app.config['MULTIPART_UPLOAD_CHUNK_SIZE'] = 128
            helper.save_gz_file(
                'name',
                os.path.join(self.PILL_RESPONSES_DIR, 'put/test_file.py'),
                {'model_id': 234})
            mu.assert_called_with(
                os.path.join(self.PILL_RESPONSES_DIR, 'put/test_file.py'),
                'bucket_name', 'name', ANY, ANY)

        # PutObject_1
        self.assertTrue(
            helper.save_key('name',
                            os.path.join(self.PILL_RESPONSES_DIR,
                                         'put/test_file.py'),
                            {'model_id': 234},
                            compressed=False))
        # PutObject_2
        self.assertTrue(
            helper.save_key(
                'name',
                os.path.join(self.PILL_RESPONSES_DIR, 'put/test_file.py'),
                {'model_id': 234}))
        # PutObject_3
        self.assertTrue(
            helper.save_key_string('name', 'data', {'model_id': 234}))

    def test_set_key_metadata(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'metadata'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        # Key not found (HeadObject_1)
        self.assertRaises(AmazonS3ObjectNotFound, helper.set_key_metadata,
                          'name', {})
        # Key exists, empty metadata
        self.assertTrue(
            helper.set_key_metadata('name',
                                    meta={
                                        'Name': 'new_name',
                                        'Other': 'value',
                                        'Third': '3value'
                                    },
                                    store_previous=True))

    def test_delete_key(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'delete_key'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        self.assertTrue(helper.delete_key('name'))

    def test_key_exists(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'key_exists'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        # HeadObject_1
        self.assertFalse(helper.key_exists('name'))
        # HeadObject_2
        self.assertTrue(helper.key_exists('name'))

    def test_check_or_create_bucket(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'bucket'))
        self.pill.playback()

        helper = AmazonS3Helper(**self.credentials)
        # HeadBucket_1
        self.assertRaises(S3ResponseError, helper._check_or_create_bucket)

        # HeadBucket_2
        self.assertTrue(helper._check_or_create_bucket())

        # HeadBucket_3
        self.assertTrue(helper._check_or_create_bucket())

    def create_app(self):
        return app
Code Example #9
    def setUp(self):
        super(AmazonDynamoDBHelperTests, self).setUp()
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
Code Example #10
class AmazonDynamoDBHelperTests(TestCase):
    """
    You need local dynamodb to be running for these tests
    see api/logs/dynamodb/dynamodb_local.sh to be
    """
    TEST_TABLE_NAME = 'test_table'
    PILL_RESPONSES_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'pill/dynamodb/'))

    def setUp(self):
        super(AmazonDynamoDBHelperTests, self).setUp()
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session

    def test_create_table(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'create_table'))
        self.pill.playback()
        SCHEMA = [{
            'AttributeName': 'object_id',
            'KeyType': 'HASH'
        }, {
            'AttributeName': 'id',
            'KeyType': 'RANGE'
        }]

        SCHEMA_TYPES = [{
            'AttributeName': 'object_id',
            'AttributeType': 'N'
        }, {
            'AttributeName': 'id',
            'AttributeType': 'S'
        }]

        helper = AmazonDynamoDBHelper()
        self.assertFalse(self.TEST_TABLE_NAME in helper._tables)
        helper.create_table(self.TEST_TABLE_NAME, SCHEMA, SCHEMA_TYPES)
        self.assertTrue(self.TEST_TABLE_NAME in helper._tables)
        self.assertTrue(helper._get_table(self.TEST_TABLE_NAME))

    def test_put_get_delete(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PILL_RESPONSES_DIR, 'put_get_del'))
        self.pill.playback()
        helper = AmazonDynamoDBHelper()
        # put
        self.assertTrue(
            helper.put_item(AmazonDynamoDBHelperTests.TEST_TABLE_NAME, {
                'object_id': 1,
                'id': 'one',
                'data': 'd1'
            }))

        # should work without exceptions
        helper.batch_write(AmazonDynamoDBHelperTests.TEST_TABLE_NAME,
                           [{
                               'object_id': 2,
                               'id': 'two',
                               'data': 'd1'
                           }, {
                               'object_id': 3,
                               'id': 'three',
                               'data': 'd3'
                           }])

        # get
        item = helper.get_item(AmazonDynamoDBHelperTests.TEST_TABLE_NAME,
                               object_id=1)
        self.assertEqual(item['object_id'], 1)

        items = helper.get_items(AmazonDynamoDBHelperTests.TEST_TABLE_NAME,
                                 KeyConditionExpression=Key('object_id').eq(1)
                                 & Key('id').eq('one'))
        self.assertTrue(isinstance(items, list))
        self.assertEqual(items[0]['object_id'], 1)

        # delete
        self.assertTrue(
            helper.delete_item(AmazonDynamoDBHelperTests.TEST_TABLE_NAME,
                               object_id=1))
        # should work without exceptions
        helper.delete_items(AmazonDynamoDBHelperTests.TEST_TABLE_NAME,
                            ['object_id', 'id'],
                            KeyConditionExpression=Key('object_id').eq(2))

    def create_app(self):
        return app
Code Example #11
    def setUp(self):
        super(PigXMLPlanTest, self).setUp()
        self.pill = StreamPill(debug=True)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
Code Example #12
class BaseDbTestCase(TestCase):
    """
    Base class for TestCases that use the database.
    """
    datasets = []

    @property
    def db(self):
        return self.app.sql_db

    @classmethod
    def setUpClass(cls):
        app.config.from_object('api.test_config')
        app.config['MODIFY_DEPLOYED_MODEL'] = False
        app.config['MODIFY_DEPLOYED_IH'] = False
        app.config['CLOUDML_PREDICT_BUCKET_NAME'] = 'test-predict-bucket'

        # If Model is not defined, try to get it from the resource.
        if hasattr(cls, 'RESOURCE') and not hasattr(cls, 'Model'):
            cls.Model = cls.RESOURCE.Model

        cls.engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
        # TODO: do we need this or need to create test db manually?
        try:
            conn = cls.engine.connect()
            conn.close()
        except Exception:  # TODO: catch OperationalError
            logging.info(
                "Can't connect to the test database. Try to create a new one.")
            cls.exec_db_level_sql("create database %s" % app.config['DB_NAME'])

        app.sql_db.session.expunge_all()
        app.sql_db.drop_all()
        app.sql_db.metadata.create_all(cls.engine)
        app.sql_db.create_all()

    @classmethod
    def tearDownClass(cls):
        app.sql_db.session.expunge_all()
        app.sql_db.session.remove()
        app.sql_db.drop_all()
        # cls.dynamodb_mock.stop()
        # cls.s3_mock.stop()

    def setUp(self):
        import os

        # Amazon mock
        self.pill = StreamPill(debug=False)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session
        self.pill.attach(
            self.session,
            os.path.abspath(os.path.join(os.path.dirname(__file__), 'pill')))
        self.pill.playback()

        # Clean all tables
        for table in reversed(self.db.metadata.sorted_tables):
            self.db.session.execute(table.delete())
        self.db.session.commit()
        self.db.session.expunge_all()

        # Loading fixtures
        from api.accounts.fixtures import UserData
        if UserData not in self.datasets:
            self.datasets = list(self.datasets)
            self.datasets.append(UserData)

        try:
            self.load_fixtures(*self.datasets)
        except Exception as exc:
            logging.warning('Problem with loading fixture %s: %s',
                            self.datasets, exc)

        from api import celery
        celery.conf['CELERY_ALWAYS_EAGER'] = True
        celery.conf['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = False
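
A hypothetical sketch of how a concrete test case might build on BaseDbTestCase; ModelResource, ModelData and the assertion are illustrative names only, while the RESOURCE/Model and datasets hooks come from the base class above.

# Hypothetical subclass sketch; only the hooks (RESOURCE, datasets, self.Model)
# are taken from BaseDbTestCase above, the rest is illustrative.
class ModelTests(BaseDbTestCase):
    RESOURCE = ModelResource        # Model is derived from RESOURCE in setUpClass
    datasets = [ModelData]          # UserData is appended automatically in setUp

    def test_fixtures_loaded(self):
        # setUp wiped all tables, replayed the 'pill' AWS recordings and loaded
        # the fixtures, so the model table should contain the fixture rows
        # (assuming a Flask-SQLAlchemy-style Model.query interface).
        self.assertTrue(self.Model.query.count())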
Code Example #13
File: script_tests.py    Project: pybender/cloudml
class ScriptTest(unittest.TestCase):
    BASE_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../../../testdata'))
    PLACEBO_RESPONSES_DIR = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'placebo_responses/script/'))
    EMPTY_SRC = objectify.fromstring("""<script src="" />""")
    EMPTY_ALL = objectify.fromstring("""<script />""")
    TEXT = objectify.fromstring("""<script><![CDATA[1+1]]></script>""")
    LOCAL_SCRIPT_CORRECT = objectify.fromstring(
        """<script src="%s" />""" % os.path.join(BASE_DIR, "local_script.py"))
    LOCAL_SCRIPT_INCORRECT = objectify.fromstring(
        """<script src="%s" />""" % os.path.join(BASE_DIR, "local_script1.py"))
    PRIORITY_SCRIPT = objectify.fromstring(
        """<script src="%s"><![CDATA[2+2]]></script>""" %
        os.path.join(BASE_DIR, "local_script.py"))
    AMAZON_CORRECT = objectify.fromstring(
        """<script src="amazon_script.py" />""")
    AMAZON_INCORRECT = objectify.fromstring(
        """<script src="amazon_script1.py" />""")

    def setUp(self):
        super(ScriptTest, self).setUp()
        self.pill = StreamPill(debug=True)
        self.session = boto3.session.Session()
        boto3.DEFAULT_SESSION = self.session

    def test_empty_values(self):
        script = Script(self.EMPTY_SRC)
        self.assertEqual('', script.get_script_str())
        self.assertEqual('', script.src)
        self.assertEqual(None, script.text)

        script = Script(self.EMPTY_ALL)
        self.assertEqual('', script.get_script_str())
        self.assertEqual(None, script.src)
        self.assertEqual(None, script.text)

    def test_text_exists(self):
        script = Script(self.TEXT)
        self.assertEqual('1+1', script.get_script_str())
        self.assertEqual(None, script.src)
        self.assertEqual('1+1', script.text)

    def test_local_file(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PLACEBO_RESPONSES_DIR, 'incorrect'))
        self.pill.playback()

        script = Script(self.LOCAL_SCRIPT_INCORRECT)
        self.assertRaises(LocalScriptNotFoundException,
                          script._process_local_file)
        self.assertRaises(ImportHandlerException, script.get_script_str)
        script = Script(self.LOCAL_SCRIPT_CORRECT)
        self.assertEqual('def always99(a):\n    return 99',
                         script.get_script_str())
        self.assertEqual(os.path.join(self.BASE_DIR, "local_script.py"),
                         script.src)
        self.assertEqual(None, script.text)

        script = Script(self.PRIORITY_SCRIPT)
        self.assertEqual('def always99(a):\n    return 99',
                         script.get_script_str())
        self.assertEqual(os.path.join(self.BASE_DIR, "local_script.py"),
                         script.src)
        self.assertEqual('2+2', script.text)

    def test_incorrect_amazon_file(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PLACEBO_RESPONSES_DIR, 'incorrect'))
        self.pill.playback()

        script = Script(self.AMAZON_INCORRECT)
        self.assertRaises(ImportHandlerException, script.get_script_str)

    def test_correct_amazon_file(self):
        # Amazon mock
        self.pill.attach(self.session,
                         os.path.join(self.PLACEBO_RESPONSES_DIR, 'correct'))
        self.pill.playback()
        script = Script(self.AMAZON_CORRECT)
        self.assertEqual(None, script.text)
        self.assertEqual("amazon_script.py", script.src)
        self.assertEqual("3+5", script.get_script_str())