def test_result_is_cached(self):
        view = self.get_view()
        self.assertNotIn('purple', view.generate_css(),
                         'Unexpectedly found "purple" in the CSS')

        # Setting a custom style automatically invalidates the cache.
        # To test that the result is cached, we stub the cache invalidation
        # so that the cache persists.
        mocker = Mocker()
        invalidate_cache_mock = mocker.replace(invalidate_cache)
        expect(invalidate_cache_mock()).count(1, None)
        mocker.replay()

        ICustomStyles(self.layer['portal']).set('css.body-background',
                                                'purple')
        self.assertNotIn('purple', view.generate_css(),
                         'The result was not cached.')

        # Removing the stub and invalidating the cache should update the result.
        mocker.restore()
        mocker.verify()
        invalidate_cache()
        self.assertIn(
            'purple', view.generate_css(),
            'Expected "purple" in CSS - does the style'
            ' css.body-background no longer work?')
    def test_read_tags(self):
        mocker = Mocker()
        csv_reader = mocker.mock()

        headers = [
            'title', 'isbn', 'publisher', 'list_price', 'publish_date',
            'class_', 'sheet_numbers', 'folio', 'print_type', 'author',
            'barcode', 'comments', 'audience', 'awards']
        first_line = [
            'a', '1234', 'aph', '30', '2012', 'I.', 18, '4', 'mono', 'sb', 'a',
            'blahblahblah', 'ms', 'annual']
        field_map = {'title': 0, 'isbn': 1, 'publisher': 2, 'list_price': 3,
            'publish_date': 4, 'class_': 5, 'sheet_numbers': 6, 'folio': 7, 
            'print_type': 8, 'author': 9, 'barcode': 10, 'comments': 11 }

        csv_reader.next() # read headers
        mocker.result(headers)
        
        csv_reader.next() # read first line
        mocker.result(first_line)

        mocker.replay()

        reader = CatalogReader(csv_reader, field_map)
        item = reader.read()
        
        self.assertIsInstance(item, CatalogItem)
        self.assertEqual(2, len(item.tags))

        self.assertEqual('ms', item.tags['audience'])
        self.assertEqual('annual', item.tags['awards'])

        mocker.restore()
        mocker.verify()
    def test_find_match_matched(self):
        key = "chr1:154000-230000"
        mocker = Mocker()

        junction = mocker.mock()
        junction.coverage
        mocker.result(40)
        mocker.count(1, None)

        container = mocker.mock()
        container.keys()
        mocker.result([key])

        container[key]
        mocker.result(junction)
        mocker.count(1, None)

        mocker.replay()

        self.common, self.diff = jc.findMatch(container, container)
        self.assertEqual(self.common.keys(), ["chr1:154000-230000"])
        self.assertEqual(self.diff.keys(), [])

        mocker.restore()
        mocker.verify()
Example #4
class BundleDeserializationTests(TestCaseWithScenarios):

    scenarios = [
        ('dummy_import_failure', {
            'pathname': '/public/personal/admin/',
            'is_public': 'true',
            'content': 'bogus',
            'content_filename': 'test1.json',
        }),
    ]

    def setUp(self):
        super(BundleDeserializationTests, self).setUp()
        self.bundle = fixtures.create_bundle(
            self.pathname, self.content, self.content_filename)
        self.mocker = Mocker()

    def tearDown(self):
        self.bundle.delete_files()
        self.mocker.restore()
        self.mocker.verify()
        super(BundleDeserializationTests, self).tearDown()

    def test_deserialize_failure_leaves_trace(self):
        mock = self.mocker.patch(self.bundle)
        expect(mock._do_deserialize(False)).throw(Exception("boom"))
        self.mocker.replay()
        self.bundle.deserialize(False)
        self.assertFalse(self.bundle.is_deserialized)
        self.assertEqual(self.bundle.deserialization_error.error_message, "boom")

    def test_deserialize_ignores_deserialized_bundles(self):
        # just replay as we're not using mocker in this test case
        self.mocker.replay()
        self.bundle.is_deserialized = True
        self.bundle.deserialize(False)
        self.assertTrue(self.bundle.is_deserialized)

    def test_deserialize_sets_is_serialized_on_success(self):
        mock = self.mocker.patch(self.bundle)
        expect(mock._do_deserialize(False))
        self.mocker.replay()
        self.bundle.deserialize(False)
        self.assertTrue(self.bundle.is_deserialized)

    def test_deserialize_clears_old_error_on_success(self):
        BundleDeserializationError.objects.create(
            bundle=self.bundle,
            error_message="not important").save()
        mock = self.mocker.patch(self.bundle)
        expect(mock._do_deserialize(False))
        self.mocker.replay()
        self.bundle.deserialize(False)
        # note we cannot check for self.bundle.deserialization_error
        # directly due to the way django handles operations that affect
        # existing instances (it does not touch them like storm would
        # IIRC).
        self.assertRaises(
            BundleDeserializationError.DoesNotExist,
            BundleDeserializationError.objects.get, bundle=self.bundle)
Example #5
    def test_without_when(self):
        mocker = Mocker()
        mock_time = mocker.replace('time.time')
        mock_time()
        mocker.result(1.0)
        mock_time()
        mocker.result(2.0)
        mock_time()
        mocker.result(3.0)
        mock_time()
        mocker.result(4.0)
        mock_time()
        mocker.result(5.0)

        mocker.replay()

        controller = pid.PID(P = 0.5, I = 0.5, D = 0.5,
                             setpoint = 0, initial = 12)

        self.assertEqual(controller.calculate_response(6), -3)
        self.assertEqual(controller.calculate_response(3), -4.5)
        self.assertEqual(controller.calculate_response(-1.5), -0.75)
        self.assertEqual(controller.calculate_response(-2.25), -1.125)

        mocker.restore()
        mocker.verify()
    def test_read_basic_fields(self):
        mocker = Mocker()
        csv_reader = mocker.mock()

        csv_reader.next()
        header_line = [
            'title', 'isbn', 'publisher', 'list_price', 'publish_date',
            'class_', 'sheet_numbers', 'folio', 'print_type', 'author',
            'barcode', 'comments']
        mocker.result(header_line)

        csv_reader.next()
        first_line = [
            'a', '1234', 'aph', '30', '2012', 'I.', 18, '4', 'mono', 'sb', 'a',
            'blahblahblah' ]
        mocker.result(first_line)

        mocker.replay()

        field_map = {'title': 0, 'isbn': 1, 'publisher': 2, 'list_price': 3,
            'publish_date': 4, 'class_': 5, 'sheet_numbers': 6, 'folio': 7, 
            'print_type': 8, 'author': 9, 'barcode': 10, 'comments': 11 }

        reader = CatalogReader(csv_reader, field_map)
        item = reader.read()
        
        self.assertIsInstance(item, CatalogItem)

        mocker.restore()
        mocker.verify()
class BundleDeserializationTests(TestCaseWithScenarios):

    scenarios = [
        ('dummy_import_failure', {
            'pathname': '/public/personal/admin/',
            'is_public': 'true',
            'content': 'bogus',
            'content_filename': 'test1.json',
        }),
    ]

    def setUp(self):
        super(BundleDeserializationTests, self).setUp()
        self.bundle = fixtures.create_bundle(self.pathname, self.content,
                                             self.content_filename)
        self.mocker = Mocker()

    def tearDown(self):
        self.bundle.delete_files()
        self.mocker.restore()
        self.mocker.verify()
        super(BundleDeserializationTests, self).tearDown()

    def test_deserialize_failure_leaves_trace(self):
        mock = self.mocker.patch(self.bundle)
        expect(mock._do_deserialize(False)).throw(Exception("boom"))
        self.mocker.replay()
        self.bundle.deserialize(False)
        self.assertFalse(self.bundle.is_deserialized)
        self.assertEqual(self.bundle.deserialization_error.error_message,
                         "boom")

    def test_deserialize_ignores_deserialized_bundles(self):
        # just replay as we're not using mocker in this test case
        self.mocker.replay()
        self.bundle.is_deserialized = True
        self.bundle.deserialize(False)
        self.assertTrue(self.bundle.is_deserialized)

    def test_deserialize_sets_is_serialized_on_success(self):
        mock = self.mocker.patch(self.bundle)
        expect(mock._do_deserialize(False))
        self.mocker.replay()
        self.bundle.deserialize(False)
        self.assertTrue(self.bundle.is_deserialized)

    def test_deserialize_clears_old_error_on_success(self):
        BundleDeserializationError.objects.create(
            bundle=self.bundle, error_message="not important").save()
        mock = self.mocker.patch(self.bundle)
        expect(mock._do_deserialize(False))
        self.mocker.replay()
        self.bundle.deserialize(False)
        # note we cannot check for self.bundle.deserialization_error
        # directly due to the way django handles operations that affect
        # existing instances (it does not touch them like storm would
        # IIRC).
        self.assertRaises(BundleDeserializationError.DoesNotExist,
                          BundleDeserializationError.objects.get,
                          bundle=self.bundle)
Example #8
    def test_find_match_not_matched(self):
        key1 = 'chr1:154000-230000'
        mocker = Mocker()

        junction = mocker.mock()
        junction.coverage
        mocker.result(40)

        container1 = mocker.mock()
        container2 = mocker.mock()

        container1.keys()
        mocker.result([key1])

        container1[key1]
        mocker.result(junction)

        container2[key1]
        mocker.throw(KeyError)
        mocker.count(1)

        mocker.replay()

        self.common, self.diff = jc.findMatch(container1, container2)
        self.assertEqual(self.common.keys(), [])
        self.assertEqual(self.diff.keys(), [key1])

        mocker.restore()
        mocker.verify()
Example #9
    def test_find_match_matched(self):
        key = 'chr1:154000-230000'
        mocker = Mocker()

        junction = mocker.mock()
        junction.coverage
        mocker.result(40)
        mocker.count(1, None)

        container = mocker.mock()
        container.keys()
        mocker.result([key])

        container[key]
        mocker.result(junction)
        mocker.count(1, None)

        mocker.replay()

        self.common, self.diff = jc.findMatch(container, container)
        self.assertEqual(self.common.keys(), ['chr1:154000-230000'])
        self.assertEqual(self.diff.keys(), [])

        mocker.restore()
        mocker.verify()
    def test_result_is_cached(self):
        viewlet = self.get_viewlet()
        viewlet.update()
        self.assertNotIn('purple', viewlet.generate_css(),
                         'Unexpectedly found "purple" in the CSS')

        # Setting a custom style automatically invalidates the cache.
        # To test that the result is cached, we stub the cache invalidation
        # so that the cache persists.
        mocker = Mocker()
        invalidate_cache_mock = mocker.replace(invalidate_cache)
        expect(invalidate_cache_mock()).count(1, None)
        mocker.replay()

        ICustomStyles(self.layer['portal']).set('css.body-background', 'purple')
        self.assertNotIn('purple', viewlet.generate_css(),
                         'The result was not cached.')

        # Removing the stub and invalidating the cache should update the result.
        mocker.restore()
        mocker.verify()
        invalidate_cache()
        self.assertIn('purple', viewlet.generate_css(),
                      'Expected "purple" in CSS - does the style'
                      ' css.body-background no longer work?')
    def test_write(self):
        mocker = Mocker()
        db = mocker.mock()

        insert_book = '''
                    insert into book (title, isbn, publisher, list_price,
                    publish_date, class, sheet_numbers, folio, print_type,
                    author, barcode, comments) values
                    ('a', '1234', 'aph', '30', '2012', 'I.',
                    18, '4', 'mono', 'sb', 'a', 'blahblahblah')
                    '''
        db.query(insert_book)

        insert_tags = '''
                    '''
        db.query(insert_tags)
        db.commit()
        mocker.replay()

        writer = CatalogMySQLWriter(db)
        item = CatalogItem(
            'a', '1234', 'aph', '30', '2012', 'I.', 18, '4', 'mono', 'sb',
            'a', 'blahblahblah', { 'audience': 'ms', 'awards': 'annual' })
        writer.write(item)

        mocker.restore()
        mocker.verify()
    def test_required_field_missing(self):
        mocker = Mocker()
        csv_reader = mocker.mock()
        
        headers = [
            'title', 'isbn', 'publisher', 'list_price', 'publish_date',
            'class_', 'sheet_numbers', 'folio', 'print_type', 'author',
            'barcode']
        first_line = [
            'a', '1234', 'aph', '30', '2012', 'I.', 18, '4', 'mono', 'sb', 'a']
        field_map = {
            'title': 0, 'isbn': 1, 'publisher': 2, 'list_price': 3,
            'publish_date': 4, 'class_': 5, 'sheet_numbers': 6, 'folio': 7,
            'print_type': 8, 'author': 9, 'barcode': 10, 'comments': 11 }

        csv_reader.next() # read headers
        mocker.result(headers)
            
        csv_reader.next() # read first line
        mocker.result(first_line)

        mocker.replay()

        reader = CatalogReader(csv_reader, field_map)
        item = reader.read()

        self.assertIsNone(item.comments)

        mocker.restore()
        mocker.verify()
    def test_find_match_not_matched(self):
        key1 = "chr1:154000-230000"
        mocker = Mocker()

        junction = mocker.mock()
        junction.coverage
        mocker.result(40)

        container1 = mocker.mock()
        container2 = mocker.mock()

        container1.keys()
        mocker.result([key1])

        container1[key1]
        mocker.result(junction)

        container2[key1]
        mocker.throw(KeyError)
        mocker.count(1)

        mocker.replay()

        self.common, self.diff = jc.findMatch(container1, container2)
        self.assertEqual(self.common.keys(), [])
        self.assertEqual(self.diff.keys(), [key1])

        mocker.restore()
        mocker.verify()
Example #14
def mocker(verify_calls=True):
    m = Mocker()
    try:
        yield m
    finally:
        m.restore()
        if verify_calls:
            m.verify()
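
This helper is a generator, so the surrounding module presumably decorates it with contextlib.contextmanager; the usage sketch below assumes that decorator, and the names do_lookup and 'cache.get' are hypothetical, used only for illustration.

# A minimal usage sketch, not taken from the original sources: it assumes the
# mocker() helper above is decorated with @contextmanager, and the names
# do_lookup and 'cache.get' are hypothetical.
def test_cache_lookup():
    with mocker() as m:
        get = m.replace('cache.get')   # record the expected call on the stubbed function
        get('key')
        m.result('value')
        m.replay()                     # switch from recording to replay mode
        assert do_lookup('key') == 'value'
    # leaving the with-block restores 'cache.get' and verifies the expectation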
Example #16
class FreezedClock(object):

    def __init__(self, new_now):
        self.new_now = new_now

    def forward(self, **kwargs):
        self.new_now = datetime.now() + timedelta(**kwargs)
        self.__exit__(None, None, None)
        self.__enter__()

    def backward(self, **kwargs):
        self.new_now = datetime.now() - timedelta(**kwargs)
        self.__exit__(None, None, None)
        self.__enter__()

    def __enter__(self):
        if type(self.new_now) != datetime:
            raise ValueError(
                'The freeze_date argument must be a datetime.datetime'
                ' instance, got %s' % type(self.new_now).__name__)

        self.mocker = Mocker()

        # Replace "datetime.datetime.now" classmethod
        self._previous_datetime_now = datetime.now

        @classmethod
        def freezed_now(klass, tz=None):
            if not tz:
                return self.new_now.replace(tzinfo=None)
            elif self.new_now.tzinfo != tz:
                return tz.normalize(self.new_now.astimezone(tz))
            else:
                return self.new_now

        curse(datetime, 'now', freezed_now)

        # Replace "time.time" function
        new_time = (calendar.timegm(self.new_now.timetuple()) +
                    (self.new_now.timetuple().tm_isdst * 60 * 60) +
                    (self.new_now.microsecond * 0.000001))
        time_class = self.mocker.replace('time.time')
        expect(time_class()).call(lambda: new_time).count(0, None)

        self.mocker.replay()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.mocker.restore()
        self.mocker.verify()
        curse(datetime, 'now', self._previous_datetime_now)
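
A short usage sketch for FreezedClock; the chosen datetime is arbitrary and the assertions merely illustrate the frozen behaviour.

# A short usage sketch, not from the original sources: the chosen datetime is
# arbitrary and only illustrates how the frozen clock behaves.
import time
from datetime import datetime

def example_frozen_clock():
    with FreezedClock(datetime(2014, 5, 7, 12, 30)) as clock:
        assert datetime.now() == datetime(2014, 5, 7, 12, 30)
        before = time.time()        # returns the frozen timestamp
        clock.forward(hours=2)      # re-freezes the clock two hours later
        assert time.time() > before
    # on exit, datetime.now and time.time are restored to the real clock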
Example #17
    def test_without_when(self):
        mocker = Mocker()
        mock_time = mocker.replace('time.time')
        mock_time()
        mocker.result(1.0)
        mocker.replay()
        controller = pid.PID(P=0.5, I=0.5, D=0.5, setpoint=0, initial=12)
        mocker.restore()
        mocker.verify()
        self.assertEqual(controller.gains, (0.5, 0.5, 0.5))
        self.assertAlmostEqual(controller.setpoint[0], 0.0)
        self.assertEqual(len(controller.setpoint), 1)
        self.assertAlmostEqual(controller.previous_time, 1.0)
        self.assertAlmostEqual(controller.previous_error, -12.0)
        self.assertAlmostEqual(controller.integrated_error, 0)
Example #18
class MockEnvironment(object):
    def __init__(self):
        self.mocker = Mocker()
        _setup_fileio(self.mocker)
        _setup_mysqlclient(self.mocker)
        _setup_subprocess(self.mocker)

    def replace_environment(self):
        self.mocker.replay()

    def restore_environment(self):
        # restore MySQLClient
        # restore normal file io
        # restore normal subprocess
        self.mocker.restore()
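
A brief usage sketch for MockEnvironment; run_backup is a hypothetical stand-in for whatever code touches files, MySQL and subprocesses.

# A brief usage sketch, not from the original sources: run_backup is a
# hypothetical stand-in for code that touches files, MySQL and subprocesses.
def test_runs_against_mocked_environment():
    env = MockEnvironment()
    env.replace_environment()      # activates the stubbed file IO, MySQL client and subprocess
    try:
        run_backup()               # exercised entirely against the mocks
    finally:
        env.restore_environment()  # puts the real modules back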
Example #20
    def test_delete(self):
        to_test_dir = path('some\\non-existing\\file')
        to_test_file = path(self.fn1)

        mocker = Mocker()
        mock_exists = mocker.replace(to_test_file.exists)
        mock_exists()
        mocker.result(True)
        mocker.replay()

        to_test_file.delete()

        mocker.restore()
        mocker.verify()

        self.assertFalse(os.path.exists(self.fn1), msg='File was not deleted: "%s"' % self.fn1)
        self.assertRaises(ValueError, to_test_dir.delete)
Example #21
    def test_rmdir(self):
        to_test_dir = path(self.tmpdir2)
        to_test_file = path(self.fn1)

        mocker = Mocker()
        mock_is_dir = mocker.replace(to_test_dir.isdir)
        mock_is_dir()
        mocker.result(True)
        mocker.replay()

        to_test_dir.rmdir()

        mocker.restore()
        mocker.verify()

        self.assertFalse(os.path.exists(self.tmpdir2), msg='Directory was not deleted: "%s"' % self.tmpdir2)
        self.assertRaises(ValueError, to_test_file.rmdir)
    def test_authorized_when_plugin_returns_True(self):
        clientmanager = getUtility(IClientManager)
        client = clientmanager.get_client_by_id('foo')
        mocker = Mocker()
        request = DummyRequest(headers={'X-BRIDGE-ORIGIN': 'foo'})

        plugin = mocker.mock()
        expect(plugin(request)).result(plugin)
        expect(plugin.is_authorized(client)).result(True)
        sm = get_current_registry()
        sm.registerAdapter(plugin, [Interface], IAuthorizationPlugin,
                           name='foo', event=False)

        mocker.replay()

        manager = queryAdapter(request, IAuthorizationManager)
        self.assertTrue(manager.authorize())

        mocker.restore()
        mocker.verify()
Example #23
    def test_without_when(self):
        mocker = Mocker()
        mock_time = mocker.replace('time.time')
        mock_time()
        mocker.result(1.0)

        mocker.replay()

        controller = pid.PID(P = 0.5, I = 0.5, D = 0.5,
                             setpoint = 0, initial = 12)

        mocker.restore()
        mocker.verify()

        self.assertEqual(controller.gains, (0.5, 0.5, 0.5))
        self.assertAlmostEqual(controller.setpoint[0], 0.0)
        self.assertEqual(len(controller.setpoint), 1)
        self.assertAlmostEqual(controller.previous_time, 1.0)
        self.assertAlmostEqual(controller.previous_error, -12.0)
        self.assertAlmostEqual(controller.integrated_error, 0)
    def test_authorized_when_plugin_returns_True(self):
        clientmanager = getUtility(IClientManager)
        client = clientmanager.get_client_by_id('foo')
        mocker = Mocker()
        request = DummyRequest(headers={'X-BRIDGE-ORIGIN': 'foo'})

        plugin = mocker.mock()
        expect(plugin(request)).result(plugin)
        expect(plugin.is_authorized(client)).result(True)
        sm = get_current_registry()
        sm.registerAdapter(plugin, [Interface],
                           IAuthorizationPlugin,
                           name='foo',
                           event=False)

        mocker.replay()

        manager = queryAdapter(request, IAuthorizationManager)
        self.assertTrue(manager.authorize())

        mocker.restore()
        mocker.verify()
    def test_find_match_match_not_match(self):
        key1 = ["chr1:154000-230000", "chr1:155000-230000"]
        key2 = ["chr1:154000-230000"]
        mocker = Mocker()
        junction = mocker.mock()
        junction.coverage
        mocker.result(40)
        mocker.count(1, None)

        container1 = mocker.mock()
        container2 = mocker.mock()

        container1.keys()
        mocker.result(key1)

        container1[key1[0]]
        mocker.result(junction)
        mocker.count(1, None)

        container1[key1[1]]
        mocker.result(junction)
        mocker.count(1, None)

        container2[key1[0]]
        mocker.result(junction)
        mocker.count(1, None)

        container2[key1[1]]
        mocker.throw(KeyError)
        mocker.count(1)

        mocker.replay()
        self.common, self.diff = jc.findMatch(container1, container2)
        self.assertEqual(self.common.keys(), [key1[0]])
        self.assertEqual(self.diff.keys(), [key1[1]])

        mocker.restore()
        mocker.verify()
Example #26
    def test_find_match_match_not_match(self):
        key1 = ['chr1:154000-230000', 'chr1:155000-230000']
        key2 = ['chr1:154000-230000']
        mocker = Mocker()
        junction = mocker.mock()
        junction.coverage
        mocker.result(40)
        mocker.count(1, None)

        container1 = mocker.mock()
        container2 = mocker.mock()

        container1.keys()
        mocker.result(key1)

        container1[key1[0]]
        mocker.result(junction)
        mocker.count(1, None)

        container1[key1[1]]
        mocker.result(junction)
        mocker.count(1, None)

        container2[key1[0]]
        mocker.result(junction)
        mocker.count(1, None)

        container2[key1[1]]
        mocker.throw(KeyError)
        mocker.count(1)

        mocker.replay()
        self.common, self.diff = jc.findMatch(container1, container2)
        self.assertEqual(self.common.keys(), [key1[0]])
        self.assertEqual(self.diff.keys(), [key1[1]])

        mocker.restore()
        mocker.verify()
Example #27
class ExpectGeocodingRequest(object):
    """Mock geopy requests for geocoding a location.

    Example:

    with ExpectGeocodingRequest('Bern, Switzerland', (46.9479222, 7.444608499999999)):
        do_something()
    """

    def __init__(self, place="Bern, Switzerland", coords=(46.947922, 7.444608)):
        self.place = place
        self.coords = coords

    def __enter__(self):
        self.mocker = Mocker()
        expect = Expect(self.mocker)
        method = self.mocker.replace("ftw.geo.handlers.geocode_location")
        expect(method(ARGS, KWARGS)).result((self.place, self.coords, None))
        self.mocker.replay()

    def __exit__(self, exc_type, exc_value, traceback):
        if not exc_type:
            self.mocker.verify()
            self.mocker.restore()
Example #28
class TestDataLoader(unittest.TestCase):
    def setUp(self):
        self.mocker = Mocker()
        self.file = cStringIO.StringIO()
        self.file2 = cStringIO.StringIO()

    def tearDown(self):
        self.mocker.restore()
        self.mocker.verify()
        self.file.close()
        self.file2.close()

    def test_single_file_happy_path(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        expected = {
            '418': [{
                'venue_id': '41059b00f964a520850b1fe3',
                'latitude': 37.6164,
                'check_in_message': 'empty_message',
                'check_in_id': '12',
                'longitude': -122.386,
                'date': datetime.datetime(2012, 7, 18, 14, 43, 38)
            }, {
                'venue_id': '41059b00f964a520850b1fe3',
                'latitude': 37.6164,
                'check_in_message': 'empty_message',
                'check_in_id': '12',
                'longitude': -122.386,
                'date': datetime.datetime(2012, 7, 18, 14, 43, 38)
            }]
        }
        actual = DataLoader.load_check_ins_from_file(self.file)
        self.assertDictEqual(expected, actual)

    def test_invalid_number_of_check_in_parameters(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            "Error in line 2: the line should contain user_id, check-in_id, date, latitude, longitude, venue_id and check-in_message, separated by |"
        )

    def test_empty_strings_in_middle(self):
        self.file.write(
            "\n418|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            "Error in line 1: the line should contain user_id, check-in_id, date, latitude, longitude, venue_id and check-in_message, separated by |"
        )

    def test_empty_strings_in_end(self):
        self.file.write(
            "418|23|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n "
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            "Error in line 2: the line should contain user_id, check-in_id, date, latitude, longitude, venue_id and check-in_message, separated by |"
        )

    def test_invalid_date(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|123asd|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            'Error in line 2: invalid format of date, should be YYYY-MM-DD HH:MM:SS'
        )

    def test_longitude_not_a_number(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|45.54|a|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            'Error in line 2: longitude should be a float number')

    def test_longitude_out_of_bounds(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|45.5|-190.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            'Error in line 2: longitude should be between -90 and 90')

    def test_latitude_not_a_number(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|abcd|-122.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message,
                         'Error in line 2: latitude should be a float number')

    def test_latitude_out_of_bounds(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|100|-122.386|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            'Error in line 2: latitude should be between -90 and 90')

    def test_invalid_venue(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|34|-122.386||empty_message"
        )
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(
            cm.exception.message,
            'Error in line 2: venue_id can not be an empty string')

    def test_single_directory_happy_path(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|13|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        self.file2.write(
            "418|14|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|15|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message"
        )
        self.file2.seek(0)

        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.', 'file1', 'file2'])

        mock_open = self.mocker.replace('__builtin__.open')
        mock_open("file1", 'rU')
        self.mocker.result(self.file)
        mock_open("file2", 'rU')
        self.mocker.result(self.file2)

        self.mocker.replay()
        expected_dict = {
            '418': [{
                'venue_id': '41059b00f964a520850b1fe3',
                'latitude': 37.6164,
                'check_in_message': 'empty_message',
                'check_in_id': '12',
                'longitude': -122.386,
                'date': datetime.datetime(2012, 7, 18, 14, 43, 38)
            }, {
                'venue_id': '41059b00f964a520850b1fe3',
                'latitude': 45.54,
                'check_in_message': 'empty_message',
                'check_in_id': '13',
                'longitude': 45.6,
                'date': datetime.datetime(2012, 7, 18, 12, 34, 45)
            }, {
                'venue_id': '41059b00f964a520850b1fe3',
                'latitude': 37.6164,
                'check_in_message': 'empty_message',
                'check_in_id': '14',
                'longitude': -122.386,
                'date': datetime.datetime(2012, 7, 18, 14, 43, 38)
            }, {
                'venue_id': '41059b00f964a520850b1fe3',
                'latitude': 45.54,
                'check_in_message': 'empty_message',
                'check_in_id': '15',
                'longitude': 45.6,
                'date': datetime.datetime(2012, 7, 18, 12, 34, 45)
            }]
        }
        actual_dict = DataLoader.load_check_ins_from_directory(
            "some_directory")
        self.assertDictEqual(expected_dict, actual_dict)

    def test_same_check_in_ids_in_different_files(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|13|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)
        self.file2.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|15|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message"
        )
        self.file2.seek(0)

        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.', 'file1', 'file2'])

        mock_open = self.mocker.replace('__builtin__.open')
        mock_open("file1", 'rU')
        self.mocker.result(self.file)
        mock_open("file2", 'rU')
        self.mocker.result(self.file2)

        self.mocker.replay()
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_directory("some_directory")
        self.assertEqual(
            cm.exception.message,
            'Error processing file file2: check-in with ID 12 has already been encountered for user 418'
        )

    def test_same_check_in_ids_in_same_file(self):
        self.file.write(
            "418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message"
        )
        self.file.seek(0)

        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.', 'file1'])

        mock_open = self.mocker.replace('__builtin__.open')
        mock_open("file1", 'rU')
        self.mocker.result(self.file)

        self.mocker.replay()
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_directory("some_directory")
        self.assertEqual(
            cm.exception.message,
            'Error processing file file1: check-in with ID 12 has already been encountered for user 418'
        )

    def test_empty_directory(self):
        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.'])
        self.mocker.replay()
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_directory("some_directory")
        self.assertEqual(cm.exception.message,
                         'Error: directory some_directory is empty')
Example #29
class TestBuildJunctionDict(TestCase):
    def setUp(self):
        self.mocker = Mocker()
        self.junctions = self.mocker.mock()
        self.junction1 = self.mocker.mock()
        self.junction2 = self.mocker.mock()
        self.junction3 = self.mocker.mock()

        self.junction1.start
        self.mocker.result(100)
        self.mocker.count(0, None)

        self.junction1.end
        self.mocker.result(200)
        self.mocker.count(0, None)

        self.junction1.chrom
        self.mocker.result('chr1')
        self.mocker.count(0, None)

        self.junction2.start
        self.mocker.result(100)
        self.mocker.count(0, None)

        self.junction2.end
        self.mocker.result(300)
        self.mocker.count(0, None)

        self.junction2.chrom
        self.mocker.result('chr1')
        self.mocker.count(0, None)

        self.junction3.start
        self.mocker.result(200)
        self.mocker.count(0, None)

        self.junction3.end
        self.mocker.result(300)
        self.mocker.count(0, None)

        self.junction3.chrom
        self.mocker.result('chr1')
        self.mocker.count(0, None)

    def test_no_overlaps(self):

        self.junctions.iteritems()
        self.mocker.generate([('chr1:100-200', self.junction1)])

        self.mocker.replay()

        self.container = jc.buildJunctionDict(self.junctions)
        self.assertEqual(self.container['chr1:100'], [200])
        self.assertEqual(len(self.container), 1)

        self.mocker.restore()
        self.mocker.verify()

    def test_overlaps(self):

        self.junctions.iteritems()
        self.mocker.generate([('chr1:100-200', self.junction1),
                              ('chr1:100-300', self.junction2)])

        self.mocker.replay()
        self.container = jc.buildJunctionDict(self.junctions)
        self.assertEqual(self.container['chr1:100'], [200, 300])
        self.assertEqual(len(self.container), 1)

        self.mocker.restore()
        self.mocker.verify()

    def test_multiple_junctions_with_overlaps(self):
        self.junctions.iteritems()
        self.mocker.generate([('chr1:100-200', self.junction1),
                              ('chr1:100-300', self.junction2),
                              ('chr1:200-300', self.junction3)], )

        self.mocker.replay()
        self.container = jc.buildJunctionDict(self.junctions)
        self.assertEqual(self.container['chr1:100'], [200, 300])
        self.assertEqual(self.container['chr1:200'], [300])
        self.assertEqual(len(self.container), 2)

        self.mocker.restore()
        self.mocker.verify()
class Test_sascalc_Prop_calcpmi(MockerTestCase):
    def setUp(self):
        self.centertmp = sasop.Move.center

        self.m = Mocker()
        sasop.Move.center = self.m.mock()
        sasop.Move.center(ARGS)
        self.m.result(None)
        self.m.count(0, None)

        self.m.replay()

        self.o = sasmol.SasMol(0)

    def assert_list_almost_equal_flip_sign_allowed(self, a, b, places=5):
        if (len(a) != len(b)):
            raise TypeError
        else:
            sign = 1
            for i in range(len(a)):
                if isinstance(a[i], (int, float)):
                    if (numpy.isnan(a[i]) and numpy.isnan(b[i])): continue
                    if (a[i] * b[i] < 0.0): sign = -1
                    self.assertAlmostEqual(a[i], sign * b[i], places)
                else:
                    self.assert_list_almost_equal_flip_sign_allowed(
                        a[i], b[i], places)

    def reorder_eigens(self, result_eigenvalues, result_eigenvectors):
        idx = result_eigenvalues.argsort()
        idx = idx[::-1]
        result_eigenvalues = result_eigenvalues[idx]
        result_eigenvectors = result_eigenvectors[idx]
        result_eigenvectors[2] *= -1
        return result_eigenvalues, result_eigenvectors

    def test_one_atom(self):
        return
        '''
        
        self.o.setCoor(numpy.array([[[-1.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print list(result_I), '\n',list(result_eigenvalues), '\n', list(result_eigenvectors)
        expected_I = numpy.array([[156.14, 24.022, 36.032], [24.022, 120.108, -72.065], [36.032, -72.065, 60.054]], floattype)
        expected_eigenvalues = numpy.array([168.151, 168.151, -5.329e-15], floattype)
        expected_eigenvectors = numpy.array([[0.103, -0.812, 0.575], [0.964, 0.148, 0.222], [-0.267, 0.535, 0.802]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(expected_I, result_I, 3)        
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,3)
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)
        '''

    def test_two_centered_atoms(self):
        return
        '''
        self.o.setCoor(numpy.array([[[-1.0, -2.0, -3.0],[1.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C','C'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print list(result_eigenvalues), '\n', list(result_eigenvectors)
        expected_I = numpy.array([[26.,  -4.,  -6.], [-4.,  20., -12.], [-6., -12.,  10.]], floattype)     
        expected_eigenvalues = numpy.array([336.302, 336.302, -7.105e-15], floattype)
        expected_eigenvectors = numpy.array([[-0.103, -0.812, 0.575], [0.964, -0.148, -0.222], [0.267, 0.535, 0.802]],floattype)
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,3)
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)

        '''

    def test_two_uncentered_atoms(self):
        self.o.setCoor(
            numpy.array([[[-2.0, -2.0, -3.0], [1.0, 2.0, 3.0]]], floattype))
        self.o.setElement(['C', 'N'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(
            result_eigenvalues, result_eigenvectors)
        print result_I, '\n', result_eigenvalues, '\n', result_eigenvectors
        expected_eigenvalues = numpy.array([400.277, 394.737, 5.54], floattype)
        expected_eigenvectors = numpy.array(
            [[-6.274e-15, -8.321e-01, 5.547e-01],
             [9.246e-01, -2.114e-01, -3.170e-01],
             [3.810e-01, 5.129e-01, 7.693e-01]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvalues, result_eigenvalues, 3)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvectors, result_eigenvectors, 3)

    def test_six_uncentered_atoms(self):
        self.o.setCoor(
            numpy.array([[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
                          [1.0, 3.0, 5.0], [2.0, 4.0, 6.0], [0.0, 2.0, 3.0]]],
                        floattype))
        self.o.setElement(['C', 'N', 'O', 'C', 'N', 'O'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(
            result_eigenvalues, result_eigenvectors)
        print result_I, '\n', result_eigenvalues, '\n', result_eigenvectors
        expected_eigenvalues = numpy.array([5761.418, 5625.53, 139.66],
                                           floattype)
        expected_eigenvectors = numpy.array(
            [[0.351, -0.821, 0.451], [-0.837, -0.059, 0.544],
             [0.42, 0.568, 0.708]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvalues, result_eigenvalues, 2)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvectors, result_eigenvectors, 3)

    def test_six_uncentered_atoms_inf1(self):
        self.o.setCoor(
            numpy.array(
                [[[util.HUGE, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
                  [1.0, 3.0, 5.0], [2.0, 4.0, 6.0], [0.0, 2.0, 3.0]]],
                floattype))
        self.o.setMass([1.0, 2.0, 3.2, 3.6, 5.2, 2.8])
        self.o.setNatoms(len(self.o.mass()))
        with self.assertRaises(Exception):
            result = self.o.calcpmi(0)

    def test_six_uncentered_atoms_inf2(self):
        self.o.setCoor(
            numpy.array(
                [[[util.INF, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
                  [1.0, 3.0, 5.0], [2.0, 4.0, 6.0], [0.0, 2.0, 3.0]]],
                floattype))
        self.o.setMass([1.0, 2.0, 3.2, 3.6, 5.2, 2.8])
        self.o.setNatoms(len(self.o.mass()))
        with self.assertRaises(Exception):
            result = self.o.calcpmi(0)

    def test_six_uncentered_atoms_nan(self):
        self.o.setCoor(
            numpy.array(
                [[[util.NAN, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
                  [1.0, 3.0, 5.0], [2.0, 4.0, 6.0], [0.0, 2.0, 3.0]]],
                floattype))
        self.o.setMass([1.0, 2.0, 3.2, 3.6, 5.2, 2.8])
        self.o.setNatoms(len(self.o.mass()))
        with self.assertRaises(Exception):
            result = self.o.calcpmi(0)

    def test_six_uncentered_atoms_tiny(self):
        self.o.setCoor(
            numpy.array(
                [[[util.TINY, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
                  [1.0, 3.0, 5.0], [2.0, 4.0, 6.0], [0.0, 2.0, 3.0]]],
                floattype))
        self.o.setElement(['C', 'N', 'O', 'C', 'N', 'O'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(
            result_eigenvalues, result_eigenvectors)
        print list(result_I), '\n', list(result_eigenvalues), '\n', list(
            result_eigenvectors)
        expected_I = numpy.array(
            [[4675.176, -1324.189, -1572.26], [-1324.189, 3932.916, -2256.545],
             [-1572.26, -2256.545, 2894.494]], floattype)
        expected_eigenvalues = numpy.array([5748.699, 5591.441, 162.447],
                                           floattype)
        expected_eigenvectors = numpy.array(
            [[0.321, -0.821, 0.472], [-0.852, -0.032, 0.523],
             [0.414, 0.57, 0.709]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_I, result_I, 3)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvalues, result_eigenvalues, 2)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvectors, result_eigenvectors, 3)

    def test_six_uncentered_atoms_ZERO(self):
        self.o.setCoor(
            numpy.array(
                [[[util.ZERO, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
                  [1.0, 3.0, 5.0], [2.0, 4.0, 6.0], [0.0, 2.0, 3.0]]],
                floattype))
        self.o.setElement(['C', 'N', 'O', 'C', 'N', 'O'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(
            result_eigenvalues, result_eigenvectors)
        print list(result_I), '\n', list(result_eigenvalues), '\n', list(
            result_eigenvectors)
        expected_I = numpy.array(
            [[4675.176, -1324.189, -1572.26], [-1324.189, 3932.916, -2256.545],
             [-1572.26, -2256.545, 2894.494]], floattype)
        expected_eigenvalues = numpy.array([5748.699, 5591.441, 162.447],
                                           floattype)
        expected_eigenvectors = numpy.array(
            [[0.321, -0.821, 0.472], [-0.852, -0.032, 0.523],
             [0.414, 0.57, 0.709]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_I, result_I, 3)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvalues, result_eigenvalues, 2)
        self.assert_list_almost_equal_flip_sign_allowed(
            expected_eigenvectors, result_eigenvectors, 3)

    def tearDown(self):
        self.m.restore()
        self.m.verify()

        sasop.Move.center = self.centertmp
class TestDelivery(TwistedTestCase):
    """More delivery testing."""
    @inlineCallbacks
    def setUp(self):
        """Set up test."""
        yield super(TestDelivery, self).setUp()
        self.mocker = Mocker()
        self.fake_reactor = DummyReactor()
        self.content = self.mocker.mock()
        self.node_owner_id = 1
        self.node_uuid = uuid.uuid4()
        self.node_hash = "hash:blah"
        self.owner_id = 0
        self.free_bytes = 0
        self.node_volume_id = uuid.uuid4()
        self.content_node = self.mocker.mock()
        content_class = lambda _: self.content
        MetricsConnector.register_metrics("sli", instance=ExtendedMetrics())
        MetricsConnector.register_metrics("root", instance=ExtendedMetrics())
        MetricsConnector.register_metrics("user", instance=ExtendedMetrics())
        self.factory = StorageServerFactory(
            s3_host=None, s3_port=None, s3_key=None, s3_ssl=False,
            s3_secret=None, content_class=content_class,
            reactor=self.fake_reactor)

    @inlineCallbacks
    def tearDown(self):
        """Tear down test."""
        MetricsConnector.unregister_metrics()
        try:
            self.mocker.verify()
        finally:
            yield super(TestDelivery, self).tearDown()
            self.mocker.restore()

    @inlineCallbacks
    def test_new_volume_generation_ok(self):
        """Test new volume generation delivery ok."""
        user = self.mocker.mock()
        expect(self.content.get_user_by_id('user_id')
               ).count(1).result(succeed(user))
        expect(user.broadcast).count(1).result(lambda *a, **kw: None)

        # test
        self.mocker.replay()
        notif = VolumeNewGeneration('user_id', 'vol_id', 23)
        yield self.factory.deliver_volume_new_generation(notif)

    @inlineCallbacks
    def test_new_volume_generation_not_connected(self):
        """Test new volume generation delivery for a not connected user."""
        expect(self.content.get_user_by_id('user_id')
               ).count(1).result(succeed(None))

        # test
        self.mocker.replay()
        notif = VolumeNewGeneration('user_id', 'vol_id', 23)
        yield self.factory.deliver_volume_new_generation(notif)

    @inlineCallbacks
    def test_new_volume_generation_broadcasting_message(self):
        """Test new volume generation delivery with correct message."""
        deferred = defer.Deferred()
        protocol = self.mocker.mock()

        def test(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type,
                             protocol_pb2.Message.VOLUME_NEW_GENERATION)
            self.assertEqual(resp.volume_new_generation.volume, 'vol_id')
            self.assertEqual(resp.volume_new_generation.generation, 66)

            # other session, and generations
            self.mocker.reset()
            expect(protocol.session_id).count(0, 1).result(uuid.uuid4())
            expect(protocol.working_caps).count(0, 1).result(['generations'])
            self.mocker.replay()
            self.assertTrue(filter(protocol))

            # same session, and generations
            self.mocker.reset()
            expect(protocol.session_id).count(0, 1).result(session_id)
            expect(protocol.working_caps).count(0, 1).result(['generations'])
            self.mocker.replay()
            self.assertFalse(filter(protocol))

            deferred.callback(None)

        user = self.mocker.mock()
        expect(self.content.get_user_by_id('user_id')
               ).count(1).result(succeed(user))
        expect(user.broadcast).result(test)

        # test
        self.mocker.replay()
        session_id = uuid.uuid4()
        notif = VolumeNewGeneration('user_id', 'vol_id', 66, session_id)
        yield self.factory.deliver_volume_new_generation(notif)
        yield deferred

    @inlineCallbacks
    def test_share_accepted_broadcasting_message(self):
        """Test that ShareAccepted gets broadcast to both users properly."""
        deferred_from = defer.Deferred()
        deferred_to = defer.Deferred()
        share_id = uuid.uuid4()
        from_user = 1
        to_user = 2
        root_id = uuid.uuid4()

        def test_from(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type,
                             protocol_pb2.Message.SHARE_ACCEPTED)
            self.assertEqual(resp.share_accepted.share_id, str(share_id))
            self.assertEqual(resp.share_accepted.answer,
                             protocol_pb2.ShareAccepted.YES)
            deferred_from.callback(None)

        def test_to(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type,
                             protocol_pb2.Message.VOLUME_CREATED)
            self.assertEqual(resp.volume_created.share.share_id, str(share_id))
            self.assertEqual(resp.volume_created.share.subtree, str(root_id))
            self.assertEqual(resp.volume_created.share.direction,
                             protocol_pb2.Shares.TO_ME)
            deferred_to.callback(None)

        user = self.mocker.mock()
        user2 = self.mocker.mock()

        for i in range(2):
            expect(
                self.content.get_user_by_id(from_user)).result(succeed(user))
            expect(
                self.content.get_user_by_id(to_user)).result(succeed(user2))

        expect(user.id).count(2).result(1)
        expect(user.broadcast).count(1).result(test_from)
        expect(user.username).count(1).result(u"username")
        expect(user.visible_name).count(1).result(u"username")
        expect(user2.id).count(2).result(2)
        expect(user2.broadcast).count(1).result(test_to)

        # test
        self.mocker.replay()
        notif_to = ShareAccepted(share_id, u"name", root_id, from_user,
                                 to_user, Share.VIEW, True)
        notif_from = ShareAccepted(share_id, u"name", root_id, from_user,
                                   to_user, Share.VIEW, True)
        yield self.factory.deliver_share_accepted(notif_to,
                                                  recipient_id=to_user)
        yield self.factory.deliver_share_accepted(notif_from,
                                                  recipient_id=from_user)
        yield deferred_from
        yield deferred_to

    @inlineCallbacks
    def test_share_accepted_broadcasting_no_from(self):
        """Test ShareAccepted when the from user isn't present."""
        deferred_to = defer.Deferred()
        share_id = uuid.uuid4()
        to_user = 1
        from_user = 2
        root_id = uuid.uuid4()

        def test_to(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type,
                             protocol_pb2.Message.VOLUME_CREATED)
            self.assertEqual(resp.volume_created.share.share_id, str(share_id))
            self.assertEqual(resp.volume_created.share.subtree, str(root_id))
            self.assertEqual(resp.volume_created.share.direction,
                             protocol_pb2.Shares.TO_ME)
            deferred_to.callback(None)

        user = self.mocker.mock()
        user2 = self.mocker.mock()
        for i in range(2):
            expect(self.content.get_user_by_id(from_user)
                   ).result(succeed(None))
            expect(self.content.get_user_by_id(to_user)).result(succeed(user2))
        expect(self.content.get_user_by_id(from_user, required=True)
               ).result(succeed(user))
        expect(user.username).count(1).result(u"username")
        expect(user.visible_name).count(1).result(u"username")
        expect(user2.id).count(2).result(2)
        expect(user2.broadcast).count(1).result(test_to)
        # test
        self.mocker.replay()
        notif = ShareAccepted(share_id, u"name", root_id, from_user, to_user,
                              Share.VIEW, True)
        notif2 = ShareAccepted(share_id, u"name", root_id, from_user, to_user,
                               Share.VIEW, True)
        yield self.factory.deliver_share_accepted(notif,
                                                  recipient_id=from_user)
        yield self.factory.deliver_share_accepted(notif2, recipient_id=to_user)
        yield deferred_to
Example #32
class TestResolveOGUIDView(MockTestCase, TestCase):

    def setUp(self):
        super(TestResolveOGUIDView, self).setUp()

        self.testcase_mocker = Mocker()
        expect = Expect(self.testcase_mocker)

        sm = getGlobalSiteManager()
        siteroot = self.create_dummy(
            id='siteroot',
            getSiteManager=lambda: sm)
        alsoProvides(siteroot, IPloneSiteRoot)
        setSite(siteroot)

        registry = self.testcase_mocker.mock()
        self.mock_utility(registry, IRegistry)

        proxy = self.create_dummy(client_id='client1')
        expect(registry.forInterface(IClientConfiguration)).result(
            proxy).count(0, None)

        self.testcase_mocker.replay()

    def tearDown(self):
        setSite(None)

        self.testcase_mocker.restore()
        self.testcase_mocker.verify()

    def test_check_permissions_fails_with_nobody(self):
        mtool = self.mocker.mock()
        self.expect(mtool.getAuthenticatedMember()).result(
            SpecialUsers.nobody)
        self.mock_tool(mtool, 'portal_membership')

        self.replay()

        view = ResolveOGUIDView(object(), object())

        with TestCase.assertRaises(self, Unauthorized):
            view._check_permissions(object())


    def test_check_permission_fails_without_view_permission(self):
        obj = self.mocker.mock()

        mtool = self.mocker.mock()
        self.expect(mtool.getAuthenticatedMember().checkPermission(
                'View', obj)).result(False)
        self.mock_tool(mtool, 'portal_membership')

        self.replay()

        view = ResolveOGUIDView(object(), object())

        with TestCase.assertRaises(self, Unauthorized):
            view._check_permissions(obj)

    def test_redirect_to_other_client(self):
        oguid = 'client2:5'
        client2_url = 'http://otherhost/client2'
        target_url = '%s/@@resolve_oguid?oguid=%s' % (client2_url, oguid)

        info = self.mocker.mock()
        self.mock_utility(info, IContactInformation)
        self.expect(info.get_client_by_id('client2').public_url).result(
            client2_url)

        request = self.mocker.mock()
        self.expect(request.get('oguid')).result('client2:5')
        self.expect(request.RESPONSE.redirect(target_url)).result('REDIRECT')

        self.replay()

        view = ResolveOGUIDView(object(), request)
        self.assertEqual(view.render(), 'REDIRECT')

    def test_redirect_if_correct_client(self):
        absolute_url = 'http://anyhost/client1/somedossier'
        obj = self.mocker.mock()
        self.expect(obj.absolute_url()).result(absolute_url)

        context = object()

        request = self.mocker.mock()
        self.expect(request.get('oguid')).result('client1:444')
        self.expect(request.RESPONSE.redirect(absolute_url)).result(
            'redirected')

        intids = self.mocker.mock()
        self.expect(intids.getObject(444)).result(obj)
        self.mock_utility(intids, IIntIds)

        mtool = self.mocker.mock()
        self.expect(mtool.getAuthenticatedMember().checkPermission(
                'View', obj)).result(True)
        self.mock_tool(mtool, 'portal_membership')

        self.replay()

        view = ResolveOGUIDView(context, request)
        self.assertEqual(view.render(), 'redirected')
Example #33
class AccountGroupTests (TestCase):
    fixtures = ['customer_tests.json', ]

    def setUp(self):
        self.mocker = Mocker()

    def tearDown(self):
        self.mocker.restore() 

    def test_process_from_queue_without_messages (self):
        account = Account.objects.get(name = 'AccessWithPurchase')
        ag = AccountGroup (account)
        retcode = ag.process_from_queue()
        self.assertEqual (retcode, False)

    def test_process_from_queue_with_messages (self):
        ev = threading.Event()
        mock_Event = self.mocker.replace(ev)

        mock_Event ()
        self.mocker.result (mock_Event)
        self.mocker.count (3)

        mock_Event.set()
        self.mocker.result (True)
        self.mocker.count (3)

        mock_Queue = self.mocker.replace('Queue')
        mock_Queue()
        self.mocker.result (True)
        self.mocker.count (3)

        self.mocker.replay ()

        ms = MessageScheduler ({})
        processor =  SMSProcessor ()
        for i in range (0, 10):
            processor.sendSMS (WebServiceTests.username, WebServiceTests.password,
                'AccessWithPurchase', 'foo', 'bar', datetime.datetime.now())
        account = Account.objects.get(name = 'AccessWithPurchase')
        ag = AccountGroup (account)
        ms.account_groups[account.id] = ag
        ms.enqueue_messages ()
        retcode = ag.process_from_queue()
        self.assertEqual (retcode, True)
        self.assertEqual (ag.work.qsize (),  10)

    def test_create_accountThreads (self):
        dummy_AccountThread = self.mocker.replace('singularmsd.AccountThread')

        dummy_AccountThread(ANY, ANY)
        self.mocker.result (dummy_AccountThread)
        self.mocker.count (4)

        dummy_AccountThread.num_threads
        self.mocker.result (4)
        self.mocker.count (4)

        dummy_AccountThread.start ()
        self.mocker.result (True)
        self.mocker.count (4)
        self.mocker.replay ()

        account = Account.objects.get(name = 'AccessWithPurchase')
        ag = AccountGroup (account)
        ag.create_accountThreads()
        self.assertEqual (len(ag.threads), 4)
class TestBuildJunctionDict(TestCase):
    def setUp(self):
        self.mocker = Mocker()
        self.junctions = self.mocker.mock()
        self.junction1 = self.mocker.mock()
        self.junction2 = self.mocker.mock()
        self.junction3 = self.mocker.mock()

        self.junction1.start
        self.mocker.result(100)
        self.mocker.count(0, None)

        self.junction1.end
        self.mocker.result(200)
        self.mocker.count(0, None)

        self.junction1.chrom
        self.mocker.result("chr1")
        self.mocker.count(0, None)

        self.junction2.start
        self.mocker.result(100)
        self.mocker.count(0, None)

        self.junction2.end
        self.mocker.result(300)
        self.mocker.count(0, None)

        self.junction2.chrom
        self.mocker.result("chr1")
        self.mocker.count(0, None)

        self.junction3.start
        self.mocker.result(200)
        self.mocker.count(0, None)

        self.junction3.end
        self.mocker.result(300)
        self.mocker.count(0, None)

        self.junction3.chrom
        self.mocker.result("chr1")
        self.mocker.count(0, None)

    def test_no_overlaps(self):

        self.junctions.iteritems()
        self.mocker.generate([("chr1:100-200", self.junction1)])

        self.mocker.replay()

        self.container = jc.buildJunctionDict(self.junctions)
        self.assertEqual(self.container["chr1:100"], [200])
        self.assertEqual(len(self.container), 1)

        self.mocker.restore()
        self.mocker.verify()

    def test_overlaps(self):

        self.junctions.iteritems()
        self.mocker.generate([("chr1:100-200", self.junction1), ("chr1:100-300", self.junction2)])

        self.mocker.replay()
        self.container = jc.buildJunctionDict(self.junctions)
        self.assertEqual(self.container["chr1:100"], [200, 300])
        self.assertEqual(len(self.container), 1)

        self.mocker.restore()
        self.mocker.verify()

    def test_multiple_junctions_with_overlaps(self):
        self.junctions.iteritems()
        self.mocker.generate(
            [("chr1:100-200", self.junction1), ("chr1:100-300", self.junction2), ("chr1:200-300", self.junction3)]
        )

        self.mocker.replay()
        self.container = jc.buildJunctionDict(self.junctions)
        self.assertEqual(self.container["chr1:100"], [200, 300])
        self.assertEqual(self.container["chr1:200"], [300])
        self.assertEqual(len(self.container), 2)

        self.mocker.restore()
        self.mocker.verify()
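
# Based on the assertions above, jc.buildJunctionDict presumably groups
# junctions by "<chrom>:<start>" and collects their end coordinates. A
# hypothetical reference sketch (an assumption, not the real implementation):
def _build_junction_dict_sketch(junctions):
    container = {}
    for key, junction in junctions.iteritems():
        group_key = "%s:%s" % (junction.chrom, junction.start)
        container.setdefault(group_key, []).append(junction.end)
    return container
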
class NotificationTestCase(TestCase):
    """Test the Messaging API."""

    @defer.inlineCallbacks
    def setUp(self):
        yield super(NotificationTestCase, self).setUp()
        self.mocker = Mocker()

    @defer.inlineCallbacks
    def tearDown(self):
        yield super(NotificationTestCase, self).tearDown()
        self.mocker.restore()
        self.mocker.verify()

    def _set_up_mock_notify(self, title, message, icon):
        """Set up the mock_notify expectations."""
        mock_notify = self.mocker.replace('gi.repository.Notify')
        mock_notify.init(FAKE_APP_NAME)
        mock_notify.Notification.new(title, message, icon)

    def test_send_notification(self):
        """On notification, pynotify receives the proper calls."""
        self._set_up_mock_notify(FAKE_TITLE, FAKE_MESSAGE, ICON_NAME)
        mock_notification = self.mocker.mock()
        self.mocker.result(mock_notification)
        mock_notification.set_hint_int32('transient', int(True))
        mock_notification.show()
        self.mocker.replay()
        Notification(FAKE_APP_NAME).send_notification(FAKE_TITLE, FAKE_MESSAGE)

    def test_send_two_notifications(self):
        """On notification, pynotify receives the proper calls."""
        self._set_up_mock_notify(FAKE_TITLE, FAKE_MESSAGE, ICON_NAME)
        mock_notification = self.mocker.mock()
        self.mocker.result(mock_notification)
        mock_notification.set_hint_int32('transient', int(True))
        mock_notification.show()
        mock_notification.update(
            FAKE_TITLE + '2', FAKE_MESSAGE + '2', ICON_NAME)
        mock_notification.set_hint_int32('transient', int(True))
        mock_notification.show()
        self.mocker.replay()
        notifier = Notification(FAKE_APP_NAME)
        notifier.send_notification(FAKE_TITLE, FAKE_MESSAGE)
        notifier.send_notification(FAKE_TITLE + '2', FAKE_MESSAGE + '2')

    def test_send_notification_with_icon(self):
        """On notification with icon, pynotify receives the proper calls."""
        self._set_up_mock_notify(FAKE_TITLE, FAKE_MESSAGE, FAKE_ICON)
        mock_notification = self.mocker.mock()
        self.mocker.result(mock_notification)
        mock_notification.set_hint_int32('transient', int(True))
        mock_notification.show()
        self.mocker.replay()
        Notification(FAKE_APP_NAME).send_notification(
            FAKE_TITLE, FAKE_MESSAGE, FAKE_ICON)

    def test_append_notification(self):
        """On notification append, pynotify receives the proper calls."""
        self._set_up_mock_notify(FAKE_TITLE, FAKE_MESSAGE, ICON_NAME)
        mock_notification = self.mocker.mock()
        self.mocker.result(mock_notification)
        mock_notification.set_hint_string('x-canonical-append', '')
        mock_notification.set_hint_int32('transient', int(True))
        mock_notification.show()
        mock_notification.update(FAKE_TITLE, FAKE_APPENDAGE, ICON_NAME)
        mock_notification.set_hint_string('x-canonical-append', '')
        mock_notification.set_hint_int32('transient', int(True))
        mock_notification.show()
        self.mocker.replay()
        notifier = Notification(FAKE_APP_NAME)
        notifier.send_notification(FAKE_TITLE, FAKE_MESSAGE, append=True)
        notifier.send_notification(FAKE_TITLE, FAKE_APPENDAGE, append=True)
class TestAvailableListingViewsVocabulary(MockTestCase):

    def setUp(self):
        super(TestAvailableListingViewsVocabulary, self).setUp()
        self.testcase_mocker = Mocker()

        provideUtility(tile.availableListingViewsVocabulary,
                       name=u"Available Listing Views")

        # Mock the registry so that we have a static configuration in our
        # tests: we test functionality, not configuration.
        proxy = self.testcase_mocker.mock()
        proxy.listing_views
        self.testcase_mocker.result({
                'listing': 'List contents',
                'summary': 'Summarize contents'})
        self.testcase_mocker.count(0, None)

        registry = self.testcase_mocker.mock()
        provideUtility(provides=IRegistry, component=registry)
        registry.forInterface(IContentListingTileSettings)
        self.testcase_mocker.result(proxy)
        self.testcase_mocker.count(0, None)

        # we need to register the vocabulary utility in the
        # vocabulary registry manually at this point:
        vocabulary_registry = getVocabularyRegistry()
        try:
            vocabulary_registry.get(None, u"Available Listing Views")
        except VocabularyRegistryError:
            factory = getUtility(IVocabularyFactory,
                                 name=u"Available Listing Views")
            vocabulary_registry.register(u"Available Listing Views", factory)

        self.testcase_mocker.replay()

    def tearDown(self):
        self.testcase_mocker.verify()
        self.testcase_mocker.restore()
        super(TestAvailableListingViewsVocabulary, self).tearDown()

    def test_returns_vocabulary_with_terms(self):
        vocabulary_registry = getVocabularyRegistry()
        vocabulary = vocabulary_registry.get(None, u"Available Listing Views")

        self.assertTrue(IVocabularyTokenized.providedBy(vocabulary))
        self.assertEqual(len(vocabulary), 2)

    def test_keys_and_labels(self):
        vocabulary_registry = getVocabularyRegistry()
        vocabulary = vocabulary_registry.get(None, u"Available Listing Views")

        terms = list(vocabulary)

        self.assertEqual(terms[0].token, 'listing')
        self.assertEqual(terms[0].value, 'listing')
        self.assertEqual(terms[0].title, 'List contents')

        self.assertEqual(terms[1].token, 'summary')
        self.assertEqual(terms[1].value, 'summary')
        self.assertEqual(terms[1].title, 'Summarize contents')
Example #37
class FreezedClock(object):
    """Freeze the clock.

    Supported:
      time.time()
      datetime.now()
      datetime.utcnow()
    """

    def __init__(self, new_now, ignore_modules):
        self.new_now = new_now
        self.ignore_modules = ignore_modules or ()

    def forward(self, **kwargs):
        self.new_now += timedelta(**kwargs)
        self.__exit__(None, None, None)
        self.__enter__()

    def backward(self, **kwargs):
        self.new_now -= timedelta(**kwargs)
        self.__exit__(None, None, None)
        self.__enter__()

    def __enter__(self):
        if type(self.new_now) != datetime:
            raise ValueError(
                'The freeze_date argument must be a datetime.datetime'
                ' instance, got %s' % type(self.new_now).__name__)

        def is_caller_ignored(frames_up):
            """Inspect the stack for n frames up for a blacklisted caller.

            Stack inspection is very expensive, so we skip it by default, as
            we hit this code on every access to a frozen time. A prime example
            of catastrophic access density is a bunch of Plone workflow event
            handlers firing off a Dexterity ``createdInContainer`` event.
            """
            if self.ignore_modules:
                caller_frame = inspect.stack()[frames_up][0]
                module_name = inspect.getmodule(caller_frame).__name__
                return module_name in self.ignore_modules
            return False

        self.mocker = Mocker()

        # Replace "datetime.datetime.now" classmethod
        self._previous_datetime_now = datetime.now

        # Replace "datetime.datetime.utcnow" classmethod
        self._previous_datetime_utcnow = datetime.utcnow

        @classmethod
        def freezed_now(klass, tz=None):
            if is_caller_ignored(2):
                return self._previous_datetime_now(tz)

            if not tz:
                return self.new_now.replace(tzinfo=None)

            # Time was frozen to a naive DT, but a TZ-aware time is being requested
            # from now(). We assume the same TZ for freezing as requested by now.
            elif self.new_now.tzinfo is None:
                return self.new_now.replace(tzinfo=tz)

            elif self.new_now.tzinfo != tz:
                return tz.normalize(self.new_now.astimezone(tz))

            return self.new_now

        @classmethod
        def freezed_utcnow(klass):
            if is_caller_ignored(2):
                return self._previous_datetime_utcnow()

            if self.new_now.tzinfo and self.new_now.tzinfo != pytz.UTC:
                return pytz.UTC.normalize(self.new_now.astimezone(pytz.UTC))
            return self.new_now

        curse(datetime, 'now', freezed_now)
        curse(datetime, 'utcnow', freezed_utcnow)

        # Replace "time.time" function
        # datetime.timetuple does not contain any timezone information, so this
        # information will be lost. Moreover time.time should be in the system
        # timezone, so we need to correct for the offset of timezone used in
        # the freezing relative to the system timezone.
        local_tz = pytz.timezone(tzname[0])
        if self.new_now.tzinfo is None:
            new_time = mktime(self.new_now.timetuple())
        else:
            new_time = mktime(self.new_now.tzinfo.normalize(self.new_now + local_tz._utcoffset).utctimetuple())
        time_class = self.mocker.replace('time.time')

        def frozen_time():
            if is_caller_ignored(7):
                if self.new_now.tzinfo is None:
                    return mktime(self._previous_datetime_now().timetuple())
                else:
                    return mktime(self._previous_datetime_now().tzinfo.normalize(
                        self.new_now + local_tz._utcoffset).utctimetuple())
            return new_time

        expect(time_class()).call(frozen_time).count(0, None)

        self.mocker.replay()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.mocker.restore()
        self.mocker.verify()
        curse(datetime, 'now', self._previous_datetime_now)
        curse(datetime, 'utcnow', self._previous_datetime_utcnow)
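
# A minimal usage sketch (an assumption about intended usage, not part of the
# original module; it relies on the datetime/timedelta imports used above):
# FreezedClock acts as a context manager that pins datetime.now(),
# datetime.utcnow() and time.time() to the frozen value, which can then be
# shifted with forward()/backward(). Modules listed in ignore_modules would
# keep seeing the real clock.
def _example_freezed_clock_usage():
    start = datetime(2020, 1, 1, 12, 0)
    with FreezedClock(start, ignore_modules=None) as clock:
        assert datetime.now() == start                        # time is frozen
        clock.forward(hours=2)                                # shift the frozen time
        assert datetime.now() == start + timedelta(hours=2)
    # Leaving the block restores the real clock.
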
class SMSToolEventHandlerTests (unittest.TestCase):
 
    def setUp(self):
        self.mocker = Mocker()
        self.event_handler = smstool_event_handler.SmsToolEventHandler ([])
 
    def tearDown(self):
        self.mocker.restore()

    def test_parse_sms_ok_files (self):
        for file in glob.glob ('./sms_test_files/GSM*'):
            self.event_handler.headers = {}
            self.event_handler.text = ''
            self.event_handler.filename = file
            self.event_handler.parse_sms_file ()
            if self.event_handler.text == '':
                self.fail ("Could not parse %s correctly" % file)

    def test_wrong_files (self):
        for file in glob.glob ('./sms_test_files/BADGSM*'):
            self.event_handler.filename = file
            self.assertRaises (ValueError, self.event_handler.parse_sms_file)

    def test_input_parser_wrong_param_numbers (self):
        wrong_argv = ['one', 'two', 'three', 'four', 'five' ]
        self.event_handler.argv = wrong_argv
        self.assertRaises (ValueError, self.event_handler.input_parser)
        wrong_argv = ['one', 'two', 'three']
        self.event_handler.argv = wrong_argv
        self.assertRaises (ValueError, self.event_handler.input_parser)

    def test_input_parser_wrong_event (self):
        wrong_argv = ['smstool_event_handler', 'WrongEvent', 'filename', 'id']
        self.event_handler.argv = wrong_argv
        self.assertRaises (ValueError, self.event_handler.input_parser)

    def test_input_parser_ok_event_SENT (self):
        argv = ['smstool_event_handler', 'SENT', 'filename', 'id']
        self.event_handler.argv = argv
        self.event_handler.input_parser()
        self.assertEqual (self.event_handler.event, 'SENT')
        self.assertEqual (self.event_handler.filename, 'filename')
        self.assertEqual (self.event_handler.message_id, 'id')

    def test_input_parser_ok_event_FAILED (self):
        argv = ['smstool_event_handler', 'FAILED', 'filename', 'id']
        self.event_handler.argv = argv
        self.event_handler.input_parser()
        self.assertEqual (self.event_handler.event, 'FAILED')
        self.assertEqual (self.event_handler.filename, 'filename')
        self.assertEqual (self.event_handler.message_id, 'id')

    def test_input_parser_ok_event_RECEIVED (self):
        argv = ['smstool_event_handler', 'RECEIVED', 'filename']
        self.event_handler.argv = argv
        self.event_handler.input_parser()
        self.assertEqual (self.event_handler.event, 'RECEIVED')
        self.assertEqual (self.event_handler.filename, 'filename')

    def test_input_parser_ok_event_REPORT (self):
        argv = ['smstool_event_handler', 'REPORT', 'filename', 'id']
        self.event_handler.argv = argv
        self.event_handler.input_parser()
        self.assertEqual (self.event_handler.event, 'REPORT')
        self.assertEqual (self.event_handler.filename, 'filename')
        self.assertEqual (self.event_handler.message_id, 'id')

    def test_load_config_ok (self):
        self.event_handler.config_file = './smstool_event_handler.conf'
        self.event_handler.load_config()
        self.assertEqual ('pepito', self.event_handler.username)
        self.assertEqual ('password', self.event_handler.password)
        self.assertEqual ('TestAccess', self.event_handler.access)
        self.assertEqual ('http://localhost:8080/', self.event_handler.server_url)

    def test_load_config_ko (self):
        self.event_handler.config_file = './smstool_event_handler_test_wrong.conf'
        self.assertRaises (ValueError, self.event_handler.load_config)

    def test_load_config_not_found (self):
        self.event_handler.config_file = './no_config_file.conf'
        self.assertRaises (ValueError, self.event_handler.load_config)

    def test_dispatch_SENT_event (self):
        dummy_SOAPpy= self.mocker.replace('SOAPpy')
        dummy_SOAPpy.SOAPProxy (ANY)
        self.mocker.count (1)
        self.mocker.result (True)
        self.mocker.replay ()

        argv = ['smstool_event_handler', 'SENT', './sms_test_files/singularmszhwabc.sms', '1JyoAT']
        event_handler = smstool_event_handler.SmsToolEventHandler (argv, './smstool_event_handler.conf')
        event_handler.dispatch ()
        self.mocker.verify()


    def test_dispatch_RECEIVED_event (self):
        dummy_server = self.mocker.mock ()
        dummy_SOAPpy= self.mocker.replace('SOAPpy')
        dummy_SOAPpy.SOAPProxy (ANY)
        self.mocker.count (1)
        self.mocker.result (dummy_server)

        dummy_server.receiveSMS(ARGS, KWARGS)
        self.mocker.count (1)
        self.mocker.result (True)
        self.mocker.replay ()

        argv = ['smstool_event_handler', 'RECEIVED', './sms_test_files/GSM1.M1h8yB']
        event_handler = smstool_event_handler.SmsToolEventHandler (argv, './smstool_event_handler.conf')
        event_handler.dispatch ()
        self.mocker.verify()

    def test_dispatch_FAILED_event (self):
        dummy_SOAPpy= self.mocker.replace('SOAPpy')
        dummy_SOAPpy.SOAPProxy (ANY)
        self.mocker.count (1)
        self.mocker.result (True)
        self.mocker.replay ()

        argv = ['smstool_event_handler', 'FAILED', './sms_test_files/GSM1.M1h8yB', '1JyoAT']
        event_handler = smstool_event_handler.SmsToolEventHandler (argv, './smstool_event_handler.conf')
        event_handler.dispatch ()
        self.mocker.verify()

    def test_dispatch_REPORT_event (self):
        dummy_SOAPpy= self.mocker.replace('SOAPpy')
        dummy_SOAPpy.SOAPProxy (ANY)
        self.mocker.count (1)
        self.mocker.result (True)
        self.mocker.replay ()

        argv = ['smstool_event_handler', 'REPORT', './sms_test_files/GSM1.M1h8yB', '1JyoAT']
        event_handler = smstool_event_handler.SmsToolEventHandler (argv, './smstool_event_handler.conf')
        event_handler.dispatch ()
        self.mocker.verify()
Example #39
class MessageSchedulerTests (TestCase):
    fixtures = ['customer_tests.json', ]

    def setUp(self):
        self.mocker = Mocker()

    def tearDown(self):
        self.mocker.restore() 

    def test_create_account_groups_without_accounts (self):
        dummy_Account = self.mocker.replace('accounting.models.Account')

        dummy_Account.objects.all ()
        self.mocker.result ([])
        self.mocker.count (2)

        self.mocker.replay ()

        ms = MessageScheduler ({})
        ms.create_account_groups()

    def test_create_account_groups_with_accounts (self):
        dummy_AccountGroup = self.mocker.replace('singularmsd.AccountGroup')

        dummy_AccountGroup(ANY)
        self.mocker.result (dummy_AccountGroup)
        self.mocker.count (3)

        dummy_AccountGroup.account.id
        self.mocker.result (1)
        self.mocker.count (3)

        dummy_AccountGroup.start ()
        self.mocker.result (True)
        self.mocker.count (3)
        self.mocker.replay ()

        ms = MessageScheduler ({})
        ms.create_account_groups()

    def test_enqueue_messages_without_messages (self):
        dummy_AccountGroup = self.mocker.replace('singularmsd.AccountGroup')

        dummy_AccountGroup(ANY)
        self.mocker.result (dummy_AccountGroup)
        self.mocker.count (3)

        dummy_AccountGroup.account.id
        self.mocker.result (1)
        self.mocker.count (3)

        dummy_AccountGroup.start ()
        self.mocker.result (True)
        self.mocker.count (3)

        self.mocker.replay ()

        ms = MessageScheduler ({})
        ms.create_account_groups()
        ms.enqueue_messages ()

    def test_enqueue_messages_with_messages_mocker (self):
        dummy_AccountGroup = self.mocker.replace('singularmsd.AccountGroup')

        dummy_AccountGroup.account.id
        self.mocker.result (1)
        self.mocker.count (6)

        dummy_AccountGroup(ANY)
        self.mocker.result (dummy_AccountGroup)
        self.mocker.count (6)

        dummy_AccountGroup.start ()
        self.mocker.result (True)
        self.mocker.count (6)

        dummy_AccountGroup.event.set ()
        self.mocker.result (True)
        self.mocker.count (20)

        self.mocker.replay ()

        ms = MessageScheduler ({})
        ms.create_account_groups()
        processor =  SMSProcessor ()
        for i in range (0, 10):
            processor.sendSMS (WebServiceTests.username, WebServiceTests.password,
                'AccessWithPurchase', 'foo', 'bar', datetime.datetime.now())
        for i in range (0, 10):
            processor.sendSMSToChannel (WebServiceTests.username, WebServiceTests.password,
                'AccessWithPurchase', 'TestChannel', 'bar', datetime.datetime.now())
        ms.enqueue_messages ()
class Test_unit_sassubset_Mask_get_dihedral_subset_mask(MockerTestCase): 

   def mock_up(self, Cls_ptch, mthd, mocker, result=None, mmin=0, mmax=None):
      methodToCall = getattr(Cls_ptch,mthd)
      methodToCall(ARGS)
      mocker.result(result)
      mocker.count(mmin, mmax)

   def mock_up_get_dihedral_subset_mask(self, Cls, mocker, natoms, name, resid):
      Cls_ptch = mocker.patch(Cls)
      self.mock_up(Cls_ptch, 'natoms', mocker, natoms)
      self.mock_up(Cls_ptch, 'name', mocker, name)
      self.mock_up(Cls_ptch, 'resid', mocker, resid)
      mocker.replay()

   def assert_list_almost_equal(self,a,b,places=5):
        if (len(a)!=len(b)):
           raise TypeError
        else:
           for i in range(len(a)):
              if isinstance(a[i],(int,float,numpy.generic)):
                 if (numpy.isnan(a[i]) and numpy.isnan(b[i])): continue
                 self.assertAlmostEqual(a[i],b[i],places)
              else:
                 self.assert_list_almost_equal(a[i],b[i],places)
 

   def setUp(self):
      self.m=Mocker()
      self.o=sasmol.SasMol(0)


   def test_single_residue_nomask(self):
      '''
      test for a single residue
      nomask for that residue
      '''
      natoms=5
      name=['N','CA','C','O','CB']
      resid=[1]*natoms
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[]
      #
      expected_farray = []
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_single_residue_mask(self):
      '''
      test for a single residue
      mask that residue
      '''
      natoms=5
      name=['N','CA','C','O','CB']
      resid=[1]*natoms
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1]
      #
      expected_farray = [[1,1,1,0,0]]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_nomask(self):
      '''
      test for three residue
      nomask for all residues
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[]
      #
      expected_farray = []
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_first_one(self):
      '''
      test for three residues
      mask the first residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1]
      #
      expected_farray = [[1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_second_one(self):
      '''
      test for three residues
      mask the second residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[2]
      #
      expected_farray = [[0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0]]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_third_one(self):
      '''
      test for three residues
      mask the third residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[3]
      #
      expected_farray = [[0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0]]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_first_second(self):
      '''
      test for three residues
      mask the first and second residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1,2]
      #
      expected_farray = [[1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6, [0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0]]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_second_third(self):
      '''
      test for three residues
      mask the second and third residues
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[2,3]
      #
      expected_farray = [[0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0], [0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0]]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_third_first(self):
      '''
      test for three residues
      mask the third and first residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[3,1]
      #
      expected_farray = [[0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0], [1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_all(self):
      '''
      test for three residues
      mask all residues
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1,2,3]
      #
      expected_farray = [[1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6, [0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0], [0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0], ]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_500_residues_nomask(self):
      '''
      test for 500 residues
      mask no residue
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[]
      #
      expected_farray = []
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_500_residues_mask_number100(self):
      '''
      test for 500 residues
      mask the 300th residue
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      nf=300
      flexible_residues=[nf]
      #
      expected_farray = [([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   @skipIf(os.environ['SASSIE_LARGETEST']=='n',"I am not testing large files")
   def test_500_residues_mask_100to300(self):
      '''
      test for 500 residues
      mask residues 100 through 299
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=range(100,300)
      #
      expected_farray=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_500_residues_mask_random_10_residues(self):
      '''
      test for 500 residues
      mask 10 randomly chosen residues
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[123,12,90,399,1,89,221,78,91,129]
      #
      expected_farray=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   @skipIf(os.environ['SASSIE_LARGETEST']=='n',"I am not testing large files")
   def test_500_residues_mask_all_residues(self):
      '''
      test for 500 residues
      mask all non-terminal residues
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=range(1,nres-1)
      #
      expected_farray=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_1_residues_rna_mask_none(self):
      '''
      test for rna with 1 residue, mask none
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=1
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_1_residues_rna_mask_all(self):
      '''
      test for rna with 1 residue, mask all
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=1
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      flexible_residues=[0]
      #
      expected_farray = numpy.array([[1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]],numpy.long)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_5_residues_rna_mask_none(self):
      '''
      test for rna with 5 residues, mask none
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=5
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_5_residues_rna_mask_random(self):
      '''
      test for rna with 5 residue, mask 2 and 3
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=5
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=[2,3]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', map(int,farray.tolist()[0]), '\nexpected_mask:\n',expected_farray[0]
      print 'result_mask:\n', map(int,farray.tolist()[1]), '\nexpected_mask:\n',expected_farray[1]
      self.assert_list_almost_equal(farray, expected_farray)

   def test_5_residues_rna_mask_all(self):
      '''
      test for rna with 5 residues, mask all
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=5
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=range(1,nres-1)
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_500_residues_rna_mask_all(self):
      '''
      test for rna with 500 residues, mask all
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=range(1,nres-1)
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def tearDown(self):
      self.m.restore()
      self.m.verify()
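
# A hypothetical helper (not part of sasmol) that spells out the expected
# per-residue mask pattern these protein (mtype=0) tests build inline: for a
# flexible residue nf, the C atom of residue nf-1, the N/CA/C atoms of
# residue nf itself and the N atom of residue nf+1 are flagged with 1.
def expected_protein_dihedral_mask(nf, natom, nres):
    prev_res = [0, 0, 1] + [0] * (natom - 3)   # only C of residue nf-1
    this_res = [1, 1, 1] + [0] * (natom - 3)   # N, CA, C of residue nf
    next_res = [1] + [0] * (natom - 1)         # only N of residue nf+1
    return (([0] * natom) * (nf - 1) + prev_res + this_res + next_res
            + ([0] * natom) * (nres - nf - 2))
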
class AccountGroupTests(TestCase):
    fixtures = ["customer_tests.json"]

    username = "******"
    password = "******"

    def setUp(self):
        self.mocker = Mocker()

    def tearDown(self):
        self.mocker.restore()

    def test_process_from_queue_without_messages(self):
        account = Account.objects.get(name="AccessWithPurchase")
        ag = AccountGroup(account)
        retcode = ag.process_from_queue()
        self.assertEqual(retcode, False)

    def test_process_from_queue_with_messages(self):
        ev = threading.Event()
        mock_Event = self.mocker.replace(ev)

        mock_Event()
        self.mocker.result(mock_Event)
        self.mocker.count(0, 10)

        mock_Event.set()
        self.mocker.result(True)
        self.mocker.count(0, 10)

        mock_Queue = self.mocker.replace("Queue")
        mock_Queue()
        self.mocker.result(True)
        self.mocker.count(0, 3)

        self.mocker.replay()

        ms = MessageScheduler({})
        processor = SMSProcessor()
        for i in range(0, 10):
            processor.sendSMS(
                AccountGroupTests.username,
                AccountGroupTests.password,
                "AccessWithPurchase",
                "foo",
                "bar",
                datetime.datetime.now(),
            )
        account = Account.objects.get(name="AccessWithPurchase")
        ag = AccountGroup(account)
        ms.account_groups[account.id] = ag
        ms.enqueue_messages()
        retcode = ag.process_from_queue()
        self.assertEqual(retcode, True)
        self.assertEqual(ag.work.qsize(), 10)
        self.fail("This test does not work and needs to be rewritten")
        self.mocker.verify()

    def test_create_accountThreads(self):
        dummy_AccountThread = self.mocker.replace("singularmsd.AccountThread")

        dummy_AccountThread(ANY, ANY)
        self.mocker.result(dummy_AccountThread)
        self.mocker.count(4)

        dummy_AccountThread.num_threads
        self.mocker.result(4)
        self.mocker.count(4)

        dummy_AccountThread.start()
        self.mocker.result(True)
        self.mocker.count(4)
        self.mocker.replay()

        account = Account.objects.get(name="AccessWithPurchase")
        ag = AccountGroup(account)
        ag.create_accountThreads()
        self.assertEqual(len(ag.threads), 4)
        self.mocker.verify()
Example #42
class TestDelivery(TwistedTestCase):
    """More delivery testing."""
    @inlineCallbacks
    def setUp(self):
        """Set up test."""
        yield super(TestDelivery, self).setUp()
        self.mocker = Mocker()
        self.fake_reactor = DummyReactor()
        self.content = self.mocker.mock()
        self.node_owner_id = 1
        self.node_uuid = uuid.uuid4()
        self.node_hash = "hash:blah"
        self.owner_id = 0
        self.free_bytes = 0
        self.node_shard_id = 'shard1'
        self.node_volume_id = uuid.uuid4()
        self.content_node = self.mocker.mock()
        content_class = lambda _: self.content
        MetricsConnector.register_metrics("sli", instance=ExtendedMetrics())
        MetricsConnector.register_metrics("root", instance=ExtendedMetrics())
        MetricsConnector.register_metrics("user", instance=ExtendedMetrics())
        self.factory = StorageServerFactory(s3_host=None,
                                            s3_port=None,
                                            s3_key=None,
                                            s3_ssl=False,
                                            s3_secret=None,
                                            content_class=content_class,
                                            reactor=self.fake_reactor)

    @inlineCallbacks
    def tearDown(self):
        """Tear down test."""
        MetricsConnector.unregister_metrics()
        try:
            self.mocker.verify()
        finally:
            yield super(TestDelivery, self).tearDown()
            self.mocker.restore()

    @inlineCallbacks
    def test_new_volume_generation_ok(self):
        """Test new volume generation delivery ok."""
        user = self.mocker.mock()
        expect(self.content.get_user_by_id('user_id')).count(1).result(
            succeed(user))
        expect(user.broadcast).count(1).result(lambda *a, **kw: None)

        # test
        self.mocker.replay()
        notif = VolumeNewGeneration('user_id', 'vol_id', 23)
        yield self.factory.deliver_volume_new_generation(notif)

    @inlineCallbacks
    def test_new_volume_generation_not_connected(self):
        """Test new volume generation delivery for a not connected user."""
        expect(self.content.get_user_by_id('user_id')).count(1).result(
            succeed(None))

        # test
        self.mocker.replay()
        notif = VolumeNewGeneration('user_id', 'vol_id', 23)
        yield self.factory.deliver_volume_new_generation(notif)

    @inlineCallbacks
    def test_new_volume_generation_broadcasting_message(self):
        """Test new volume generation delivery with correct message."""
        deferred = defer.Deferred()
        protocol = self.mocker.mock()

        def test(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type,
                             protocol_pb2.Message.VOLUME_NEW_GENERATION)
            self.assertEqual(resp.volume_new_generation.volume, 'vol_id')
            self.assertEqual(resp.volume_new_generation.generation, 66)

            # other session, and generations
            self.mocker.reset()
            expect(protocol.session_id).count(0, 1).result(uuid.uuid4())
            expect(protocol.working_caps).count(0, 1).result(['generations'])
            self.mocker.replay()
            self.assertTrue(filter(protocol))

            # same session, and generations
            self.mocker.reset()
            expect(protocol.session_id).count(0, 1).result(session_id)
            expect(protocol.working_caps).count(0, 1).result(['generations'])
            self.mocker.replay()
            self.assertFalse(filter(protocol))

            deferred.callback(None)

        user = self.mocker.mock()
        expect(self.content.get_user_by_id('user_id')).count(1).result(
            succeed(user))
        expect(user.broadcast).result(test)

        # test
        self.mocker.replay()
        session_id = uuid.uuid4()
        notif = VolumeNewGeneration('user_id', 'vol_id', 66, session_id)
        yield self.factory.deliver_volume_new_generation(notif)
        yield deferred

    @inlineCallbacks
    def test_share_accepted_broadcasting_message(self):
        """Test that ShareAccepted gets broadcast to both users properly."""
        deferred_from = defer.Deferred()
        deferred_to = defer.Deferred()
        share_id = uuid.uuid4()
        from_user = 1
        to_user = 2
        root_id = uuid.uuid4()

        def test_from(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type, protocol_pb2.Message.SHARE_ACCEPTED)
            self.assertEqual(resp.share_accepted.share_id, str(share_id))
            self.assertEqual(resp.share_accepted.answer,
                             protocol_pb2.ShareAccepted.YES)
            deferred_from.callback(None)

        def test_to(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type, protocol_pb2.Message.VOLUME_CREATED)
            self.assertEqual(resp.volume_created.share.share_id, str(share_id))
            self.assertEqual(resp.volume_created.share.subtree, str(root_id))
            self.assertEqual(resp.volume_created.share.direction,
                             protocol_pb2.Shares.TO_ME)
            deferred_to.callback(None)

        user = self.mocker.mock()
        user2 = self.mocker.mock()

        for i in range(2):
            expect(self.content.get_user_by_id(from_user)).result(
                succeed(user))
            expect(self.content.get_user_by_id(to_user)).result(succeed(user2))

        expect(user.id).count(2).result(1)
        expect(user.broadcast).count(1).result(test_from)
        expect(user.username).count(1).result(u"username")
        expect(user.visible_name).count(1).result(u"username")
        expect(user2.id).count(2).result(2)
        expect(user2.broadcast).count(1).result(test_to)

        # test
        self.mocker.replay()
        notif_to = ShareAccepted(share_id, u"name", root_id, from_user,
                                 to_user, "View", True)
        notif_from = ShareAccepted(share_id, u"name", root_id, from_user,
                                   to_user, "View", True)
        yield self.factory.deliver_share_accepted(notif_to,
                                                  recipient_id=to_user)
        yield self.factory.deliver_share_accepted(notif_from,
                                                  recipient_id=from_user)
        yield deferred_from
        yield deferred_to

    @inlineCallbacks
    def test_share_accepted_broadcasting_no_from(self):
        """Test ShareAccepted when the from user isn't present."""
        deferred_to = defer.Deferred()
        share_id = uuid.uuid4()
        to_user = 1
        from_user = 2
        root_id = uuid.uuid4()

        def test_to(resp, filter):
            """Check that the broadcast message info is ok."""
            self.assertEqual(resp.type, protocol_pb2.Message.VOLUME_CREATED)
            self.assertEqual(resp.volume_created.share.share_id, str(share_id))
            self.assertEqual(resp.volume_created.share.subtree, str(root_id))
            self.assertEqual(resp.volume_created.share.direction,
                             protocol_pb2.Shares.TO_ME)
            deferred_to.callback(None)

        user = self.mocker.mock()
        user2 = self.mocker.mock()
        for i in range(2):
            expect(self.content.get_user_by_id(from_user)).result(
                succeed(None))
            expect(self.content.get_user_by_id(to_user)).result(succeed(user2))
        expect(self.content.get_user_by_id(from_user, required=True)).result(
            succeed(user))
        expect(user.username).count(1).result(u"username")
        expect(user.visible_name).count(1).result(u"username")
        expect(user2.id).count(2).result(2)
        expect(user2.broadcast).count(1).result(test_to)
        # test
        self.mocker.replay()
        notif = ShareAccepted(share_id, u"name", root_id, from_user, to_user,
                              "View", True)
        notif2 = ShareAccepted(share_id, u"name", root_id, from_user, to_user,
                               "View", True)
        yield self.factory.deliver_share_accepted(notif,
                                                  recipient_id=from_user)
        yield self.factory.deliver_share_accepted(notif2, recipient_id=to_user)
        yield deferred_to


class Test_sascalc_Prop_calcpmi(MockerTestCase):

    def setUp(self):
        self.centertmp = sasop.Move.center

        self.m = Mocker()
        sasop.Move.center = self.m.mock()
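        # Record a permissive expectation: Move.center may be called with any
        # arguments (ARGS), any number of times, and returns None, so these
        # tests do not depend on the real centering code.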
        sasop.Move.center(ARGS)
        self.m.result(None)
        self.m.count(0,None)

        self.m.replay()

        self.o=sasmol.SasMol(0)

    def assert_list_almost_equal_flip_sign_allowed(self,a,b,places=5):
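        # Eigenvectors are only defined up to an overall sign, so once a sign
        # flip is detected the remaining elements are compared with the
        # flipped sign as well.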
        if (len(a)!=len(b)):
           raise TypeError
        else:
           sign=1
           for i in range(len(a)):
              if isinstance(a[i],(int,float)):
                 if (numpy.isnan(a[i]) and numpy.isnan(b[i])): continue
                 if (a[i]*b[i]<0.0): sign = -1
                 self.assertAlmostEqual(a[i],sign*b[i],places)
              else:
                 self.assert_list_almost_equal_flip_sign_allowed(a[i],b[i],places)

    def reorder_eigens(self, result_eigenvalues, result_eigenvectors):
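        # Sort eigenvalues in descending order, reorder the eigenvectors to
        # match, and flip the sign of the last eigenvector so results can be
        # compared consistently against the expected values.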
        idx=result_eigenvalues.argsort()
        idx=idx[::-1]
        result_eigenvalues = result_eigenvalues[idx]
        result_eigenvectors = result_eigenvectors[idx]
        result_eigenvectors[2]*=-1
        return result_eigenvalues, result_eigenvectors


    def test_one_atom(self):
        return
        '''
        
        self.o.setCoor(numpy.array([[[-1.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print list(result_I), '\n',list(result_eigenvalues), '\n', list(result_eigenvectors)
        expected_I = numpy.array([[156.14, 24.022, 36.032], [24.022, 120.108, -72.065], [36.032, -72.065, 60.054]], floattype)
        expected_eigenvalues = numpy.array([168.151, 168.151, -5.329e-15], floattype)
        expected_eigenvectors = numpy.array([[0.103, -0.812, 0.575], [0.964, 0.148, 0.222], [-0.267, 0.535, 0.802]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(expected_I, result_I, 3)        
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,3)
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)
        '''

    def test_two_centered_atoms(self):
        return
        '''
        self.o.setCoor(numpy.array([[[-1.0, -2.0, -3.0],[1.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C','C'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print list(result_eigenvalues), '\n', list(result_eigenvectors)
        expected_I = numpy.array([[26.,  -4.,  -6.], [-4.,  20., -12.], [-6., -12.,  10.]], floattype)     
        expected_eigenvalues = numpy.array([336.302, 336.302, -7.105e-15], floattype)
        expected_eigenvectors = numpy.array([[-0.103, -0.812, 0.575], [0.964, -0.148, -0.222], [0.267, 0.535, 0.802]],floattype)
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,3)
        #self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)

        '''


    def test_two_uncentered_atoms(self):
        self.o.setCoor(numpy.array([[[-2.0, -2.0, -3.0],[1.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C','N'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print result_I, '\n', result_eigenvalues, '\n',result_eigenvectors
        expected_eigenvalues = numpy.array([400.277, 394.737, 5.54], floattype)
        expected_eigenvectors = numpy.array([[-6.274e-15, -8.321e-01, 5.547e-01], [9.246e-01, -2.114e-01, -3.170e-01], [3.810e-01, 5.129e-01, 7.693e-01]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,3)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)

    def test_six_uncentered_atoms(self):
        self.o.setCoor(numpy.array([[[1.0, 2.0, 3.0],[4.0, 5.0, 6.0],[7.0, 8.0, 9.0],[1.0, 3.0, 5.0],[2.0, 4.0, 6.0],[0.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C','N','O','C','N','O'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print result_I, '\n',result_eigenvalues, '\n',  result_eigenvectors
        expected_eigenvalues = numpy.array([5761.418, 5625.53, 139.66], floattype)
        expected_eigenvectors = numpy.array([[0.351, -0.821, 0.451], [-0.837, -0.059, 0.544],[0.42, 0.568, 0.708]],floattype)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,2)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)


    def test_six_uncentered_atoms_inf1(self):
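        # A util.HUGE sentinel coordinate is expected to make calcpmi() raise.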
        self.o.setCoor(numpy.array([[[util.HUGE, 2.0, 3.0],[4.0, 5.0, 6.0],[7.0, 8.0, 9.0],[1.0, 3.0, 5.0],[2.0, 4.0, 6.0],[0.0, 2.0, 3.0]]],floattype))
        self.o.setMass([1.0, 2.0, 3.2, 3.6, 5.2, 2.8])
        self.o.setNatoms(len(self.o.mass()))
        with self.assertRaises(Exception):
            result = self.o.calcpmi(0)


    def test_six_uncentered_atoms_inf2(self):
        self.o.setCoor(numpy.array([[[util.INF, 2.0, 3.0],[4.0, 5.0, 6.0],[7.0, 8.0, 9.0],[1.0, 3.0, 5.0],[2.0, 4.0, 6.0],[0.0, 2.0, 3.0]]],floattype))
        self.o.setMass([1.0, 2.0, 3.2, 3.6, 5.2, 2.8])
        self.o.setNatoms(len(self.o.mass()))
        with self.assertRaises(Exception):
            result = self.o.calcpmi(0)


    def test_six_uncentered_atoms_nan(self):
        self.o.setCoor(numpy.array([[[util.NAN, 2.0, 3.0],[4.0, 5.0, 6.0],[7.0, 8.0, 9.0],[1.0, 3.0, 5.0],[2.0, 4.0, 6.0],[0.0, 2.0, 3.0]]],floattype))
        self.o.setMass([1.0, 2.0, 3.2, 3.6, 5.2, 2.8])
        self.o.setNatoms(len(self.o.mass()))
        with self.assertRaises(Exception):
            result = self.o.calcpmi(0)


    def test_six_uncentered_atoms_tiny(self):
        self.o.setCoor(numpy.array([[[util.TINY, 2.0, 3.0],[4.0, 5.0, 6.0],[7.0, 8.0, 9.0],[1.0, 3.0, 5.0],[2.0, 4.0, 6.0],[0.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C','N','O','C','N','O'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print list(result_I), '\n', list(result_eigenvalues), '\n', list(result_eigenvectors)
        expected_I = numpy.array([[4675.176, -1324.189, -1572.26 ], [-1324.189,  3932.916, -2256.545], [-1572.26 , -2256.545,  2894.494]], floattype)
        expected_eigenvalues = numpy.array([5748.699, 5591.441, 162.447], floattype)
        expected_eigenvectors = numpy.array([[0.321, -0.821, 0.472], [-0.852, -0.032, 0.523], [0.414, 0.57, 0.709]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(expected_I, result_I, 3)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,2)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)


    def test_six_uncentered_atoms_ZERO(self):
        self.o.setCoor(numpy.array([[[util.ZERO, 2.0, 3.0],[4.0, 5.0, 6.0],[7.0, 8.0, 9.0],[1.0, 3.0, 5.0],[2.0, 4.0, 6.0],[0.0, 2.0, 3.0]]],floattype))
        self.o.setElement(['C','N','O','C','N','O'])
        self.o.setNatoms(len(self.o.element()))
        result = self.o.calcpmi(0)
        result_eigenvalues = result[0]
        result_eigenvectors = result[1].T
        result_I = result[2]
        result_eigenvalues, result_eigenvectors = self.reorder_eigens(result_eigenvalues, result_eigenvectors)
        print list(result_I), '\n',list(result_eigenvalues), '\n', list(result_eigenvectors)
        expected_I = numpy.array([[4675.176, -1324.189, -1572.26 ], [-1324.189,  3932.916, -2256.545], [-1572.26 , -2256.545,  2894.494]], floattype)
        expected_eigenvalues = numpy.array([5748.699, 5591.441, 162.447], floattype)
        expected_eigenvectors = numpy.array([[ 0.321, -0.821,  0.472], [-0.852, -0.032,  0.523], [ 0.414,  0.57 ,  0.709]], floattype)
        self.assert_list_almost_equal_flip_sign_allowed(expected_I, result_I, 3)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvalues, result_eigenvalues,2)
        self.assert_list_almost_equal_flip_sign_allowed(expected_eigenvectors, result_eigenvectors,3)

    def tearDown(self):
        self.m.restore()
        self.m.verify()
        
        sasop.Move.center = self.centertmp
Exemple #44
class Test_unit_subset_Mask_get_dihedral_subset_mask(MockerTestCase): 

   def mock_up(self, Cls_ptch, mthd, mocker, result=None, mmin=0, mmax=None):
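      # Record one expectation on the patched class: calling `mthd` with any
      # arguments returns `result`, between `mmin` and `mmax` times.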
      methodToCall = getattr(Cls_ptch,mthd)
      methodToCall(ARGS)
      mocker.result(result)
      mocker.count(mmin, mmax)

   def mock_up_get_dihedral_subset_mask(self, Cls, mocker, natoms, name, resid):
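      # Patch the molecule so that its natoms(), name() and resid() accessors
      # return the supplied test data during replay.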
      Cls_ptch = mocker.patch(Cls)
      self.mock_up(Cls_ptch, 'natoms', mocker, natoms)
      self.mock_up(Cls_ptch, 'name', mocker, name)
      self.mock_up(Cls_ptch, 'resid', mocker, resid)
      mocker.replay()

   def assert_list_almost_equal(self,a,b,places=5):
        if (len(a)!=len(b)):
           raise TypeError
        else:
           for i in range(len(a)):
              if isinstance(a[i],(int,float,numpy.generic)):
                 if (numpy.isnan(a[i]) and numpy.isnan(b[i])): continue
                 self.assertAlmostEqual(a[i],b[i],places)
              else:
                 self.assert_list_almost_equal(a[i],b[i],places)
 

   def setUp(self):
      self.m=Mocker()
      self.o=system.Molecule(0)

   def test_single_residue_nomask(self):
      '''
      test for a single residue
      no mask for that residue
      '''
      natoms=5
      name=['N','CA','C','O','CB']
      resid=[1]*natoms
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[]
      #
      expected_farray = []
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_single_residue_mask(self):
      '''
      test for a single residue
      mask that residue
      '''
      natoms=5
      name=['N','CA','C','O','CB']
      resid=[1]*natoms
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1]
      #
      expected_farray = [[1,1,1,0,0]]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_nomask(self):
      '''
      test for three residues
      no mask for any residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[]
      #
      expected_farray = []
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_first_one(self):
      '''
      test for three residues
      mask the first residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1]
      #
      expected_farray = [[1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_second_one(self):
      '''
      test for three residues
      mask the second residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[2]
      #
      expected_farray = [[0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0]]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_third_one(self):
      '''
      test for three residues
      mask the third residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[3]
      #
      expected_farray = [[0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0]]
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_first_second(self):
      '''
      test for three residues
      mask the first and second residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1,2]
      #
      expected_farray = [[1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6, [0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0]]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_second_third(self):
      '''
      test for three residues
      mask the second and third residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[2,3]
      #
      expected_farray = [[0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0], [0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0]]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_third_first(self):
      '''
      test for three residues
      mask the third and first residue
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[3,1]
      #
      expected_farray = [[0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0], [1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_three_residues_mask_all(self):
      '''
      test for three residues
      mask all residues
      '''
      natoms=18
      name=['N','CA','C','O','CB','CG']*3
      resid=[1]*6+[2]*6+[3]*6
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[1,2,3]
      #
      expected_farray = [[1,1,1,0,0,0]+[1,0,0,0,0,0]+[0]*6, [0,0,1,0,0,0]+[1,1,1,0,0,0]+[1,0,0,0,0,0], [0,0,0,0,0,0]+[0,0,1,0,0,0]+[1,1,1,0,0,0], ]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_500_residues_nomask(self):
      '''
      test for 500 residues
      mask no residue
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[]
      #
      expected_farray = []
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_500_residues_mask_number300(self):
      '''
      test for 500 residues
      mask the 300th residue
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      nf=300
      flexible_residues=[nf]
      #
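      # The expected mask flags the preceding residue's C, the flexible
      # residue's N/CA/C and the following residue's N (presumably the
      # protein backbone dihedral atoms).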
      expected_farray = [([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)]

      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   @skipIf(os.environ.get('SASMOL_LARGETEST', 'n')=='n',"I am not testing large files")
   def test_500_residues_mask_100to300(self):
      '''
      test for 500 residues
      mask residues 100 to 300
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=range(100,300)
      #
      expected_farray=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def test_500_residues_mask_random_10_residues(self):
      '''
      test for 500 residues
      mask 10 randomly chosen residues
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=[123,12,90,399,1,89,221,78,91,129]
      #
      expected_farray=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   @skipIf(os.environ.get('SASMOL_LARGETEST', 'n')=='n',"I am not testing large files")
   def test_500_residues_mask_all_residues(self):
      '''
      test for 500 residues
      mask all residues
      '''
      name=['N','CA','C','O','CB','CG1','CG2']
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=0
      #
      flexible_residues=range(1,nres-1)
      #
      expected_farray=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0,0,1]+[0]*(natom-3) + [1]*3+[0]*(natom-3) + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_1_residues_rna_mask_none(self):
      '''
      test for rna with 1 residue, mask none
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=1
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1]+[0]*(natom-1) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_1_residues_rna_mask_all(self):
      '''
      test for rna with 1 residue, mask all
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=1
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      flexible_residues=[0]
      #
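      # The expected mask flags the sugar-phosphate backbone atoms of the
      # flexible residue (P, O5', C5', C4', C3', O3').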
      expected_farray = numpy.array([[1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]],numpy.long)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_5_residues_rna_mask_none(self):
      '''
      test for rna with 5 residues, mask none
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=5
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=[]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   def test_5_residues_rna_mask_random(self):
      '''
      test for rna with 5 residues, mask residues 2 and 3
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=5
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=[2,3]
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', map(int,farray.tolist()[0]), '\nexpected_mask:\n',expected_farray[0]
      print 'result_mask:\n', map(int,farray.tolist()[1]), '\nexpected_mask:\n',expected_farray[1]
      self.assert_list_almost_equal(farray, expected_farray)

   def test_5_residues_rna_mask_all(self):
      '''
      test for rna with 5 residues, mask all
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=5
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=range(1,nres-1)
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)

   @skipIf(os.environ.get('SASMOL_LARGETEST', 'n')=='n',"I am not testing large files")
   def test_500_residues_rna_mask_all(self):
      '''
      test for rna with 500 residues, mask all
      '''
      name=['P', 'O1P', 'O2P', "O5'", "C5'", "H5'", "H5''", "C4'", "H4'", "O4'", "C1'", "H1'", 'N1', 'C6', 'H6', 'C2', 'O2', 'N3', 'H3', 'C4', 'O4', 'C5', 'H5', "C2'", "H2''", "O2'", "H2'", "C3'", "H3'", "O3'"]
      natom=len(name)
      nres=500
      name=name*nres
      natoms=natom*nres
      resid=[x/natom for x in range(natoms)]
      resid=numpy.array(resid,numpy.long)
      mtype=1
      #
      expected_farray = []
      flexible_residues=range(1,nres-1)
      for nf in flexible_residues:
         tmp_farray = ([0]*natom)*(nf-1) + [0]*(natom-1)+[1] + [1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1] + [1,0,0,1]+[0]*(natom-4) + ([0]*natom)*(nres-nf-2)
         expected_farray.append(tmp_farray)
      #
      self.mock_up_get_dihedral_subset_mask(self.o, self.m, natoms, name, resid)
      farray = self.o.get_dihedral_subset_mask(flexible_residues,mtype)
      #
      print 'result_mask:\n', list(farray), '\nexpected_mask:\n',expected_farray
      self.assert_list_almost_equal(farray, expected_farray)


   def tearDown(self):
      self.m.restore()
      self.m.verify()


class TestCustodyPeriod(MockTestCase, TestCase):

    def setUp(self):
        super(TestCustodyPeriod, self).setUp()

        self.testcase_mocker = Mocker()
        grok('opengever.base.behaviors.lifecycle')

        # Mock the registry so that we have a static configuration in our
        # tests; we test functionality, not configuration.
        proxy = self.testcase_mocker.mock()
        proxy.custody_periods
        self.testcase_mocker.result([u'0', u'10', u'20', u'30'])
        self.testcase_mocker.count(0, None)

        registry = self.testcase_mocker.mock()
        provideUtility(provides=IRegistry, component=registry)
        registry.forInterface(IBaseCustodyPeriods)
        self.testcase_mocker.result(proxy)
        self.testcase_mocker.count(0, None)

        # we need to register the vocabulary utility in the
        # vocabulary registry manually at this point:
        vocabulary_registry = getVocabularyRegistry()
        field = lifecycle.ILifeCycle['custody_period']
        try:
            vocabulary_registry.get(None, field.vocabularyName)
        except VocabularyRegistryError:
            factory = getUtility(IVocabularyFactory,
                                 name=u'lifecycle_custody_period_vocabulary')
            vocabulary_registry.register(field.vocabularyName, factory)

        # At this stage, only the grok components (adapters, utilities)
        # of the module are registered in the component registry.

        # we need to register any plone.directives.form magic components
        # from the module manually (they are not grokky):
        for factory, name in lifecycle.__form_value_adapters__:
            provideAdapter(factory, name=name)

        self.testcase_mocker.replay()

    def tearDown(self):
        self.testcase_mocker.verify()
        self.testcase_mocker.restore()
        super(TestCustodyPeriod, self).tearDown()

    def _get_term_titles_from_vocabulary(self, voc):
        return [term.title for term in voc._terms]

    def test_configured_field_vocabulary_factory_name(self):
        field = lifecycle.ILifeCycle['custody_period']
        self.assertEqual(field.vocabularyName,
                         u'lifecycle_custody_period_vocabulary')

    def test_vocabulary(self):
        vocfactory = getUtility(IVocabularyFactory,
                                name=u'lifecycle_custody_period_vocabulary')
        self.assertEqual(vocfactory.option_names, [0, 10, 20, 30])

    def test_vocabulary_in_context(self):
        vocfactory = getUtility(IVocabularyFactory,
                                name=u'lifecycle_custody_period_vocabulary')

        request = self.mocker.mock()
        self.expect(request.get('PATH_INFO', ANY)).result('somepath/++add++type')

        context = self.mocker.mock()
        self.expect(context.REQUEST).result(request)
        self.expect(context.custody_period).result(20)
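        # With a current custody_period of 20, only equal or longer periods
        # (20 and 30) should remain selectable, as asserted below.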

        self.replay()

        vocabulary = vocfactory(context)
        self.assertEqual(sorted(self._get_term_titles_from_vocabulary(vocabulary)),
                         [u'20', u'30'])

    def test_validator(self):
        request = self.mocker.mock()
        self.expect(request.get('PATH_INFO', ANY)).result('somepath/++add++type')

        field = lifecycle.ILifeCycle['custody_period']

        context = None
        view = None
        widget = None

        self.replay()

        validator = getMultiAdapter((context, request, view, field, widget), IValidator)
        validator.validate(20)

        with TestCase.assertRaises(self, ConstraintNotSatisfied):
            validator.validate(15)

    def test_validator_in_context(self):
        request = self.mocker.mock()
        self.expect(request.get('PATH_INFO', ANY)).result(
            'somepath/++add++type').count(0, None)

        context = self.mocker.mock()
        self.expect(context.REQUEST).result(request).count(0, None)
        self.expect(context.custody_period).result(20).count(0, None)
        self.expect(context.aq_inner).result(context).count(0, None)
        self.expect(context.aq_parent).result(None).count(0, None)

        field = lifecycle.ILifeCycle['custody_period']

        view = None
        widget = None

        self.replay()

        validator = getMultiAdapter((context, request, view, field, widget), IValidator)
        validator.validate(20)
        validator.validate(30)

        with TestCase.assertRaises(self, ConstraintNotSatisfied):
            validator.validate(10)

    def test_default_value(self):
        field = lifecycle.ILifeCycle['custody_period']

        portal = self.create_dummy()
        directlyProvides(portal, ISiteRoot)

        default_value = getMultiAdapter(
            (portal,  # context
             None,  # request
             None,  # form
             field,  # field
             None,  # Widget
             ),
            IValue,
            name='default')
        self.assertEqual(default_value.get(), 30)

    def test_default_value_in_context(self):
        field = lifecycle.ILifeCycle['custody_period']

        context = self.create_dummy(custody_period=10)
        directlyProvides(context, lifecycle.ILifeCycle)

        default_value = getMultiAdapter(
            (context,  # context
             None,  # request
             None,  # form
             field,  # field
             None,  # Widget
             ),
            IValue,
            name='default')
        self.assertEqual(default_value.get(), 10)


class TestDataLoader(unittest.TestCase):


    def setUp(self):
        self.mocker = Mocker()
        self.file = cStringIO.StringIO()
        self.file2 = cStringIO.StringIO()


    def tearDown(self):
        self.mocker.restore()
        self.mocker.verify()
        self.file.close()
        self.file2.close()


    def test_single_file_happy_path(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        expected = {'418': [{'venue_id': '41059b00f964a520850b1fe3', 'latitude': 37.6164, 'check_in_message': 'empty_message', 'check_in_id': '12', 'longitude': -122.386, 'date': datetime.datetime(2012, 7, 18, 14, 43, 38)}, {'venue_id': '41059b00f964a520850b1fe3', 'latitude': 37.6164, 'check_in_message': 'empty_message', 'check_in_id': '12', 'longitude': -122.386, 'date': datetime.datetime(2012, 7, 18, 14, 43, 38)}]}
        actual = DataLoader.load_check_ins_from_file(self.file)
        self.assertDictEqual(expected, actual)


    def test_invalid_number_of_check_in_parameters(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, "Error in line 2: the line should contain user_id, check-in_id, date, latitude, longitude, venue_id and check-in_message, separated by |")
        

    def test_empty_strings_in_middle(self):
        self.file.write("\n418|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, "Error in line 1: the line should contain user_id, check-in_id, date, latitude, longitude, venue_id and check-in_message, separated by |")
            
       
    def test_empty_strings_in_end(self):
        self.file.write("418|23|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n ")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, "Error in line 2: the line should contain user_id, check-in_id, date, latitude, longitude, venue_id and check-in_message, separated by |")
        

    def test_invalid_date(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|123asd|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, 'Error in line 2: invalid format of date, should be YYYY-MM-DD HH:MM:SS')
        

    def test_longitude_not_a_number(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|45.54|a|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, 'Error in line 2: longitude should be a float number')
        

    def test_longitude_out_of_bounds(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|45.5|-190.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, 'Error in line 2: longitude should be between -90 and 90')
        

    def test_latitude_not_a_number(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|abcd|-122.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, 'Error in line 2: latitude should be a float number')
        

    def test_latitude_out_of_bounds(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|100|-122.386|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, 'Error in line 2: latitude should be between -90 and 90')
        

    def test_invalid_venue(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|34|-122.386||empty_message")
        self.file.seek(0)
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_file(self.file)
        self.assertEqual(cm.exception.message, 'Error in line 2: venue_id can not be an empty string')
        

    def test_single_directory_happy_path(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|13|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        self.file2.write("418|14|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|15|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message")
        self.file2.seek(0)

        mock_glob = self.mocker.replace('glob.glob')
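        # glob.glob is patched during replay: listing "some_directory/*" yields
        # '.', 'file1' and 'file2' without touching the filesystem.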
        mock_glob("some_directory/*")
        self.mocker.result(['.', 'file1', 'file2'])

        mock_open = self.mocker.replace('__builtin__.open')
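        # __builtin__.open is patched as well, so 'file1' and 'file2' resolve
        # to the in-memory StringIO buffers prepared above.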
        mock_open("file1", 'rU')
        self.mocker.result(self.file)
        mock_open("file2", 'rU')
        self.mocker.result(self.file2)

        self.mocker.replay()
        expected_dict = {
            '418': [{'venue_id': '41059b00f964a520850b1fe3', 'latitude': 37.6164, 'check_in_message': 'empty_message', 'check_in_id': '12', 'longitude': -122.386, 'date': datetime.datetime(2012, 7, 18, 14, 43, 38)},
                    {'venue_id': '41059b00f964a520850b1fe3', 'latitude': 45.54, 'check_in_message': 'empty_message', 'check_in_id': '13', 'longitude': 45.6, 'date': datetime.datetime(2012, 7, 18, 12, 34, 45)},
                    {'venue_id': '41059b00f964a520850b1fe3', 'latitude': 37.6164, 'check_in_message': 'empty_message', 'check_in_id': '14', 'longitude': -122.386, 'date': datetime.datetime(2012, 7, 18, 14, 43, 38)},
                    {'venue_id': '41059b00f964a520850b1fe3', 'latitude': 45.54, 'check_in_message': 'empty_message', 'check_in_id': '15', 'longitude': 45.6, 'date': datetime.datetime(2012, 7, 18, 12, 34, 45)}]}
        actual_dict = DataLoader.load_check_ins_from_directory("some_directory")
        self.assertDictEqual(expected_dict, actual_dict)


    def test_same_check_in_ids_in_different_files(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|13|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)
        self.file2.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|15|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message")
        self.file2.seek(0)

        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.', 'file1', 'file2'])

        mock_open = self.mocker.replace('__builtin__.open')
        mock_open("file1", 'rU')
        self.mocker.result(self.file)
        mock_open("file2", 'rU')
        self.mocker.result(self.file2)

        self.mocker.replay()
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_directory("some_directory")
        self.assertEqual(cm.exception.message, 'Error processing file file2: check-in with ID 12 has already been encountered for user 418')
        

    def test_same_check_in_ids_in_same_file(self):
        self.file.write("418|12|2012-07-18 14:43:38|37.6164|-122.386|41059b00f964a520850b1fe3|empty_message\n418|12|2012-07-18 12:34:45|45.54|45.6|41059b00f964a520850b1fe3|empty_message")
        self.file.seek(0)

        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.', 'file1'])

        mock_open = self.mocker.replace('__builtin__.open')
        mock_open("file1", 'rU')
        self.mocker.result(self.file)

        self.mocker.replay()
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_directory("some_directory")
        self.assertEqual(cm.exception.message, 'Error processing file file1: check-in with ID 12 has already been encountered for user 418')
        

    def test_empty_directory(self):
        mock_glob = self.mocker.replace('glob.glob')
        mock_glob("some_directory/*")
        self.mocker.result(['.'])
        self.mocker.replay()
        with self.assertRaises(ValueError) as cm:
            DataLoader.load_check_ins_from_directory("some_directory")
        self.assertEqual(cm.exception.message, 'Error: directory some_directory is empty')