Exemple #1
0
import faker

# Module-level Faker generator pinned to the en_US locale; shared by the
# helpers in this module so all fake data is consistently US-formatted.
faker_instance = faker.Faker('en_US')

def make_fake_contact_data_with_email(email):
    """
        <contactType>_<contactField>

        Valid values for contactType are:
        Registrant
        Admin
        Technical
        Billing

        For each contactType the following contactFields are mandatory:
        FirstName
        LastName
        Email
        PhoneNumber
        Street
        City
        CountryCode
        PostalCode
    """
    fake_data = {
        'FirstName' : faker_instance.first_name(),
        'LastName' : faker_instance.last_name(),
        'PhoneNumber' : "+1.2025550129", # totally fake
        'Email' : email,
        'Street' : faker_instance.street_address(),
        'City' : faker_instance.city(),
 def supports_source(locale):
     """Return True if a Faker for *locale*, with every provider in
     ``providers`` registered, exposes an attribute named ``source``."""
     candidate = faker.Faker(locale)
     for extra_provider in providers:
         candidate.add_provider(extra_provider)
     return hasattr(candidate, source)
class FakerMixin:
    # Class-level Faker instance shared by everything that mixes this in.
    # NOTE: the attribute deliberately reuses the module name `faker`; the
    # module is resolved on the right-hand side before the name is rebound.
    faker = faker.Faker()
Exemple #4
0
# pytest -q tests.py
import faker
import os
import re
import requests
import tqdm

# Utilities
F = faker.Faker()  # shared module-level fake-data generator
tqdm_config = {'ascii': True}  # render progress bars with ASCII-only characters


def recursive_file(path=''):
    """Yield the path of every file under *path*, walking recursively.

    Falls back to the current directory when *path* is empty.

	Example
	=======
		>>> for file in recursive_file('.'):
		... 	print(file)
	"""
    root = path or os.path.curdir
    for dirpath, _dirnames, filenames in os.walk(root):
        for name in filenames:
            yield os.path.join(dirpath, name)


def ping_links(*links, seconds=3, verify=False, **kwargs):
    '''
	Example
	=======
		>>> ping_links('...', '...', seconds=1)
	'''
Exemple #5
0
        Category.objects.create(name=cate)

    for tag in tag_list:
        Tag.objects.create(name=tag)

    print('create a markdown sample post')
    Post.objects.create(
        title='Markdown 与代码高亮测试',
        body=pathlib.Path(BASE_DIR).joinpath(
            'scripts', 'md.sample').read_text(encoding='utf-8'),
        category=Category.objects.create(name='Markdown测试'),
        author=user,
    )

    print('create some faked posts published within the past year')
    fake = faker.Faker()  # English
    for _ in range(100):
        tags = Tag.objects.order_by('?')
        tag1 = tags.first()
        tag2 = tags.last()
        cate = Category.objects.order_by('?').first()
        created_time = fake.date_time_between(
            start_date='-1y',
            end_date="now",
            tzinfo=timezone.get_current_timezone())
        post = Post.objects.create(
            title=fake.sentence().rstrip('.'),
            body='\n\n'.join(fake.paragraphs(10)),
            created_time=created_time,
            category=cate,
            author=user,
Exemple #6
0
def fake():
    """Create and return a brand-new Faker instance."""
    generator = faker.Faker()
    return generator
Exemple #7
0
 def stop(self):
     """Return a faked datetime in the past, no earlier than ``self.start``."""
     generator = faker.Faker()
     return generator.past_datetime(start_date=self.start)
Exemple #8
0
import faker

from django.contrib.auth import get_user_model
from model_bakery.recipe import Recipe

from .models import Dancer, Reservation, Performance

# Resolve the active user model (honours a custom AUTH_USER_MODEL).
User = get_user_model()

# Shared Faker generator. The recipes below pass the bound *methods*
# (e.g. FAKE.user_name, not FAKE.user_name()) so model_bakery calls them
# lazily and each baked instance receives a fresh fake value.
FAKE = faker.Faker()

guardian = Recipe(User, username=FAKE.user_name, email=FAKE.safe_email)

dancer = Recipe(
    Dancer,
    display_name=FAKE.name,
    first_name=FAKE.first_name,
    last_name=FAKE.last_name,
)
Exemple #9
0
 def __init__(self):
     """Initialize the instance with its own Faker generator."""
     self.faker = faker.Faker()
Exemple #10
0
    def test_dump_exepctations(self):
        """Round-trip a pgdumplib dump: build entries and table data, save to
        disk, reload, and verify entries and rows survive intact.

        NOTE(review): the method name typo ("exepctations") is kept —
        renaming would change the test id.
        """
        dump = pgdumplib.new('test', 'UTF8')
        # Pre-data entry for the database itself, with an explicit dump_id
        # so it can be fetched back deterministically below.
        entry = dump.add_entry(None,
                               'postgres',
                               constants.SECTION_PRE_DATA,
                               'postgres',
                               'DATABASE',
                               """\
            CREATE DATABASE postgres
              WITH TEMPLATE = template0
                   ENCODING = 'UTF8'
                   LC_COLLATE = 'en_US.utf8'
                   LC_CTYPE = 'en_US.utf8';""",
                               'DROP DATABASE postgres',
                               dump_id=1024)

        # A COMMENT entry that depends on the database entry above.
        dump.add_entry(None,
                       'DATABASE postgres',
                       constants.SECTION_PRE_DATA,
                       'postgres',
                       'COMMENT',
                       """\
            COMMENT ON DATABASE postgres
                 IS 'default administrative connection database';""",
                       dependencies=[entry.dump_id])

        # Table whose data is written and later read back.
        example = dump.add_entry(
            'public', 'example', constants.SECTION_PRE_DATA, 'postgres',
            'TABLE', 'CREATE TABLE public.example (\
              id UUID NOT NULL PRIMARY KEY, \
              created_at TIMESTAMP WITH TIME ZONE, \
              value TEXT NOT NULL);', 'DROP TABLE public.example')

        columns = 'id', 'created_at', 'value'

        # Faked UTC timestamps for the row data.
        fake = faker.Faker()
        fake.add_provider(date_time)

        rows = [(uuid.uuid4(), fake.date_time(tzinfo=tz.tzutc()), 'foo'),
                (uuid.uuid4(), fake.date_time(tzinfo=tz.tzutc()), 'bar'),
                (uuid.uuid4(), fake.date_time(tzinfo=tz.tzutc()), 'baz'),
                (uuid.uuid4(), fake.date_time(tzinfo=tz.tzutc()), 'qux')]

        with dump.table_data_writer(example, columns) as writer:
            for row in rows:
                writer.append(*row)

        # Extra row with a NULL value, appended via a *second* writer
        # context to verify the same writer is reused for the table.
        row = (uuid.uuid4(), fake.date_time(tzinfo=tz.tzutc()), None)
        rows.append(row)

        # Append a second time to get same writer
        with dump.table_data_writer(example, columns) as writer:
            writer.append(*row)

        dump.save('build/data/dump.test')

        test_file = pathlib.Path('build/data/dump.test')
        self.assertTrue(test_file.exists())

        # Reload and verify: entry metadata and all appended rows survive.
        dump = pgdumplib.load(test_file, converters.SmartDataConverter)
        entry = dump.get_entry(1024)
        self.assertEqual(entry.desc, 'DATABASE')
        self.assertEqual(entry.owner, 'postgres')
        self.assertEqual(entry.tag, 'postgres')
        values = [row for row in dump.table_data('public', 'example')]
        self.assertListEqual(values, rows)
Exemple #11
0
class GetReportFileTests(MasuTestCase):
    """Test Cases for the _get_report_files celery task.

    The identical eight-argument call to ``_get_report_files`` was
    copy-pasted across every test; it is factored into
    :meth:`_call_get_report_files` so each test states only what differs.
    """

    fake = faker.Faker()

    def _call_get_report_files(self, account):
        """Invoke _get_report_files with the standard fake kwargs shared by these tests."""
        return _get_report_files(
            Mock(),
            customer_name=self.fake.word(),
            authentication=account,
            provider_type='AWS',
            report_name=self.fake.word(),
            provider_uuid=self.aws_provider_uuid,
            billing_source=self.fake.word(),
        )

    @patch(
        'masu.processor._tasks.download.ReportDownloader', return_value=FakeDownloader
    )
    def test_get_report(self, fake_downloader):
        """Test that the task returns a non-empty list of report files."""
        account = fake_arn(service='iam', generate_account_id=True)
        report = self._call_get_report_files(account)

        self.assertIsInstance(report, list)
        self.assertGreater(len(report), 0)

    @patch(
        'masu.processor._tasks.download.ReportDownloader', return_value=FakeDownloader
    )
    def test_disk_status_logging(self, fake_downloader):
        """Test task for logging when temp directory exists."""
        logging.disable(logging.NOTSET)
        os.makedirs(Config.TMP_DIR, exist_ok=True)

        account = fake_arn(service='iam', generate_account_id=True)
        expected = 'INFO:masu.processor._tasks.download:Available disk space'
        with self.assertLogs('masu.processor._tasks.download', level='INFO') as logger:
            self._call_get_report_files(account)
            # The expected text is a fragment, so scan every captured record.
            statement_found = any(expected in log for log in logger.output)
            self.assertTrue(statement_found)

        shutil.rmtree(Config.TMP_DIR, ignore_errors=True)

    @patch(
        'masu.processor._tasks.download.ReportDownloader', return_value=FakeDownloader
    )
    def test_disk_status_logging_no_dir(self, fake_downloader):
        """Test task for logging when temp directory does not exist."""
        logging.disable(logging.NOTSET)

        # NOTE(review): Config.PVC_DIR is mutated and never restored, which
        # can leak into later tests — confirm whether MasuTestCase resets it.
        Config.PVC_DIR = '/this/path/does/not/exist'

        account = fake_arn(service='iam', generate_account_id=True)
        expected = (
            'INFO:masu.processor._tasks.download:Unable to find'
            + f' available disk space. {Config.PVC_DIR} does not exist'
        )
        with self.assertLogs('masu.processor._tasks.download', level='INFO') as logger:
            self._call_get_report_files(account)
            self.assertIn(expected, logger.output)

    @patch(
        'masu.processor._tasks.download.ReportDownloader._set_downloader',
        side_effect=Exception('only a test'),
    )
    def test_get_report_exception(self, fake_downloader):
        """Test that a downloader exception propagates out of the task."""
        account = fake_arn(service='iam', generate_account_id=True)

        with self.assertRaises(Exception):
            self._call_get_report_files(account)

    @patch(
        'masu.processor._tasks.download.ReportDownloader._set_downloader',
        return_value=FakeDownloader,
    )
    @patch(
        'masu.database.provider_db_accessor.ProviderDBAccessor.get_setup_complete',
        return_value=True,
    )
    def test_get_report_with_override(self, fake_accessor, fake_report_files):
        """Test _get_report_files on non-initial load with override set."""
        Config.INGEST_OVERRIDE = True
        Config.INITIAL_INGEST_NUM_MONTHS = 5
        initial_month_qty = Config.INITIAL_INGEST_NUM_MONTHS

        account = fake_arn(service='iam', generate_account_id=True)
        with patch.object(ReportDownloader, 'get_reports') as download_call:
            self._call_get_report_files(account)

            download_call.assert_called_with(initial_month_qty)

        # Restore the defaults changed above.
        Config.INGEST_OVERRIDE = False
        Config.INITIAL_INGEST_NUM_MONTHS = 2

    @patch(
        'masu.processor._tasks.download.ReportDownloader._set_downloader',
        return_value=FakeDownloader,
    )
    @patch(
        'masu.database.provider_db_accessor.ProviderDBAccessor.get_setup_complete',
        return_value=True,
    )
    def test_get_report_without_override(self, fake_accessor, fake_report_files):
        """Test _get_report_files for two months."""
        initial_month_qty = 2

        account = fake_arn(service='iam', generate_account_id=True)
        with patch.object(ReportDownloader, 'get_reports') as download_call:
            self._call_get_report_files(account)

            download_call.assert_called_with(initial_month_qty)

    @patch('masu.processor._tasks.download.ProviderStatus.set_error')
    @patch(
        'masu.processor._tasks.download.ReportDownloader._set_downloader',
        side_effect=ReportDownloaderError('only a test'),
    )
    def test_get_report_exception_update_status(self, fake_downloader, fake_status):
        """Test that status is updated when an exception is raised."""
        account = fake_arn(service='iam', generate_account_id=True)

        try:
            self._call_get_report_files(account)
        except ReportDownloaderError:
            pass
        fake_status.assert_called()

    @patch('masu.processor._tasks.download.ProviderStatus.set_status')
    @patch('masu.processor._tasks.download.ReportDownloader', spec=True)
    def test_get_report_update_status(self, fake_downloader, fake_status):
        """Test that status is updated when downloading is complete."""
        account = fake_arn(service='iam', generate_account_id=True)

        self._call_get_report_files(account)
        fake_status.assert_called_with(ProviderStatusCode.READY)
Exemple #12
0
class RateSerializerTest(IamTestCase):
    """Rate serializer tests.

    BUG FIX: the storage-rate tests previously performed the serialize/save
    outside their ``for storage_rate`` loop, so only the *last* storage
    metric was ever validated; the check now runs once per metric.

    The fourteen copy-pasted serialize-and-assert stanzas are factored into
    :meth:`_assert_rate_invalid` and :meth:`_assert_rate_saves`.
    """

    fake = faker.Faker()

    def setUp(self):
        """Set up the tests: an authenticated user and an OCP provider."""
        super().setUp()
        request = self.request_context['request']
        serializer = UserSerializer(data=self.user_data,
                                    context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            user = serializer.save()
            request.user = user

        provider_data = {
            'name': 'test_provider',
            'type': Provider.PROVIDER_OCP,
            'authentication': {
                'provider_resource_name': self.fake.word()
            }
        }
        serializer = ProviderSerializer(data=provider_data,
                                        context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            self.provider = serializer.save()

    def tearDown(self):
        """Clean up test cases."""
        with tenant_context(self.tenant):
            Rate.objects.all().delete()
            RateMap.objects.all().delete()

    def _assert_rate_invalid(self, rate):
        """Serialize ``rate`` inside the tenant and assert a ValidationError."""
        with tenant_context(self.tenant):
            serializer = RateSerializer(data=rate)
            with self.assertRaises(serializers.ValidationError):
                if serializer.is_valid(raise_exception=True):
                    serializer.save()

    def _assert_rate_saves(self, rate):
        """Serialize ``rate`` inside the tenant and assert it persists with a uuid."""
        with tenant_context(self.tenant):
            instance = None
            serializer = RateSerializer(data=rate)
            if serializer.is_valid(raise_exception=True):
                instance = serializer.save()

            self.assertIsNotNone(instance)
            self.assertIsNotNone(instance.uuid)

    def test_uuid_key_related_field(self):
        """Test the uuid key related field."""
        uuid_field = UUIDKeyRelatedField(queryset=Provider.objects.all(),
                                         pk_field='uuid')
        self.assertFalse(uuid_field.use_pk_only_optimization())
        self.assertEqual(self.provider.uuid,
                         uuid_field.to_internal_value(self.provider.uuid))
        self.assertEqual(self.provider.uuid,
                         uuid_field.to_representation(self.provider))
        self.assertEqual(self.provider.uuid,
                         uuid_field.display_value(self.provider))

    def test_error_on_invalid_provider(self):
        """Test error with an invalid provider id."""
        rate = {
            'provider_uuids': ['1dd7204c-72c4-4ec4-95bc-d5c447688b27'],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_MEM_GB_USAGE_HOUR
            },
            'tiered_rate': [{
                'value': round(Decimal(random.random()), 6),
                'unit': 'USD',
                'usage': {
                    'usage_start': None,
                    'usage_end': None
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_on_invalid_metric(self):
        """Test error on an invalid metric rate."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': 'invalid_metric'
            },
            'tiered_rate': [{
                'value': round(Decimal(random.random()), 6),
                'unit': 'USD',
                'usage': {
                    'usage_start': None,
                    'usage_end': None
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_on_usage_end_larger_then_start(self):
        """Test error on a larger usage_end then usage_start ."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'value': round(Decimal(random.random()), 6),
                'unit': 'USD',
                'usage': {
                    'usage_start': 5,
                    'usage_end': 10
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_on_rate_type(self):
        """Test error when trying to create an invalid rate input."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'invalid_rate': {
                'value': round(Decimal(random.random()), 6),
                'unit': 'USD'
            }
        }
        self._assert_rate_invalid(rate)

    def test_error_on_negative_rate(self):
        """Test error when trying to create an negative rate input."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'value': (round(Decimal(random.random()), 6) * -1),
                'unit': 'USD',
                'usage': {
                    'usage_start': None,
                    'usage_end': None
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_no_rate(self):
        """Test error when trying to create an empty rate."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
        }
        self._assert_rate_invalid(rate)

    def test_error_neg_tier_value(self):
        """Test error when trying to create a negative tiered value."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': (round(Decimal(random.random()), 6) * -1),
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 20.0
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_neg_tier_usage_start(self):
        """Test error when trying to create a negative tiered usage_start."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 1.0,
                'usage': {
                    'usage_start': (round(Decimal(random.random()), 6) * -1),
                    'usage_end': 20.0
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_neg_tier_usage_end(self):
        """Test error when trying to create a negative tiered usage_end."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 1.0,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': (round(Decimal(random.random()), 6) * -1)
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_error_tier_usage_end_less_than(self):
        """Test error when trying to create a tiered usage_end less than usage_start."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 1.0,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 3.0
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_create_cpu_core_per_hour_tiered_rate(self):
        """Test creating a cpu_core_per_hour rate."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 0.22,
                'usage': {
                    'usage_start': None,
                    'usage_end': 10.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': None
                }
            }]
        }
        self._assert_rate_saves(rate)

    def test_tiered_rate_null_start_end(self):
        """Test creating a rate with out a start and end."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 0.22,
                'usage': {
                    'usage_start': 0.0,
                    'usage_end': 7.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 20.0
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_tiered_rate_with_gaps(self):
        """Test creating a tiered rate with a gap between the tiers."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 0.22,
                'usage': {
                    'usage_start': None,
                    'usage_end': 7.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage_start': 10.0,
                'usage_end': None
            }]
        }
        self._assert_rate_invalid(rate)

    def test_create_storage_tiered_rate(self):
        """Test creating a storage tiered rate for each storage metric."""
        storage_rates = (
            CostModelMetricsMap.OCP_METRIC_STORAGE_GB_REQUEST_MONTH,
            CostModelMetricsMap.OCP_METRIC_STORAGE_GB_USAGE_MONTH)
        for storage_rate in storage_rates:
            rate = {
                'provider_uuids': [self.provider.uuid],
                'metric': {
                    'name': storage_rate
                },
                'tiered_rate': [{
                    'unit': 'USD',
                    'value': 0.22,
                    'usage': {
                        'usage_start': None,
                        'usage_end': 10.0
                    }
                }, {
                    'unit': 'USD',
                    'value': 0.26,
                    'usage': {
                        'usage_start': 10.0,
                        'usage_end': None
                    }
                }]
            }
            # BUG FIX: validate inside the loop so *every* storage metric is
            # exercised, not just the last one built.
            self._assert_rate_saves(rate)

    def test_create_storage_no_tiers_rate(self):
        """Test creating a non tiered storage rate for each storage metric."""
        storage_rates = (
            CostModelMetricsMap.OCP_METRIC_STORAGE_GB_REQUEST_MONTH,
            CostModelMetricsMap.OCP_METRIC_STORAGE_GB_USAGE_MONTH)
        for storage_rate in storage_rates:
            rate = {
                'provider_uuids': [self.provider.uuid],
                'metric': {
                    'name': storage_rate
                },
                'tiered_rate': [{
                    'unit': 'USD',
                    'value': 0.22
                }]
            }
            # BUG FIX: validate inside the loop so *every* storage metric is
            # exercised, not just the last one built.
            self._assert_rate_saves(rate)

    def test_tiered_rate_with_overlaps(self):
        """Test creating a tiered rate with a overlaps between the tiers."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 0.22,
                'usage': {
                    'usage_start': None,
                    'usage_end': 10.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 5.0,
                    'usage_end': 20.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 20.0,
                    'usage_end': None
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_tiered_rate_with_duplicate(self):
        """Test creating a tiered rate with duplicate tiers."""
        rate = {
            'provider_uuids': [self.provider.uuid],
            'metric': {
                'name': CostModelMetricsMap.OCP_METRIC_CPU_CORE_USAGE_HOUR
            },
            'tiered_rate': [{
                'unit': 'USD',
                'value': 0.22,
                'usage': {
                    'usage_start': None,
                    'usage_end': 10.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 20.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 20.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 20.0,
                    'usage_end': None
                }
            }]
        }
        self._assert_rate_invalid(rate)

    def test_get_metric_display_data(self):
        """Test the display data helper function."""
        serializer = RateSerializer(data=None)

        for metric_choice in CostModelMetricsMap.METRIC_CHOICES:
            response = serializer._get_metric_display_data(metric_choice[0])
            self.assertIsNotNone(response.get('unit'))
            self.assertIsNotNone(response.get('display_name'))
Exemple #13
0
 def random_user():
     """Build a UserData with a freshly faked email and password."""
     generator = faker.Faker()
     return UserData(
         username=generator.email(),
         password=generator.password(),
     )
Exemple #14
0
 def _default_faker(self):
     """Create a Faker instance configured with the HeadFake locale."""
     from headfake import HeadFake
     return faker.Faker(HeadFake.locale)
Exemple #15
0
def populate_data(inflate: int = 0, engine: str = "sqlite"):
    """
    Populate the database with some example data.

    :param inflate:
        If set, this number of extra rows are inserted containing dummy data.
        This is useful for testing.

    :param engine:
        Database backend name; "postgres" triggers a sequence reset after
        the explicit-ID director inserts.

    """
    # Add some rows
    Director.insert(*[Director(
        **d) for d in DIRECTORS]).run_sync()  # type: ignore # noqa: E501
    Movie.insert(*[Movie(**m) for m in MOVIES]).run_sync()  # type: ignore
    Studio.insert(*[Studio(**s) for s in STUDIOS]).run_sync()  # type: ignore

    if engine == "postgres":
        # We need to update the sequence, as we explicitly set the IDs for the
        # directors we just inserted
        Director.raw("SELECT setval('director_id_seq', max(id)) FROM director"
                     ).run_sync()

    # Create a user for testing login
    user = User(
        username="******",
        password="******",
        admin=True,
        email="*****@*****.**",
        active=True,
    )
    user.save().run_sync()

    if inflate:
        try:
            import faker
        except ImportError:
            print("Install faker to use this feature: "
                  "`pip install piccolo_admin[faker]`")
        else:
            fake = faker.Faker()
            # Insert the dummy rows in chunks to bound per-query size.
            remaining = inflate
            chunk_size = 100

            while remaining > 0:
                # Shrink the final chunk so exactly `inflate` rows are added.
                if remaining < chunk_size:
                    chunk_size = remaining
                    remaining = 0
                else:
                    remaining = remaining - chunk_size

                directors = []
                genders = ["m", "f", "n"]
                for _ in range(chunk_size):
                    # Pick a gender, then use the matching name generator.
                    gender = random.choice(genders)
                    if gender == "m":
                        name = fake.name_male()
                    elif gender == "f":
                        name = fake.name_female()
                    else:
                        name = fake.name_nonbinary()
                    directors.append(Director(name=name, gender=gender))

                Director.insert(*directors).run_sync()

                # Fetch the primary keys of the directors just inserted, so
                # the fake movies below can reference them.
                director_ids = (Director.select(
                    Director._meta.primary_key).order_by(
                        Director._meta.primary_key,
                        ascending=False).limit(chunk_size).output(
                            as_list=True).run_sync())

                movies = []
                genres = [i.value for i in Movie.Genre]
                for _ in range(chunk_size):
                    # Skewed sample: most movies get zero nominations.
                    oscar_nominations = random.sample(
                        [0, 0, 0, 0, 0, 1, 1, 3, 5], 1)[0]
                    won_oscar = oscar_nominations > 0
                    # Oscar winners get a high rating (8.0-10.0).
                    rating = (random.randint(80, 100)
                              if won_oscar else random.randint(1, 100)) / 10

                    movie = Movie(
                        name="{} {}".format(
                            fake.word().title(),
                            fake.word(ext_word_list=MOVIE_WORDS),
                        ),
                        rating=rating,
                        duration=datetime.timedelta(
                            minutes=random.randint(60, 210)),
                        director=random.sample(director_ids, 1)[0],
                        oscar_nominations=oscar_nominations,
                        won_oscar=won_oscar,
                        description=fake.sentence(30),
                        release_date=fake.date_time(),
                        box_office=decimal.Decimal(
                            str(random.randint(10, 1500) / 10)),
                        barcode=random.randint(1_000_000_000, 9_999_999_999),
                        genre=random.choice(genres),
                    )
                    movies.append(movie)

                Movie.insert(*movies).run_sync()
Exemple #16
0
import random
from copy import deepcopy

from bson.objectid import ObjectId
import faker


fake = faker.Faker('en_US')

# Generate basics

def generate_ts():
    """Return a fake timestamp from this month, formatted like an ISO-8601 UTC string."""
    sampled = fake.date_time_this_month(before_now=True, after_now=False, tzinfo=None)
    return '{}.000Z'.format(str(sampled).replace(' ', 'T'))


def generate_id(n=24):
    """Return a new BSON ObjectId rendered as its 24-character hex string.

    NOTE(review): ``n`` is accepted but unused — ObjectId strings are
    always 24 characters; confirm callers don't expect other lengths.
    """
    return str(ObjectId())


def generate_phone():
    """Return a fake 10-digit phone string: a known area code plus 7 random digits."""
    area_code = random.choice(['314', '636', '618', '217'])
    digits = str(fake.pydecimal(left_digits=10, right_digits=0, positive=True))[:7]
    return area_code + digits


def generate_skills(n=3):
    """Return a list of ``n`` fake job titles."""
    jobs = list()
    for _ in range(n):
        jobs.append(fake.job())
    return jobs


def generate_lang():
    """Pick one of the supported languages at random."""
    languages = ('Spanish', 'English', 'Bosnian')
    return random.choice(languages)
Exemple #17
0
 def __init__(self, settings):
     """Create the default-locale Faker instance used for data generation.

     NOTE(review): ``settings`` is accepted but not used in this method.
     """
     self.faker = faker.Faker()
import os
import json
import time
import random
from functools import reduce
from elasticsearch import Elasticsearch, helpers
from elasticsearch.exceptions import *
import sys
import faker
import pypinyin

from utils import isp2en

# Python 2 idiom: force the default string encoding to UTF-8.
# (reload/setdefaultencoding do not exist like this on Python 3.)
reload(sys)
sys.setdefaultencoding('utf-8')
# Chinese-locale fake data generator.
f = faker.Faker(locale='zh_CN')

# ----------- Parameters that need to be modified -----------
es_host = '192.168.10.201'
token = '4a859fff6e5c4521aab187eee1cfceb8'
appname = 'iprobe'
doc_type = 'udp'
# Daily index name, e.g. cc-iprobe-udp-<token>-2024.01.31
index_name = 'cc-{appname}-{doc_type}-{token}-{suffix}'.format(
    appname=appname,
    doc_type=doc_type,
    token=token,
    suffix=time.strftime('%Y.%m.%d'))
data_file_name = 'iprobe-udp.txt'
# Number of documents per bulk request.
request_body_size = 100
# ------------------------------------
Exemple #19
0
import requests
import faker

from random import randint

# French-locale generator so customer names look plausible.
fake = faker.Faker("fr_FR")

URL = "http://127.0.0.1:8000/api/v1/contract/create/"

# POST ten fake contract-creation requests to the local API and echo
# each JSON response.
for _ in range(10):

    DATA = {
        "customer": {
            "name": fake.last_name(),
            "first_name": fake.first_name(),
            "age": randint(18, 70),
        },
        "insurance": "MGEN",
    }

    req = requests.post(URL, json=DATA, timeout=5.0)
    print(req.json())
Exemple #20
0
import pika

# Name reused for the exchange and the routing key below.
case = 'publisher_confirms_resend'

#  Connection open
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

#  Confirm — enable publisher confirms so basic_publish reports delivery.
channel.confirm_delivery()

#  Exchange
channel.exchange_declare(exchange=case, exchange_type='direct')

# NOTE(review): only `pika` is imported above — confirm `faker` is in
# scope here, otherwise this line raises NameError.
message = faker.Faker().text()


def send_message():
    """Publish the prepared message to the exchange and return the broker's response."""
    return channel.basic_publish(exchange=case,
                                 body=message,
                                 routing_key=case,
                                 mandatory=True)


consumer_running = send_message()
if consumer_running:
    print('Consumer was up. Message was sent.')
    connection.close()
else:
Exemple #21
0
)
'''

# Parameterized insert for the user5 table.
insert_table_sql = '''\
insert into user5(username, nickname, password, address, birthday, company, job, telephone)
values (?, ?, ?, ?, ?, ?, ?, ?)
'''

# Select every row whose job is "civil servant" (公务员).
select_public_servant_sql = '''\
select *
from user5
where job = '公务员'
'''

# Prepare the mock-data generator (Chinese locale).
fake = faker.Faker('zh_CN')
# Set a seed value; without one the random data differs on every run

##fake.Faker.seed(47)

db_file_location = r'c:\test.accdb'
# This uses Python 3.5+ syntax; with an older Python, change it to the plain form
connection = pyodbc.connect(
    rf'Driver={{Microsoft Access Driver (*.mdb, *.accdb)}};DBQ={db_file_location};'
)

connection.autocommit = True

# When creating the table for the first time, set this to False
table_exists = False
if not table_exists:
Exemple #22
0
def factory(model_name):
    """Generate and print a factory definition for the given model.

    Looks up ``model_name`` on ``models``, maps each column to a faker
    provider based on its name or SQLAlchemy type, collects relationship
    metadata, and renders ``scripts/factory.template`` to stdout.

    :param model_name: Name of a model class defined in ``models``.
    """

    import faker

    model = getattr(models, model_name, None)
    if not model:
        print("The model not found")
        exit()

    # Columns a factory should never auto-populate.
    exclude_list = ['id', 'created_at', 'updated_at']
    exclude_columns = list()
    meta = list()
    cols = [c for c in model.__table__.columns]
    f = faker.Faker()
    for col in cols:
        if col.name in exclude_list:
            exclude_columns.append(col)
            continue
        if bool(col.foreign_keys):
            # Foreign keys are handled through relationships, not providers.
            exclude_columns.append(col)
            continue

        # Reset per column. Previously ``_meta`` was only assigned inside
        # the if/elif chain but appended unconditionally, so a column whose
        # type matched no branch either raised NameError (first iteration)
        # or re-appended the previous column's stale metadata.
        _meta = None
        p = _find_mapping_provider(col.name)
        if p:
            _meta = dict(col=col, provider=p)
        elif getattr(f, col.name, None):
            # The column name itself names a faker provider (e.g. "email").
            _meta = dict(col=col, provider=col.name)
        elif isinstance(col.type, sa.types.String):
            _meta = dict(col=col, provider="word")
        elif isinstance(col.type, sa.types.Integer):
            _meta = dict(col=col, provider="pyint")
        elif isinstance(col.type, sa.types.DateTime):
            _meta = dict(col=col, provider="date_this_year")
        elif isinstance(col.type, sa.types.DECIMAL):
            _meta = dict(col=col, provider="pydecimal")
        elif isinstance(col.type, sa.types.Text):
            _meta = dict(col=col, provider="text")
        elif isinstance(col.type, sa.types.Boolean):
            _meta = dict(col=col, provider="pybool")
        elif isinstance(col.type, sa.types.Date):
            _meta = dict(col=col, provider="date_this_month")
        elif isinstance(col.type, sa.types.Time):
            _meta = dict(col=col, provider="time_object")
        elif isinstance(col.type, sa.types.Float):
            _meta = dict(col=col, provider="pyfloat")
        elif isinstance(col.type, sa.types.JSON):
            _meta = dict(col=col, wtform_type="simple_profile")
        if _meta is not None:
            meta.append(_meta)

    one_relationships = list()
    many_relationships = list()
    # find relationships
    for r in inspect(model).relationships:
        _r = dict(key=r.key, cls=r.mapper.class_.__name__)
        if 'TOONE' in r.direction.name:
            one_relationships.append(_r)
            continue
        many_relationships.append(_r)  # relationship name

    template_path = os.path.join(os.getcwd(), 'scripts', "factory.template")

    with open(template_path, 'r') as template:
        t = template.read()
        temp = jinja2.Template(t)
        print(
            temp.render(model_name=model.__name__,
                        many_relationships=many_relationships,
                        one_relationships=one_relationships,
                        exclude_columns=exclude_columns,
                        columns=meta))
Exemple #23
0
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE','staticfilepractice.settings')



import django
django.setup()
## faker script
import random
import faker
from firstapp.models import  Topic,AccessRecord,Webpage

topic=['Torrent','Files','search','urli','python','java']
fakegen=faker.Faker()
def addTopic():
    """Get-or-create a Topic with a randomly chosen name, save it, and return it."""
    chosen_name = random.choice(topic)
    new_topic, _created = Topic.objects.get_or_create(Top_name=chosen_name)
    new_topic.save()
    return new_topic
def populate(n=5):
    """Create ``n`` fake Webpage/AccessRecord rows attached to random topics."""
    for _ in range(n):
        new_topic = addTopic()
        # Generate fake attributes for the web page.
        page_url = fakegen.url()
        visit_date = fakegen.date()
        company_name = fakegen.company()
        page, _created = Webpage.objects.get_or_create(
            topic=new_topic, name=company_name, url=page_url)
        AccessRecord.objects.get_or_create(name=page, date=visit_date)
# Script entry point: seed the database with 20 fake records.
if __name__=='__main__':
    print("Start populating")
    populate(20)
Exemple #24
0
class OrchestratorTest(MasuTestCase):
    """Test Cases for the Orchestrator object."""

    # Shared fake-data generator for building the mock account fixture.
    fake = faker.Faker()

    def setUp(self):
        """Set up shared variables."""
        super().setUp()
        self.aws_credentials = self.aws_provider.authentication.credentials
        self.aws_data_source = self.aws_provider.billing_source.data_source
        self.azure_credentials = self.azure_provider.authentication.credentials
        self.azure_data_source = self.azure_provider.billing_source.data_source
        self.gcp_credentials = self.gcp_provider.authentication.credentials
        self.gcp_data_source = self.gcp_provider.billing_source.data_source
        self.ocp_credentials = [
            name[0] for name in Provider.objects.values_list(
                "authentication__credentials")
        ]
        self.ocp_data_source = {}
        # Single fake AWS account used by the mocked-accessor tests below.
        self.mock_accounts = [{
            "credentials": {
                "role_arn": fake_arn(service="iam", generate_account_id=True)
            },
            "data_source": {
                "bucket": self.fake.word()
            },
            "customer_name": self.fake.word(),
            "provider_type": Provider.PROVIDER_AWS,
            "schema_name": self.fake.word(),
        }]

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_initializer(self, mock_inspect):  # noqa: C901
        """Test to init."""
        orchestrator = Orchestrator()
        provider_count = Provider.objects.filter(active=True).count()
        if len(orchestrator._accounts) != provider_count:
            self.fail("Unexpected number of test accounts")

        # Each account's credentials/data source must match its provider type.
        for account in orchestrator._accounts:
            with self.subTest(provider_type=account.get("provider_type")):
                if account.get("provider_type") in (
                        Provider.PROVIDER_AWS, Provider.PROVIDER_AWS_LOCAL):
                    self.assertEqual(account.get("credentials"),
                                     self.aws_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.aws_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                elif account.get("provider_type") == Provider.PROVIDER_OCP:
                    self.assertIn(account.get("credentials"),
                                  self.ocp_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.ocp_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                elif account.get("provider_type") in (
                        Provider.PROVIDER_AZURE,
                        Provider.PROVIDER_AZURE_LOCAL):
                    self.assertEqual(account.get("credentials"),
                                     self.azure_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.azure_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                elif account.get("provider_type") in (
                        Provider.PROVIDER_GCP, Provider.PROVIDER_GCP_LOCAL):
                    self.assertEqual(account.get("credentials"),
                                     self.gcp_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.gcp_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                else:
                    self.fail("Unexpected provider")

        if len(orchestrator._polling_accounts) != 3:
            self.fail("Unexpected number of listener test accounts")

        # Polling accounts exclude OCP; same per-provider matching as above.
        for account in orchestrator._polling_accounts:
            with self.subTest(provider_type=account.get("provider_type")):
                if account.get("provider_type") in (
                        Provider.PROVIDER_AWS, Provider.PROVIDER_AWS_LOCAL):
                    self.assertEqual(account.get("credentials"),
                                     self.aws_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.aws_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                elif account.get("provider_type") in (
                        Provider.PROVIDER_AZURE,
                        Provider.PROVIDER_AZURE_LOCAL):
                    self.assertEqual(account.get("credentials"),
                                     self.azure_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.azure_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                elif account.get("provider_type") in (
                        Provider.PROVIDER_GCP, Provider.PROVIDER_GCP_LOCAL):
                    self.assertEqual(account.get("credentials"),
                                     self.gcp_credentials)
                    self.assertEqual(account.get("data_source"),
                                     self.gcp_data_source)
                    self.assertEqual(account.get("customer_name"), self.schema)
                else:
                    self.fail("Unexpected provider")

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.external.report_downloader.ReportDownloader._set_downloader",
           return_value=FakeDownloader)
    @patch("masu.external.accounts_accessor.AccountsAccessor.get_accounts",
           return_value=[])
    def test_prepare_no_accounts(self, mock_downloader, mock_accounts_accessor,
                                 mock_inspect):
        """Test downloading cost usage reports."""
        orchestrator = Orchestrator()
        reports = orchestrator.prepare()

        self.assertIsNone(reports)

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch.object(AccountsAccessor, "get_accounts")
    def test_init_all_accounts(self, mock_accessor, mock_inspect):
        """Test initializing orchestrator with forced billing source."""
        mock_accessor.return_value = self.mock_accounts
        orchestrator_all = Orchestrator()
        self.assertEqual(orchestrator_all._accounts, self.mock_accounts)

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch.object(AccountsAccessor, "get_accounts")
    def test_init_with_billing_source(self, mock_accessor, mock_inspect):
        """Test initializing orchestrator with forced billing source."""
        mock_accessor.return_value = self.mock_accounts

        fake_source = random.choice(self.mock_accounts)

        # Passing a billing source narrows the orchestrator to one account.
        individual = Orchestrator(fake_source.get("data_source"))
        self.assertEqual(len(individual._accounts), 1)
        found_account = individual._accounts[0]
        self.assertEqual(found_account.get("data_source"),
                         fake_source.get("data_source"))

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch.object(AccountsAccessor, "get_accounts")
    def test_init_all_accounts_error(self, mock_accessor, mock_inspect):
        """Test initializing orchestrator accounts error."""
        mock_accessor.side_effect = AccountsAccessorError(
            "Sample timeout error")
        # The accessor error must be swallowed, not propagated.
        try:
            Orchestrator()
        except Exception:
            self.fail("unexpected error")

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch.object(ExpiredDataRemover, "remove")
    @patch("masu.processor.orchestrator.remove_expired_data.apply_async",
           return_value=True)
    def test_remove_expired_report_data(self, mock_task, mock_remover,
                                        mock_inspect):
        """Test removing expired report data."""
        expected_results = [{
            "account_payer_id":
            "999999999",
            "billing_period_start":
            "2018-06-24 15:47:33.052509"
        }]
        mock_remover.return_value = expected_results

        expected = "INFO:masu.processor.orchestrator:Expired data removal queued - schema_name: acct10001, Task ID: {}"
        # unset disabling all logging below CRITICAL from masu/__init__.py
        logging.disable(logging.NOTSET)
        with self.assertLogs("masu.processor.orchestrator",
                             level="INFO") as logger:
            orchestrator = Orchestrator()
            results = orchestrator.remove_expired_report_data()
            self.assertTrue(results)
            self.assertEqual(len(results), 5)
            async_id = results.pop().get("async_id")
            self.assertIn(expected.format(async_id), logger.output)

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch.object(AccountsAccessor, "get_accounts")
    @patch.object(ExpiredDataRemover, "remove")
    @patch("masu.processor.orchestrator.remove_expired_data.apply_async",
           return_value=True)
    def test_remove_expired_report_data_no_accounts(self, mock_task,
                                                    mock_remover,
                                                    mock_accessor,
                                                    mock_inspect):
        """Test removing expired report data with no accounts."""
        expected_results = [{
            "account_payer_id":
            "999999999",
            "billing_period_start":
            "2018-06-24 15:47:33.052509"
        }]
        mock_remover.return_value = expected_results
        mock_accessor.return_value = []

        orchestrator = Orchestrator()
        results = orchestrator.remove_expired_report_data()

        self.assertEqual(results, [])

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.AccountLabel", spec=True)
    @patch(
        "masu.processor.orchestrator.Orchestrator.start_manifest_processing",
        side_effect=ReportDownloaderError)
    def test_prepare_w_downloader_error(self, mock_task, mock_labeler,
                                        mock_inspect):
        """Test that Orchestrator.prepare() handles downloader errors."""

        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_task.assert_called()
        mock_labeler.assert_not_called()

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.AccountLabel", spec=True)
    @patch(
        "masu.processor.orchestrator.Orchestrator.start_manifest_processing",
        side_effect=Exception)
    def test_prepare_w_exception(self, mock_task, mock_labeler, mock_inspect):
        """Test that Orchestrator.prepare() handles broad exceptions."""

        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_task.assert_called()
        mock_labeler.assert_not_called()

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.AccountLabel", spec=True)
    @patch(
        "masu.processor.orchestrator.Orchestrator.start_manifest_processing",
        return_value=True)
    def test_prepare_w_manifest_processing_successful(self, mock_task,
                                                      mock_labeler,
                                                      mock_inspect):
        """Test that Orchestrator.prepare() works when manifest processing is successful."""
        mock_labeler().get_label_details.return_value = (True, True)

        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_labeler.assert_called()

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.get_report_files.apply_async",
           return_value=True)
    def test_prepare_w_no_manifest_found(self, mock_task, mock_inspect):
        """Test that Orchestrator.prepare() is skipped when no manifest is found."""
        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_task.assert_not_called()

    # NOTE(review): @patch decorators inject mocks bottom-up, so the first
    # mock parameter corresponds to the *last* decorator (download_manifest),
    # not record_report_status. The parameter names below appear shifted by
    # one relative to the decorators — confirm which mock assert_not_called
    # is actually intended to target.
    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.record_report_status",
           return_value=True)
    @patch("masu.processor.orchestrator.chord", return_value=True)
    @patch("masu.processor.orchestrator.ReportDownloader.download_manifest",
           return_value={})
    def test_start_manifest_processing_already_progressed(
            self, mock_record_report_status, mock_download_manifest, mock_task,
            mock_inspect):
        """Test start_manifest_processing with report already processed."""
        orchestrator = Orchestrator()
        account = self.mock_accounts[0]

        orchestrator.start_manifest_processing(
            account.get("customer_name"),
            account.get("credentials"),
            account.get("data_source"),
            "AWS-local",
            account.get("schema_name"),
            account.get("provider_uuid"),
            DateAccessor().get_billing_months(1)[0],
        )
        mock_task.assert_not_called()

    # NOTE(review): same bottom-up mock/parameter-name mismatch as the
    # test above — verify the parameter order against the decorators.
    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.WorkerCache.task_is_running",
           return_value=True)
    @patch("masu.processor.orchestrator.chord", return_value=True)
    @patch("masu.processor.orchestrator.ReportDownloader.download_manifest",
           return_value={})
    def test_start_manifest_processing_in_progress(self,
                                                   mock_record_report_status,
                                                   mock_download_manifest,
                                                   mock_task, mock_inspect):
        """Test start_manifest_processing with report in progressed."""
        orchestrator = Orchestrator()
        account = self.mock_accounts[0]

        orchestrator.start_manifest_processing(
            account.get("customer_name"),
            account.get("credentials"),
            account.get("data_source"),
            "AWS-local",
            account.get("schema_name"),
            account.get("provider_uuid"),
            DateAccessor().get_billing_months(1)[0],
        )
        mock_task.assert_not_called()

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch("masu.processor.orchestrator.chord")
    @patch("masu.processor.orchestrator.ReportDownloader.download_manifest")
    def test_start_manifest_processing(self, mock_download_manifest, mock_task,
                                       mock_inspect):
        """Test start_manifest_processing."""
        # An empty manifest must skip the chord; a populated one must fire it.
        test_matrix = [
            {
                "mock_downloader_manifest": {},
                "expect_chord_called": False
            },
            {
                "mock_downloader_manifest": {
                    "manifest_id": 1,
                    "files": [{
                        "local_file": "file1.csv",
                        "key": "filekey"
                    }],
                },
                "expect_chord_called": True,
            },
        ]
        for test in test_matrix:
            mock_download_manifest.return_value = test.get(
                "mock_downloader_manifest")
            orchestrator = Orchestrator()
            account = self.mock_accounts[0]
            orchestrator.start_manifest_processing(
                account.get("customer_name"),
                account.get("credentials"),
                account.get("data_source"),
                "AWS-local",
                account.get("schema_name"),
                account.get("provider_uuid"),
                DateAccessor().get_billing_months(1)[0],
            )
            if test.get("expect_chord_called"):
                mock_task.assert_called()
            else:
                mock_task.assert_not_called()

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    @patch(
        "masu.database.provider_db_accessor.ProviderDBAccessor.get_setup_complete"
    )
    def test_get_reports(self, fake_accessor, mock_inspect):
        """Test get_reports for combinations of setup_complete and ingest override."""
        # Remember the original value so it can be restored after the matrix.
        initial_month_qty = Config.INITIAL_INGEST_NUM_MONTHS
        test_matrix = [
            {
                "get_setup_complete": True,
                "ingest_override": True,
                "test_months": 5,
                "expected_month_length": 5
            },
            {
                "get_setup_complete": False,
                "ingest_override": True,
                "test_months": 5,
                "expected_month_length": 5
            },
            {
                "get_setup_complete": True,
                "ingest_override": False,
                "test_months": 5,
                "expected_month_length": 2
            },
            {
                "get_setup_complete": False,
                "ingest_override": False,
                "test_months": 5,
                "expected_month_length": 5
            },
        ]
        for test in test_matrix:
            test_months = test.get("test_months")
            fake_accessor.return_value = test.get("get_setup_complete")
            Config.INGEST_OVERRIDE = test.get("ingest_override")
            Config.INITIAL_INGEST_NUM_MONTHS = test_months

            orchestrator = Orchestrator()
            months = orchestrator.get_reports(self.aws_provider_uuid)
            self.assertEqual(test.get("expected_month_length"), len(months))

        # Restore global config so later tests see the original values.
        Config.INGEST_OVERRIDE = False
        Config.INITIAL_INGEST_NUM_MONTHS = initial_month_qty
Exemple #25
0
import factory
import faker
from factory.django import DjangoModelFactory

from share import models

# Public API of this factories module.
# NOTE(review): SourceFactory is exported but not defined in this chunk —
# confirm it exists elsewhere in the module.
__all__ = (
    'SourceFactory',
    'ShareUserFactory',
    'NormalizedDataFactory',
)

# NOTE(review): this rebinds the module name `faker` to a Faker instance,
# shadowing the imported module — `faker.Faker` is unavailable below here.
faker = faker.Faker()


class ShareUserFactory(DjangoModelFactory):
    """Factory producing ShareUser rows with sequence-unique fake usernames."""

    # Appending the sequence counter keeps usernames unique even when the
    # fake name generator repeats itself.
    username = factory.Sequence(lambda n: '{}{}'.format(faker.name(), n))
    source = factory.RelatedFactory('tests.factories.core.SourceFactory', 'user')

    class Meta:
        model = models.ShareUser


class NormalizedDataFactory(DjangoModelFactory):
    """Factory producing NormalizedData rows with an empty data payload."""

    # NOTE(review): shared dict literal as the default payload — confirm
    # factory_boy copies it per instance rather than sharing one dict.
    data = {}
    source = factory.SubFactory(ShareUserFactory)

    class Meta:
        model = models.NormalizedData

Exemple #26
0
def write_some_log():
    """Recreate logs.log containing a single fake sentence.

    The original removed the file (if present) and then opened it in
    append mode; opening in write mode truncates and recreates it in a
    single step with the same end result.
    """
    fp = 'logs.log'
    with open(fp, 'w') as file:
        file.write(faker.Faker().sentence())
Exemple #27
0
import random
import uuid
import time
import sys
import os

# Make the project root (three levels up) importable before the
# project-local imports below.
sys.path.append(
    os.path.dirname(os.path.dirname(os.path.dirname(
        os.path.abspath(__file__)))))

import cx_Oracle
import faker

from core.fake_data import random_datetime

fake = faker.Faker()
# Base value for generated OBJECTID fields.
G_OBJECTID = 200000000


def random_CRASH_DETAILS_TABLE():
    '''
    随机往CRASH_DETAILS_TABLE表里写数据
    D => CRASH_DETAILS_TABLE
    :param cur:
    :return:
    '''
    return dict(
        OBJECTID=G_OBJECTID,
        CRIMEID=random.randint(0, 30000000),
        CCN=random.randint(0, 100000000),
        PERSONID=random.randint(0, 90000000),
Exemple #28
0
class OrchestratorTest(MasuTestCase):
    """Test Cases for the Orchestrator object."""
    # Class-level Faker shared by all tests for random account fields.
    fake = faker.Faker()

    def setUp(self):
        """Build a random number (4-19) of mock AWS account dicts."""
        super().setUp()
        self.mock_accounts = []
        for _ in range(1, random.randint(5, 20)):
            self.mock_accounts.append({
                'authentication':
                fake_arn(service='iam', generate_account_id=True),
                'billing_source':
                self.fake.word(),
                'customer_name':
                self.fake.word(),
                'provider_type':
                'AWS',
                'schema_name':
                self.fake.word()
            })

    def test_initializer(self):
        """Test to init"""
        orchestrator = Orchestrator()

        # Expect exactly the two fixture providers (AWS + OCP) from the base
        # test case's configured accounts.
        if len(orchestrator._accounts) != 2:
            self.fail("Unexpected number of test accounts")

        for account in orchestrator._accounts:
            if account.get('provider_type') == AMAZON_WEB_SERVICES:
                self.assertEqual(account.get('authentication'),
                                 self.aws_provider_resource_name)
                self.assertEqual(account.get('billing_source'),
                                 self.aws_test_billing_source)
                self.assertEqual(account.get('customer_name'),
                                 self.test_schema)
            elif account.get('provider_type') == OPENSHIFT_CONTAINER_PLATFORM:
                self.assertEqual(account.get('authentication'),
                                 self.ocp_provider_resource_name)
                self.assertEqual(account.get('billing_source'),
                                 self.ocp_test_billing_source)
                self.assertEqual(account.get('customer_name'),
                                 self.test_schema)
            else:
                self.fail('Unexpected provider')

        # Only the AWS account should be in the polling set.
        if len(orchestrator._polling_accounts) != 1:
            self.fail("Unexpected number of listener test accounts")

        for account in orchestrator._polling_accounts:
            if account.get('provider_type') == AMAZON_WEB_SERVICES:
                self.assertEqual(account.get('authentication'),
                                 self.aws_provider_resource_name)
                self.assertEqual(account.get('billing_source'),
                                 self.aws_test_billing_source)
                self.assertEqual(account.get('customer_name'),
                                 self.test_schema)
            else:
                self.fail('Unexpected provider')

    @patch('masu.external.report_downloader.ReportDownloader._set_downloader',
           return_value=FakeDownloader)
    @patch('masu.external.accounts_accessor.AccountsAccessor.get_accounts',
           return_value=[])
    def test_prepare_no_accounts(self, mock_downloader,
                                 mock_accounts_accessor):
        """Test downloading cost usage reports."""
        # With no accounts returned by the accessor, prepare() yields nothing.
        orchestrator = Orchestrator()
        reports = orchestrator.prepare()

        self.assertIsNone(reports)

    @patch.object(AccountsAccessor, 'get_accounts')
    def test_init_all_accounts(self, mock_accessor):
        """Test initializing orchestrator with forced billing source."""
        mock_accessor.return_value = self.mock_accounts
        orchestrator_all = Orchestrator()
        self.assertEqual(orchestrator_all._accounts, self.mock_accounts)

    @patch.object(AccountsAccessor, 'get_accounts')
    def test_init_with_billing_source(self, mock_accessor):
        """Test initializing orchestrator with forced billing source."""
        mock_accessor.return_value = self.mock_accounts

        # Pick one mock account and force its billing source; only that one
        # account should survive the filter.
        fake_source = random.choice(self.mock_accounts)

        individual = Orchestrator(fake_source.get('billing_source'))
        self.assertEqual(len(individual._accounts), 1)
        found_account = individual._accounts[0]
        self.assertEqual(found_account.get('billing_source'),
                         fake_source.get('billing_source'))

    @patch.object(AccountsAccessor, 'get_accounts')
    def test_init_all_accounts_error(self, mock_accessor):
        """Test initializing orchestrator accounts error."""
        # Accessor failure must be swallowed by the constructor, not raised.
        mock_accessor.side_effect = AccountsAccessorError(
            'Sample timeout error')
        try:
            Orchestrator()
        except Exception:
            self.fail('unexpected error')

    @patch.object(ExpiredDataRemover, 'remove')
    @patch('masu.processor.orchestrator.remove_expired_data.apply_async',
           return_value=True)
    def test_remove_expired_report_data(self, mock_task, mock_remover):
        """Test removing expired report data."""
        expected_results = [{
            'account_payer_id':
            '999999999',
            'billing_period_start':
            '2018-06-24 15:47:33.052509'
        }]
        mock_remover.return_value = expected_results

        expected = 'INFO:masu.processor.orchestrator:Expired data removal queued - customer: acct10001, Task ID: {}'
        # unset disabling all logging below CRITICAL from masu/__init__.py
        logging.disable(logging.NOTSET)
        with self.assertLogs('masu.processor.orchestrator',
                             level='INFO') as logger:
            orchestrator = Orchestrator()
            results = orchestrator.remove_expired_report_data()
            self.assertTrue(results)
            # One result per configured account (AWS + OCP fixtures).
            self.assertEqual(len(results), 2)
            async_id = results.pop().get('async_id')
            self.assertIn(expected.format(async_id), logger.output)

    @patch.object(AccountsAccessor, 'get_accounts')
    @patch.object(ExpiredDataRemover, 'remove')
    @patch('masu.processor.orchestrator.remove_expired_data.apply_async',
           return_value=True)
    def test_remove_expired_report_data_no_accounts(self, mock_task,
                                                    mock_remover,
                                                    mock_accessor):
        """Test removing expired report data with no accounts."""
        expected_results = [{
            'account_payer_id':
            '999999999',
            'billing_period_start':
            '2018-06-24 15:47:33.052509'
        }]
        mock_remover.return_value = expected_results
        mock_accessor.return_value = []

        orchestrator = Orchestrator()
        results = orchestrator.remove_expired_report_data()

        self.assertEqual(results, [])

    @patch('masu.processor.orchestrator.AccountLabel', spec=True)
    @patch('masu.processor.orchestrator.ProviderStatus', spec=True)
    @patch('masu.processor.orchestrator.get_report_files.apply_async',
           return_value=True)
    def test_prepare_w_status_valid(self, mock_task, mock_accessor,
                                    mock_labeler):
        """Test that Orchestrator.prepare() works when status is valid."""
        mock_labeler().get_label_details.return_value = (True, True)

        # Configure the *instance* mock (note the call on mock_accessor).
        mock_accessor().is_valid.return_value = True
        mock_accessor().is_backing_off.return_value = False

        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_task.assert_called()

    @patch('masu.processor.orchestrator.ProviderStatus', spec=True)
    @patch('masu.processor.orchestrator.get_report_files.apply_async',
           return_value=True)
    def test_prepare_w_status_invalid(self, mock_task, mock_accessor):
        """Test that Orchestrator.prepare() is skipped when status is invalid."""
        # NOTE(review): unlike test_prepare_w_status_valid, these configure
        # attributes on the patched *class* mock, not on the instance
        # (mock_accessor() ...). Confirm this actually drives the invalid
        # path rather than passing incidentally.
        mock_accessor.is_valid.return_value = False
        mock_accessor.is_backing_off.return_value = False

        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_task.assert_not_called()

    @patch('masu.processor.orchestrator.ProviderStatus', spec=True)
    @patch('masu.processor.orchestrator.get_report_files.apply_async',
           return_value=True)
    def test_prepare_w_status_backoff(self, mock_task, mock_accessor):
        """Test that Orchestrator.prepare() is skipped when backing off."""
        # NOTE(review): same class-vs-instance mock configuration concern as
        # test_prepare_w_status_invalid above -- verify.
        mock_accessor.is_valid.return_value = False
        mock_accessor.is_backing_off.return_value = True

        orchestrator = Orchestrator()
        orchestrator.prepare()
        mock_task.assert_not_called()
Exemple #29
0
    def generate(n):
        """Build *n* randomized ``PullRequestTimes`` fixtures.

        Each fixture's timestamps are drawn with faker so that the usual PR
        lifecycle ordering holds (first commit -> created -> review ->
        approval -> merge -> release), all timezone-aware in UTC.
        """
        fake = faker.Faker()

        def random_pr():
            # PR created somewhere between 3 years and 6 months ago.
            created_at = fake.date_time_between(start_date="-3y",
                                                end_date="-6M",
                                                tzinfo=timezone.utc)
            first_commit = fake.date_time_between(start_date="-3y1M",
                                                  end_date=created_at,
                                                  tzinfo=timezone.utc)
            last_commit_before_first_review = fake.date_time_between(
                start_date=created_at,
                end_date=created_at + timedelta(days=30),
                tzinfo=timezone.utc)
            # NOTE(review): end_date here is a bare timedelta, which faker
            # interprets relative to *now* -- this looks unintended; confirm
            # it should not be last_commit_before_first_review +
            # timedelta(days=2).
            first_comment_on_first_review = fake.date_time_between(
                start_date=last_commit_before_first_review,
                end_date=timedelta(days=2),
                tzinfo=timezone.utc)
            first_review_request = fake.date_time_between(
                start_date=last_commit_before_first_review,
                end_date=first_comment_on_first_review,
                tzinfo=timezone.utc)
            first_passed_checks = fake.date_time_between(
                start_date=created_at,
                end_date=first_review_request,
                tzinfo=timezone.utc)
            # Approval lands 1-30 days after the first review comment.
            approved_at = fake.date_time_between(
                start_date=first_comment_on_first_review + timedelta(days=1),
                end_date=first_comment_on_first_review + timedelta(days=30),
                tzinfo=timezone.utc)
            last_commit = fake.date_time_between(
                start_date=first_comment_on_first_review + timedelta(days=1),
                end_date=approved_at,
                tzinfo=timezone.utc)
            last_passed_checks = fake.date_time_between(last_commit,
                                                        last_commit +
                                                        timedelta(days=1),
                                                        tzinfo=timezone.utc)
            # Merge within 2 days of approval; close coincides with merge.
            merged_at = fake.date_time_between(approved_at,
                                               approved_at + timedelta(days=2),
                                               tzinfo=timezone.utc)
            closed_at = merged_at
            last_review = fake.date_time_between(approved_at,
                                                 closed_at,
                                                 tzinfo=timezone.utc)
            released_at = fake.date_time_between(merged_at,
                                                 merged_at +
                                                 timedelta(days=30),
                                                 tzinfo=timezone.utc)
            return PullRequestTimes(
                created=Fallback(created_at, None),
                first_commit=Fallback(first_commit, created_at),
                last_commit_before_first_review=Fallback(
                    last_commit_before_first_review, None),
                last_commit=Fallback(last_commit, None),
                merged=Fallback(merged_at, None),
                first_comment_on_first_review=Fallback(
                    first_comment_on_first_review, None),
                first_review_request=Fallback(first_review_request, None),
                last_review=Fallback(last_review, None),
                approved=Fallback(approved_at, None),
                first_passed_checks=Fallback(first_passed_checks, None),
                last_passed_checks=Fallback(last_passed_checks, None),
                # "finalized" = latest of approval/checks/commit, capped at
                # the close time.
                finalized=Fallback(
                    min(max(approved_at, last_passed_checks, last_commit),
                        closed_at), None),
                released=Fallback(released_at, None),
                closed=Fallback(closed_at, None),
            )

        return [random_pr() for _ in range(n)]
Exemple #30
0
# Primary recipient list for report mail (addresses masked in this copy).
TO = ["*****@*****.**"]

# CC recipient list (addresses masked in this copy).
CS = ["*****@*****.**", "*****@*****.**"]

# Project root: two directory levels above this settings file.
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Directory holding test-case definitions.
CASE_PATH = os.path.join(BASE_PATH, "cases")

# Directory where generated reports are written.
REPORT_PATH = os.path.join(BASE_PATH, "report")

# Log file location and verbosity for the shared logger instance below.
LOG_PATH = os.path.join(BASE_PATH, "logs", "atp.log")
LOG_LEVEL = 'info'
log = Mylog(LOG_PATH, LOG_LEVEL).get_logger()

# Used for parameterization: a zh-CN Faker backs placeholder substitution.
f = faker.Faker(locale="zh-CN")

# Placeholder -> generator mapping. Values are bound Faker methods, called
# at substitution time so each occurrence gets fresh fake data.
PARAMS_MAP = {
    "<card>": f.ssn,
    "<phone>": f.phone_number,
    "<email>": f.email,
    "<name>": f.name,
    "<password>": f.password,
    "<bankcard>": f.credit_card_number,
    "<money>": f.random_int,
    "<address>": f.address
}

HOSTS = {
    "test": "http://localhost:8080",
    "dev": "http://localhost:8080",