def test_deduplicating_report(self):
        """Test that archiving creates archive rep, deletes report, and resets the processor."""
        self.report_record.report_platform_id = self.uuid
        self.report_record.save()
        report_to_dedup = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                 account='4321',
                                 report_platform_id=self.uuid,
                                 state=Report.NEW,
                                 upload_ack_status='success',
                                 state_info=json.dumps([Report.NEW]),
                                 last_update_time=datetime.now(pytz.utc),
                                 retry_count=0,
                                 ready_to_archive=True,
                                 arrival_time=datetime.now(pytz.utc),
                                 processing_start_time=datetime.now(pytz.utc))
        report_to_dedup.save()
        self.processor.report_or_slice = report_to_dedup
        self.processor.account_number = '4321'
        self.processor.upload_message = self.msg
        self.processor.state = report_to_dedup.state
        self.processor.report_platform_id = self.uuid
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS

        self.processor.deduplicate_reports()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=report_to_dedup.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(account='4321')
        self.assertEqual(json.loads(archived.state_info), [Report.NEW])
        # assert the processor was reset
        self.check_variables_are_reset()
    def test_archiving_report_not_ready(self):
        """Test that archiving fails if report not ready to archive."""
        report_to_archive = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                   account='4321',
                                   report_platform_id=self.uuid2,
                                   state=Report.NEW,
                                   state_info=json.dumps([Report.NEW]),
                                   last_update_time=datetime.now(pytz.utc),
                                   retry_count=0,
                                   ready_to_archive=False)
        report_to_archive.save()
        self.processor.report_or_slice = report_to_archive
        self.processor.account_number = '4321'
        self.processor.upload_message = self.msg
        self.processor.state = report_to_archive.state
        self.processor.report_platform_id = self.uuid
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS

        self.processor.archive_report_and_slices()
        # assert the report still exist
        existing_report = Report.objects.get(id=report_to_archive.id)
        self.assertEqual(existing_report, report_to_archive)
        # assert the report archive does not exist
        with self.assertRaises(ReportArchive.DoesNotExist):
            ReportArchive.objects.get(account='4321')
        # assert the processor was reset
        self.check_variables_are_reset()
 def test_assign_report_oldest_commit(self):
     """Test the assign report function with retry type as commit."""
     current_time = datetime.now(pytz.utc)
     twentyminold_time = current_time - timedelta(minutes=20)
     older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                           account='4321',
                           report_platform_id=self.uuid2,
                           state=Report.DOWNLOADED,
                           state_info=json.dumps(
                               [Report.NEW, Report.DOWNLOADED]),
                           last_update_time=twentyminold_time,
                           retry_count=1,
                           retry_type=Report.GIT_COMMIT,
                           git_commit='1234')
     older_report.save()
     self.report_record.state = Report.DOWNLOADED
     self.report_record.save()
     self.processor.report_or_slice = None
     # the commit should always be different from 1234
     self.processor.assign_object()
     self.assertEqual(self.processor.report_or_slice, older_report)
     self.assertEqual(self.processor.report_or_slice.state,
                      Report.DOWNLOADED)
     # delete the older report object
     Report.objects.get(id=older_report.id).delete()
예제 #4
0
def generateSalesReport(userid, fromDate, toDate):
    periodOfSales = Sale.objects.filter(datetime__range=[fromDate, toDate])
    headers = ['ID', 'Date', 'Customer Name', 'Quantity', 'Price']
    wb = Workbook()
    ws1 = wb.active
    # Sheet 1 sales report overview
    ws1.title = "Sales Report Overview"
    ws1.append(headers)
    for sale in periodOfSales:
        saleItems = SaleItem.objects.filter(sale_id=sale.id)
        currentPrice = 0
        currentQuantity = 0
        for items in saleItems:
            currentPrice = currentPrice + (items.sale_price * items.quantity)
            currentQuantity = currentQuantity + items.quantity
        customer = Customer.objects.get(user_id=sale.customer_id)
        customerName = customer.full_name
        currentRow = [
            sale.id, sale.datetime, customerName, currentQuantity,
            convertToPoundsStr(currentPrice)
        ]
        ws1.append(currentRow)
    # Sheet 2 sales report details
    headers = [
        'Sale ID', 'Item Code', 'Item Name', 'Quantity', 'Price', 'Returned',
        'Date', 'Department', 'Customer Name'
    ]
    ws2 = wb.create_sheet(title="Weekly Sales Item Report")
    ws2.title = "Sales Report Details"
    ws2.append(headers)
    for sale in periodOfSales:
        saleItems = SaleItem.objects.filter(sale_id=sale.id)
        currentPrice = 0
        currentQuantity = 0
        customer = Customer.objects.get(user_id=sale.customer_id)
        for items in saleItems:
            itemDetails = Item.objects.get(id=items.item_id)
            currentRow = [
                sale.id, itemDetails.code, itemDetails.name, items.quantity,
                convertToPoundsStr(item.items.sale_price),
                items.returned_quantity, sale.datetime, customer.dept_id,
                customer.full_name
            ]
            ws2.append(currentRow)
    filename = "Sales_Report" + str(
        timezone.now().strftime("%Y%m%d-%H%M%S")) + ".xlsx"
    wb.save(REPORT_DIR + filename)

    reportMessage = "Sales Report Ready"
    report = Report(user_id=userid,
                    filename=filename,
                    created_date=timezone.now(),
                    report_type="SA")
    report.save()
    notify = Notification(user_id=userid,
                          text=reportMessage,
                          notification_type="RE",
                          link="/staff/reports/?id=" + str(report.id),
                          seen=False)
    notify.save()
 async def async_transition_to_validation_reported_failure_status(self):
     """Set up the test for transitioning to validation reported failure status."""
     report_to_archive = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                account='43214',
                                report_platform_id=self.uuid2,
                                state=Report.VALIDATED,
                                state_info=json.dumps([Report.NEW]),
                                last_update_time=datetime.now(pytz.utc),
                                retry_count=0,
                                retry_type=Report.TIME,
                                ready_to_archive=True,
                                arrival_time=datetime.now(pytz.utc),
                                processing_start_time=datetime.now(
                                    pytz.utc))
     report_to_archive.upload_ack_status = report_processor.FAILURE_CONFIRM_STATUS
     report_to_archive.save()
     self.processor.report_or_slice = report_to_archive
     self.processor.report_platform_id = self.uuid2
     self.processor.account_number = '43214'
     self.processor.state = Report.VALIDATED
     self.processor.status = report_processor.FAILURE_CONFIRM_STATUS
     self.processor.upload_message = {'request_id': self.uuid}
     self.processor._send_confirmation = CoroutineMock()
     await self.processor.transition_to_validation_reported()
     with self.assertRaises(Report.DoesNotExist):
         Report.objects.get(id=report_to_archive.id)
     archived = ReportArchive.objects.get(account='43214')
     self.assertEqual(archived.state, Report.VALIDATION_REPORTED)
     self.assertEqual(archived.upload_ack_status,
                      report_processor.FAILURE_CONFIRM_STATUS)
     # assert the processor was reset
     self.check_variables_are_reset()
예제 #6
0
def new_report(kw):
    start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    _report = Report(name=kw["name"],
                     start_time=start_time,
                     report_path=kw["uid"],
                     log=kw["uid"])
    _report.save()
    return _report
예제 #7
0
    def setUp(self):
        """Create test setup."""
        self.payload_url = 'http://insights-upload.com/q/file_to_validate'
        self.uuid = uuid.uuid4()
        self.uuid2 = uuid.uuid4()
        self.uuid3 = uuid.uuid4()
        self.uuid4 = uuid.uuid4()
        self.uuid5 = uuid.uuid4()
        self.uuid6 = uuid.uuid4()
        self.uuid7 = uuid.uuid4()
        self.uuid8 = uuid.uuid4()
        self.fake_record = test_handler.KafkaMsg(msg_handler.QPC_TOPIC, 'http://internet.com')
        self.report_consumer = msg_handler.ReportConsumer()
        self.msg = self.report_consumer.unpack_consumer_record(self.fake_record)
        self.report_json = {
            'request_id': '234332',
            'report_id': 1,
            'report_type': 'insights',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': [{'bios_uuid': 'value'},
                      {'invalid': 'value'}]}
        self.report_record = Report(
            request_id='234332',
            upload_srv_kafka_msg=json.dumps(self.msg),
            account='1234',
            state=Report.NEW,
            state_info=json.dumps([Report.NEW]),
            last_update_time=datetime.now(pytz.utc),
            retry_count=0,
            ready_to_archive=False,
            source='satellite',
            arrival_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_record.save()

        self.report_slice = ReportSlice(
            report_platform_id=self.uuid,
            report_slice_id=self.uuid2,
            account='13423',
            report_json=json.dumps(self.report_json),
            state=ReportSlice.NEW,
            state_info=json.dumps([ReportSlice.NEW]),
            retry_count=0,
            last_update_time=datetime.now(pytz.utc),
            failed_hosts=[],
            candidate_hosts=[],
            report=self.report_record,
            ready_to_archive=True,
            hosts_count=2,
            source='satellite',
            creation_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_slice.save()
        self.report_record.save()
        self.processor = report_slice_processor.ReportSliceProcessor()
        self.processor.report = self.report_slice
예제 #8
0
def generateStockReport(userid, includeChecks, fromDate="", toDate=""):
    items = Item.objects.all()
    headers = [
        'Item ID', 'Item Code', 'Item Name', 'Item Price', 'Quantity',
        'Warning Quantity', 'Is Chemical', 'Pack Size', 'For Sale'
    ]
    wb = Workbook()
    ws1 = wb.active
    ws1.title = "Stock Report"
    ws1.append(headers)
    for item in items:
        currentRow = [
            item.id, item.code, item.name,
            convertToPoundsStr(item.price), item.quantity,
            item.warning_quantity, item.is_chemical, item.pack_size,
            item.for_sale
        ]
        ws1.append(currentRow)
    # Optional Stock Checks
    if includeChecks:
        # Sheet 2 stocks checks
        headers = [
            'Date', 'Staff Member', 'Item Code', 'Item Name',
            'Observed Quantity', 'Expected Quantity', 'Warning Quantity'
        ]
        ws2 = wb.create_sheet(title="Stock Check Report")
        ws2.title = "Stock Check Report"
        ws2.append(headers)
        for check in StockCheck.objects.filter(
                datetime__range=[fromDate, toDate]):
            checkStaff = Staff.objects.get(user_id=check.staff_id)
            checkedItem = StockCheckItem.objects.get(id=check.id)
            itemDetails = Item.objects.get(id=checkedItem.item_id)
            currentRow = [
                check.datetime, checkStaff.full_name, itemDetails.code,
                itemDetails.name, checkedItem.observed_quantity,
                checkedItem.expected_quantity, itemDetails.warning_quantity
            ]
            ws2.append(currentRow)
    filename = "Stock_Report" + str(
        timezone.now().strftime("%Y%m%d-%H%M%S")) + ".xlsx"
    wb.save(REPORT_DIR + filename)
    reportMessage = "Stock Report Ready"

    report = Report(user_id=userid,
                    filename=filename,
                    created_date=timezone.now(),
                    report_type="ST")
    report.save()
    notify = Notification(user_id=userid,
                          text=reportMessage,
                          notification_type="RE",
                          link="/staff/reports/?id=" + str(report.id),
                          seen=False)
    notify.save()
예제 #9
0
 def save_performance_report(self, dsal_filename: str, report_name: str, report: dict):
     """
         Save perfromance report into database
     """
     dsal = Dsal.objects.get(dsal_filename=dsal_filename)
     report_instance = Report(
         dsal=dsal,
         report_name=report_name,
         report_content=report,
         report_created=timezone.now()
     )
     report_instance.save()
     return report_instance
    def setUp(self):
        """Create test setup."""
        self.payload_url = 'http://insights-upload.com/q/file_to_validate'
        self.uuid = uuid.uuid4()
        self.uuid2 = uuid.uuid4()
        self.uuid3 = uuid.uuid4()
        self.fake_record = test_handler.KafkaMsg(msg_handler.MKT_TOPIC,
                                                 'http://internet.com')
        self.report_consumer = msg_handler.ReportConsumer()
        self.msg = self.report_consumer.unpack_consumer_record(
            self.fake_record)
        self.report_json = {
            'report_id': 1,
            'report_slice_id': str(self.uuid2),
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.report_record = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                    account='1234',
                                    state=Report.NEW,
                                    state_info=json.dumps([Report.NEW]),
                                    last_update_time=datetime.now(pytz.utc),
                                    retry_count=0,
                                    ready_to_archive=False,
                                    source=uuid.uuid4(),
                                    arrival_time=datetime.now(pytz.utc),
                                    processing_start_time=datetime.now(
                                        pytz.utc))
        self.report_record.save()

        self.report_slice = ReportSlice(
            report_platform_id=self.uuid,
            report_slice_id=self.uuid2,
            account='13423',
            report_json=json.dumps(self.report_json),
            state=ReportSlice.NEW,
            state_info=json.dumps([ReportSlice.NEW]),
            retry_count=0,
            last_update_time=datetime.now(pytz.utc),
            report=self.report_record,
            ready_to_archive=True,
            creation_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_slice.save()

        self.processor = report_processor.ReportProcessor()
        self.processor.report = self.report_record
 def test_assign_report_oldest_time(self):
     """Test the assign report function with older report."""
     current_time = datetime.now(pytz.utc)
     hours_old_time = current_time - timedelta(hours=9)
     older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                           account='4321',
                           report_platform_id=self.uuid2,
                           state=Report.NEW,
                           state_info=json.dumps([Report.NEW]),
                           last_update_time=hours_old_time,
                           retry_count=1)
     older_report.save()
     self.report_record.state = Report.NEW
     self.report_record.save()
     self.processor.report_or_slice = None
     self.processor.assign_object()
     self.assertEqual(self.processor.report_or_slice, older_report)
     # delete the older report object
     Report.objects.get(id=older_report.id).delete()
 def test_assign_report_not_old_enough(self):
     """Test the assign report function with young report."""
     # delete the report record
     Report.objects.get(id=self.report_record.id).delete()
     self.processor.report_or_slice = None
     current_time = datetime.now(pytz.utc)
     min_old_time = current_time - timedelta(minutes=1)
     older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                           account='4321',
                           report_platform_id=self.uuid2,
                           state=Report.STARTED,
                           state_info=json.dumps([Report.NEW]),
                           last_update_time=min_old_time,
                           retry_count=1)
     older_report.save()
     self.processor.assign_object()
     self.assertEqual(self.processor.report_or_slice, None)
     # delete the older report object
     Report.objects.get(id=older_report.id).delete()
예제 #13
0
def generateReturnsReport(userid, fromDate, toDate):
    periodOfReturns = Return.objects.filter(datetime__range=[fromDate, toDate])
    headers = [
        'Return ID', 'Sale ID', 'Item ID', 'Item Name', 'Customer Name',
        'Staff Name', 'Quantity', 'Reason', 'Date'
    ]
    wb = Workbook()
    ws1 = wb.active
    ws1.title = "Return Report"
    ws1.append(headers)
    for ret in periodOfReturns:
        saleItem = SaleItem.objects.get(id=ret.sale_item_id)
        item = Item.objects.get(id=saleItem.item_id)
        sale = Sale.objects.get(id=saleItem.sale_id)
        staff = Staff.objects.get(user_id=ret.staff_id)
        customer = Customer.objects.get(user_id=sale.customer_id)
        currentRow = [
            ret.id, sale.id, item.id, item.name, customer.full_name,
            staff.full_name, ret.quantity, ret.reason, ret.datetime
        ]
        ws1.append(currentRow)
    filename = "Return_Report" + str(
        timezone.now().strftime("%Y%m%d-%H%M%S")) + ".xlsx"
    wb.save(REPORT_DIR + filename)
    reportMessage = "Return Report Ready"

    report = Report(user_id=userid,
                    filename=filename,
                    created_date=timezone.now(),
                    report_type="RE")
    report.save()
    notify = Notification(user_id=userid,
                          text=reportMessage,
                          notification_type="RE",
                          link="/staff/reports/?id=" + str(report.id),
                          seen=False)
    notify.save()
예제 #14
0
    def post(self):
        try:
            request_data = request.get_json()
            content = request_data.get('content')
            case_id = request_data.get('case_id', None)
            log_id = request_data.get('log_id', None)

            report = Report(content=content, case_id=case_id, log_id=log_id)
            db.session.add(report)
            db.session.commit()

            return "Report created", 200
        except Exception as e:
            print(e)
            return "Internal Server Error", 500
예제 #15
0
def report(report_id):
    if request.method == 'GET':
        if report_id is None:
            reports = Report.query.all()
            reports = [r.to_dict() for r in reports]

            return jsonify({
                'data': reports
            })
        else:
            reports = Report.query.filter_by(id=report_id).first()

            return jsonify({
                'data': reports.to_dict()
            })

    elif request.method == 'POST':
        data = request.json
        print(data)
        valid, missing = Report.validate_json(data)

        if not valid:
            return jsonify({
                'message': '{} not given in request.'.format(', '.join(missing))
            }), 422

        report = Report(data['user_id'],
                        data['report_type'],
                        data['timestamp'],
                        data['location']['longitude'],
                        data['location']['latitude'])
        db.session.add(report)
        db.session.commit()

        return jsonify(report.id)

    elif request.method == 'PUT':
        print(request.headers)

        if 'video' not in request.files:
            return jsonify({'message': 'Request does not have a file'}), 422
        
        file = request.files['video']
        
        if file.filename == '':
            return jsonify({'message': 'Request does not have a file'}), 422
        
        if file and utils.allowed_file(file.filename):
            ext = utils.get_ext(file.filename)
            file_id = uuid.uuid4().int
            filename = str(file_id) + '.' + ext
            abs_fpath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(abs_fpath)

            rel_path = os.path.join(os.path.basename(app.config['UPLOAD_FOLDER']), filename)

            video = Video(path=rel_path, ext=ext)
            db.session.add(video)
            db.session.commit()

            url = os.path.join('video', str(video.id))
            video.url = url
            db.session.commit()

            report = Report.query.filter_by(id=report_id).first()
            report.video_id = video.id
            db.session.commit()

            if report.report_type == 'hit':
                pipelines.hit(report, abs_fpath)
            elif report.report_type == 'witness':
                pipelines.witness(report, abs_fpath)

            return jsonify({'message': 'Successfully uploaded video.'})

    elif request.method == 'DELETE':
        report = Report.query.filter_by(id=report_id).first()
        db.session.delete(report)
        db.session.commit()
        return jsonify({
            'message': 'Successfully deleted.'
        })
예제 #16
0
class ReportSliceProcessorTests(TestCase):
    """Test Cases for the Message processor."""

    def setUp(self):
        """Create test setup."""
        self.payload_url = 'http://insights-upload.com/q/file_to_validate'
        self.uuid = uuid.uuid4()
        self.uuid2 = uuid.uuid4()
        self.uuid3 = uuid.uuid4()
        self.uuid4 = uuid.uuid4()
        self.uuid5 = uuid.uuid4()
        self.uuid6 = uuid.uuid4()
        self.uuid7 = uuid.uuid4()
        self.uuid8 = uuid.uuid4()
        self.fake_record = test_handler.KafkaMsg(msg_handler.QPC_TOPIC, 'http://internet.com')
        self.report_consumer = msg_handler.ReportConsumer()
        self.msg = self.report_consumer.unpack_consumer_record(self.fake_record)
        self.report_json = {
            'request_id': '234332',
            'report_id': 1,
            'report_type': 'insights',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': [{'bios_uuid': 'value'},
                      {'invalid': 'value'}]}
        self.report_record = Report(
            request_id='234332',
            upload_srv_kafka_msg=json.dumps(self.msg),
            account='1234',
            state=Report.NEW,
            state_info=json.dumps([Report.NEW]),
            last_update_time=datetime.now(pytz.utc),
            retry_count=0,
            ready_to_archive=False,
            source='satellite',
            arrival_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_record.save()

        self.report_slice = ReportSlice(
            report_platform_id=self.uuid,
            report_slice_id=self.uuid2,
            account='13423',
            report_json=json.dumps(self.report_json),
            state=ReportSlice.NEW,
            state_info=json.dumps([ReportSlice.NEW]),
            retry_count=0,
            last_update_time=datetime.now(pytz.utc),
            failed_hosts=[],
            candidate_hosts=[],
            report=self.report_record,
            ready_to_archive=True,
            hosts_count=2,
            source='satellite',
            creation_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_slice.save()
        self.report_record.save()
        self.processor = report_slice_processor.ReportSliceProcessor()
        self.processor.report = self.report_slice

    def check_variables_are_reset(self):
        """Check that report processor members have been cleared."""
        processor_attributes = [self.processor.report_platform_id,
                                self.processor.report,
                                self.processor.state,
                                self.processor.account_number,
                                self.processor.upload_message,
                                self.processor.status,
                                self.processor.report_json,
                                self.processor.candidate_hosts,
                                self.processor.failed_hosts]
        for attribute in processor_attributes:
            self.assertEqual(attribute, None)

    def test_assign_report_slice_new(self):
        """Test the assign report slice function with only a new report slice."""
        self.report_slice.state = ReportSlice.NEW
        self.report_slice.save()
        self.processor.report_or_slice = None
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, self.report_slice)
        queued_slices = REGISTRY.get_sample_value('queued_report_slices')
        self.assertEqual(queued_slices, 1)

    async def async_test_delegate_state(self):
        """Set up the test for delegate state."""
        self.report_slice.state = ReportSlice.VALIDATED
        self.report_slice.report_platform_id = self.uuid
        self.report_slice.candidate_hosts = json.dumps([
            {str(self.uuid3): {'ip_addresses': 'value', 'name': 'value'},
             'cause': report_slice_processor.FAILED_UPLOAD}])
        self.report_slice.failed_hosts = json.dumps(
            [{str(self.uuid2): {'ip_addresses': 'value', 'name': 'value'},
              'cause': abstract_processor.FAILED_VALIDATION}])
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice

        def upload_side_effect():
            """Transition the state to uploaded."""
            self.processor.state = ReportSlice.HOSTS_UPLOADED
            self.report_slice.state = ReportSlice.HOSTS_UPLOADED
            self.report_slice.save()

        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor.transition_to_hosts_uploaded',
                side_effect=upload_side_effect):
            await self.processor.delegate_state()
            self.check_variables_are_reset()

        # test pending state for delegate
        self.report_slice.state = ReportSlice.PENDING
        self.processor.report_or_slice = self.report_slice
        await self.processor.delegate_state()
        self.check_variables_are_reset()

    def test_run_delegate(self):
        """Test the async function delegate state."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_delegate_state)
        event_loop.run_until_complete(coro())
        event_loop.close()

    def test_update_slice_state(self):
        """Test updating the slice state."""
        self.report_slice.failed_hosts = json.dumps([])
        self.report_slice.save()
        report_json = {
            'request_id': '234332',
            'report_id': 1,
            'report_type': 'deployments',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': {str(self.uuid): {'key': 'value'}}}
        failed_hosts = [{str(self.uuid6): {'etc_machine_id': 'value'}},
                        {str(self.uuid7): {'subscription_manager_id': 'value'}}]
        self.processor.report_or_slice = self.report_slice
        self.processor.next_state = ReportSlice.VALIDATED
        options = {'report_json': report_json,
                   'failed_hosts': failed_hosts}
        self.processor.update_object_state(options=options)
        self.assertEqual(json.loads(self.report_slice.report_json), report_json)
        self.assertEqual(json.loads(self.report_slice.failed_hosts), failed_hosts)

    def test_transition_to_validated_general_exception(self):
        """Test that when a general exception is raised, we don't pass validation."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice

        def validate_side_effect():
            """Transition the state to downloaded."""
            raise Exception('Test')

        with patch('processor.report_slice_processor.'
                   'ReportSliceProcessor._validate_report_details',
                   side_effect=validate_side_effect):
            self.processor.transition_to_validated()
            self.assertEqual(self.report_slice.state, ReportSlice.RETRY_VALIDATION)
            self.assertEqual(self.report_slice.retry_count, 1)

    def test_transition_to_validated(self):
        """Test that when a general exception is raised, we don't pass validation."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        report_json = {
            'request_id': '234332',
            'report_slice_id': '384794738',
            'hosts': [{'ip_addresses': 'value'}]}
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.transition_to_validated()
        self.assertEqual(self.report_slice.state, ReportSlice.VALIDATED)
        self.assertEqual(self.report_slice.retry_count, 0)

    def test_transition_to_validated_failed(self):
        """Test report missing slice id."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        report_json = {
            'request_id': '234332',
            'report_id': 1,
            'report_type': 'insights',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': {str(self.uuid): {'ip_addresses': 'value'}}}
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.transition_to_validated()
        self.assertEqual(self.report_slice.state, ReportSlice.FAILED_VALIDATION)
        self.assertEqual(self.report_slice.retry_count, 0)
        self.assertEqual(self.report_slice.ready_to_archive, True)

    def test_moved_candidates_to_failed(self):
        """Test that we reset candidates after moving them to failed."""
        candidates = [{self.uuid: {'bios_uuid': 'value', 'name': 'value'}}]
        self.processor.candidate_hosts = candidates
        self.processor.failed_hosts = [
            {self.uuid2: {'bios_uuid': 'value', 'name': 'value'},
             'cause': abstract_processor.FAILED_VALIDATION}]
        self.processor.move_candidates_to_failed()
        self.assertEqual(self.processor.candidate_hosts, [])
        for host in candidates:
            self.assertIn(host, self.processor.failed_hosts)

    def test_determine_retry_limit(self):
        """Test the determine retry method when the retry is at the limit."""
        candidates = [{str(self.uuid3): {'ip_addresses': 'value', 'name': 'value'},
                       'cause': report_slice_processor.FAILED_UPLOAD}]
        self.report_slice.state = ReportSlice.VALIDATED
        self.report_slice.retry_count = 4
        self.report_slice.candidate_hosts = json.dumps(candidates)
        self.report_slice.failed_hosts = json.dumps([])
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.candidate_hosts = candidates
        self.processor.failed_hosts = []
        self.processor.determine_retry(ReportSlice.FAILED_HOSTS_UPLOAD,
                                       ReportSlice.VALIDATED)
        self.assertEqual(self.report_slice.state, ReportSlice.FAILED_HOSTS_UPLOAD)
        self.assertEqual(json.loads(self.report_slice.candidate_hosts), [])
        for host in candidates:
            self.assertIn(host, json.loads(self.report_slice.failed_hosts))

    async def async_test_transition_to_hosts_uploaded(self):
        """Test the transition to hosts being uploaded."""
        hosts = [{str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value',
                                   'system_platform_id': str(self.uuid)}},
                 {str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid2)}},
                 {str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid3)}},
                 {str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid4)}},
                 {str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid5)}},
                 {str(self.uuid6): {'etc_machine_id': 'value',
                                    'system_platform_id': str(self.uuid6)}},
                 {str(self.uuid7): {'subscription_manager_id': 'value',
                                    'system_platform_id': str(self.uuid7)}}]
        self.report_slice.failed_hosts = []
        self.report_slice.candidate_hosts = json.dumps(hosts)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.candidate_hosts = hosts
        self.processor._upload_to_host_inventory_via_kafka = CoroutineMock(
            return_value=[])
        await self.processor.transition_to_hosts_uploaded()
        self.assertEqual(json.loads(self.report_slice.candidate_hosts), [])
        self.assertEqual(self.report_slice.state, ReportSlice.HOSTS_UPLOADED)

    def test_transition_to_hosts_uploaded(self):
        """Test the async hosts uploaded successful."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_transition_to_hosts_uploaded)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_hosts_uploaded_kafka_mode(self):
        """Test the transition to hosts being uploaded."""
        hosts = [{str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value',
                                   'system_platform_id': str(self.uuid)}},
                 {str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid2)}},
                 {str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid3)}},
                 {str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid4)}},
                 {str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo',
                                    'system_platform_id': str(self.uuid5)}},
                 {str(self.uuid6): {'etc_machine_id': 'value',
                                    'system_platform_id': str(self.uuid6)}},
                 {str(self.uuid7): {'subscription_manager_id': 'value',
                                    'system_platform_id': str(self.uuid7)}}]
        self.report_slice.failed_hosts = []
        self.report_slice.candidate_hosts = json.dumps(hosts)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.candidate_hosts = hosts
        self.processor._upload_to_host_inventory_via_kafka = CoroutineMock(
            return_value=[])
        await self.processor.transition_to_hosts_uploaded()
        self.assertEqual(json.loads(self.report_slice.candidate_hosts), [])
        self.assertEqual(self.report_slice.state, ReportSlice.HOSTS_UPLOADED)

    def test_transition_to_hosts_uploaded_kafka_mode(self):
        """Test the async hosts uploaded successful."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_transition_to_hosts_uploaded)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_hosts_uploaded_no_candidates(self):
        """Test the transition to hosts being uploaded."""
        self.report_record.ready_to_archive = True
        self.report_record.save()
        faulty_report = ReportSlice(
            account='987',
            report_platform_id=str(self.uuid2),
            report_slice_id=str(self.uuid),
            state=ReportSlice.NEW,
            report_json=json.dumps(self.report_json),
            state_info=json.dumps([ReportSlice.PENDING, ReportSlice.NEW]),
            last_update_time=datetime.now(pytz.utc),
            candidate_hosts=json.dumps({}),
            failed_hosts=json.dumps([]),
            hosts_count=10,
            retry_count=0)
        faulty_report.save()
        self.processor.report_or_slice = faulty_report
        self.processor.account_number = '987'
        self.processor.state = faulty_report.state
        self.processor.report_platform_id = self.uuid2
        self.processor.report_json = self.report_json
        self.processor.candidate_hosts = {}
        await self.processor.transition_to_hosts_uploaded()
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_test_transition_to_hosts_uploaded_no_candidates(self):
        """Test the async hosts uploaded no candidates."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_transition_to_hosts_uploaded_no_candidates)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_hosts_uploaded_exception(self):
        """Test the transition to hosts being uploaded."""
        hosts = {str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value'},
                 str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo'},
                 str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo'},
                 str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo'},
                 str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo'},
                 str(self.uuid6): {'etc_machine_id': 'value'},
                 str(self.uuid7): {'subscription_manager_id': 'value'}}
        self.processor.candidate_hosts = hosts
        self.processor.report_or_slice = self.report_slice

        def hosts_upload_side_effect():
            raise Exception('Test')

        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor._upload_to_host_inventory_via_kafka',
                side_effect=hosts_upload_side_effect):
            await self.processor.transition_to_hosts_uploaded()
            self.assertEqual(self.report_slice.state, Report.VALIDATED)
            self.assertEqual(self.report_slice.retry_count, 1)

    def test_test_transition_to_hosts_uploaded_exception(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_transition_to_hosts_uploaded_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_host_inventory_via_kafka(self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {
            str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value'},
            str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo'},
            str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo'},
            str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo'},
            str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo'},
            str(self.uuid6): {'etc_machine_id': 'value'},
            str(self.uuid7): {'subscription_manager_id': 'value'},
            str(self.uuid8): {'system_profile': {'os_release': '7',
                                                 'os_kernel_version': '2.6.32'}
                              }}
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS
        )
        test_producer.start = CoroutineMock()
        test_producer.send = CoroutineMock()
        test_producer.stop = CoroutineMock()
        with patch('processor.report_slice_processor.AIOKafkaProducer',
                   return_value=test_producer):
            with patch('processor.report_slice_processor.asyncio.wait',
                       side_effect=None):
                # all though we are not asserting any results, the test here is
                # that no error was raised
                await self.processor._upload_to_host_inventory_via_kafka(hosts)

    def test_upload_to_host_inventory_via_kafka(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_upload_to_host_inventory_via_kafka)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_host_inventory_via_kafka_exception(self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value'},
                 str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo'},
                 str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo'},
                 str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo'},
                 str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo'},
                 str(self.uuid6): {'etc_machine_id': 'value'},
                 str(self.uuid7): {'subscription_manager_id': 'value'}}
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS
        )

        # test KafkaConnectionException
        def raise_kafka_error():
            """Raise a kafka error."""
            raise KafkaConnectionError('Test')

        test_producer.start = CoroutineMock(side_effect=raise_kafka_error)
        test_producer.send = CoroutineMock()
        test_producer.stop = CoroutineMock()
        with self.assertRaises(msg_handler.KafkaMsgHandlerError):
            with patch('processor.report_slice_processor.AIOKafkaProducer',
                       return_value=test_producer):
                await self.processor._upload_to_host_inventory_via_kafka(hosts)

    def test_upload_to_host_inventory_via_kafka_exception(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_host_inventory_via_kafka_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_host_inventory_via_kafka_send_exception(self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value'},
                 str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo'},
                 str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo'},
                 str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo'},
                 str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo'},
                 str(self.uuid6): {'etc_machine_id': 'value'},
                 str(self.uuid7): {'subscription_manager_id': 'value'}}
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS
        )

        # test KafkaConnectionException
        def raise_error():
            """Raise a general error."""
            raise Exception('Test')

        test_producer.start = CoroutineMock()
        test_producer.send = CoroutineMock(side_effect=raise_error)
        test_producer.stop = CoroutineMock()
        with self.assertRaises(msg_handler.KafkaMsgHandlerError):
            with patch('processor.report_slice_processor.AIOKafkaProducer',
                       return_value=test_producer):
                await self.processor._upload_to_host_inventory_via_kafka(hosts)

    def test_upload_to_host_inventory_via_kafka_send_exception(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_host_inventory_via_kafka_send_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    def test_archive_report_and_slices_in_failed_state(self):
        """Test the archive method in a failed state."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = True
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.state = ReportSlice.FAILED_HOSTS_UPLOAD
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(account=self.report_record.account)
        archived_slice = ReportSliceArchive.objects.get(
            report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(archived.report_platform_id), str(self.uuid))
        self.assertEqual(str(archived_slice.report_platform_id), str(self.uuid))
        self.assertIsNotNone(archived_slice.processing_end_time)
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archive_report_and_slices_in_success_state(self):
        """Test the archive method in a failed state."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = True
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.state = ReportSlice.HOSTS_UPLOADED
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(account=self.report_record.account)
        archived_slice = ReportSliceArchive.objects.get(
            report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(archived.report_platform_id), str(self.uuid))
        self.assertEqual(str(archived_slice.report_platform_id), str(self.uuid))
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archive_report_and_slices_not_ready(self):
        """Test the archive method with slice not ready."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = False
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        existing = Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        with self.assertRaises(ReportArchive.DoesNotExist):
            ReportArchive.objects.get(account=self.report_record.account)
        with self.assertRaises(ReportSliceArchive.DoesNotExist):
            ReportSliceArchive.objects.get(
                report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(existing.report_platform_id), str(self.uuid))
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_get_stale_time(self):
        """Test the get stale time method."""
        self.processor.report_or_slice = self.report_record
        self.processor.report_or_slice.source = 'satellite'
        self.processor.report_or_slice.save()
        current_time = datetime.utcnow()
        stale_time = current_time + timedelta(hours=int(SATELLITE_HOST_TTL))
        expected = stale_time.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
        actual = self.processor.get_stale_time()
        # the format looks like this: 2019-11-14T19:58:13.037Z
        # by cutting off the last 13 i am comparing 2019-11-14T
        # which is the year/month/day
        self.assertEqual(expected[:-13], actual[:-13])

    def test_transform_os_release(self):
        """Test transform host os_release."""
        host = {'system_profile': {
            'os_release': 'Red Hat Enterprise Linux Server 6.10 (Santiago)'
        }}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {'operating_system': {
            'major': '6', 'minor': '10', 'name': 'RHEL'}, 'os_release': '6.10'}})

    def test_do_not_transform_when_only_version(self):
        """Test do not transform os_release when only version."""
        host = {'system_profile': {'os_release': '7'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {'os_release': '7'}})

    def test_remove_os_release_when_no_version(self):
        """Test remove host os_release."""
        host = {
            'system_profile': {
                'os_release': 'Red Hat Enterprise Linux Server'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {}})

    def test_remove_os_release_when_no_version_with_parentheses(self):
        """Test remove host os_release when include empty parentheses."""
        host = {'system_profile': {'os_release': '  ()'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {}})

    def test_remove_os_release_when_only_string_in_parentheses(self):
        """Test remove host os_release when only string in parentheses."""
        host = {'system_profile': {'os_release': '  (Core)'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {}})

    def test_remove_os_release_when_empty_string(self):
        """Test remove host os_release when empty string."""
        host = {'system_profile': {'os_release': ''}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {}})

    def test_transform_os_release_when_non_rhel_os(self):
        """Test transform host os_release when non rhel."""
        host = {'system_profile': {'os_release': 'openSUSE Leap 15.3'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {'os_release': '15.3'}})

    def test_transform_os_release_when_centos(self):
        """Test transform host os_release when centos."""
        host = {'system_profile': {'os_release': 'CentOS Linux 7 (Core)'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(host, {'system_profile': {'operating_system': {
            'major': '7', 'minor': '0', 'name': 'CentOS'}, 'os_release': '7'}})

    def test_transform_os_fields(self):
        """Test transform os fields."""
        host = {'system_profile': {
            'os_release': '7', 'os_kernel_version': '3.10.0-1127.el7.x86_64'
        }}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {'system_profile': {
                'os_release': '7', 'os_kernel_version': '3.10.0'}})

    def test_do_not_tranform_os_fields(self):
        """Test do not transform os fields when already in format."""
        host = {'system_profile': {
            'os_release': '7', 'os_kernel_version': '2.6.32'}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host, {'system_profile': {
                'os_release': '7', 'os_kernel_version': '2.6.32'}})

    def test_do_not_tranform_os_release_with_number_field(self):
        """Test do not transform os release when passed as number."""
        host = {'system_profile': {'os_release': 7}}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {'system_profile': {'os_release': 7}}
        )

    def test_match_regex_and_find_os_details(self):
        """Test match Regex with os_release and return os_version."""
        host = {'system_profile': {
            'os_release': 'Red Hat Enterprise Linux Server 7'}}
        host_os_version = '7'
        os_version = self.processor._match_regex_and_find_os_details(
            host['system_profile']['os_release'])
        self.assertEqual(host_os_version, os_version['major'])

    def test_remove_display_name(self):
        """Test remove host display_name."""
        host = {'display_name': 'test.example.com'}
        host, _ = self.processor._remove_display_name(host)
        self.assertEqual(host, {})

    def test_remove_empty_ip_addresses(self):
        """Test remove host ip_addresses."""
        host = {
            'ip_addresses': []}
        host, _ = self.processor._remove_empty_ip_addresses(host)
        self.assertEqual(host, {})

    def test_transform_mac_addresses(self):
        """Test transform mac_addresses."""
        host = {
            'mac_addresses': []}
        host, _ = self.processor._transform_mac_addresses(host)
        self.assertEqual(host, {})

    def test_remove_both_empty_ip_mac_addresses(self):
        """Test remove both empty ip and mac addresses."""
        host = {}
        host, _ = self.processor._remove_empty_ip_addresses(host)
        host, _ = self.processor._transform_mac_addresses(host)
        self.assertEqual(host, {})

    def test_do_not_remove_set_ip_addresses(self):
        """Test do not remove set host ip_addresses."""
        host = {
            'ip_addresses': ['192.168.10.10']}
        host, _ = self.processor._remove_empty_ip_addresses(host)
        self.assertEqual(host, {'ip_addresses': ['192.168.10.10']})

    def test_do_not_remove_set_mac_addresses(self):
        """Test do not remove set host mac_addresses."""
        host = {
            'mac_addresses': ['aa:bb:00:11:22:33']}
        host, _ = self.processor._transform_mac_addresses(host)
        self.assertEqual(host, {'mac_addresses': ['aa:bb:00:11:22:33']})

    def test_transform_mtu_to_integer(self):
        """Test mtu transformation for host."""
        host = {
            'system_profile': {
                'network_interfaces': [
                    {'ipv4_addresses': [], 'ipv6_addresses': [],
                     'mtu': 1400, 'name': 'eth0'},
                    {'ipv4_addresses': [], 'ipv6_addresses': [],
                     'mtu': '1500', 'name': 'eth1'}]
            }}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {
                'system_profile': {
                    'network_interfaces': [
                        {'ipv4_addresses': [], 'ipv6_addresses': [],
                         'mtu': 1400, 'name': 'eth0'},
                        {'ipv4_addresses': [], 'ipv6_addresses': [],
                         'mtu': 1500, 'name': 'eth1'}]
                }
            })

    def test_do_not_run_mtu_transformation_when_none(self):
        """Test not to run mtu transformation when it is None."""
        host = {
            'system_profile': {
                'network_interfaces': [
                    {'ipv4_addresses': [], 'ipv6_addresses': [],
                     'mtu': None, 'name': 'eth0'}]
            }}

        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {
                'system_profile': {
                    'network_interfaces': [
                        {'ipv4_addresses': [], 'ipv6_addresses': [],
                         'mtu': None, 'name': 'eth0'}]
                }
            })

    def test_do_not_run_mtu_transformation_when_not_exists(self):
        """Test not to run mtu transformation when it doesn't exist."""
        host = {
            'system_profile': {
                'network_interfaces': [
                    {'ipv4_addresses': [], 'ipv6_addresses': [],
                     'name': 'eth0'}]
            }}

        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {
                'system_profile': {
                    'network_interfaces': [
                        {'ipv4_addresses': [], 'ipv6_addresses': [],
                         'name': 'eth0'}]
                }
            })

    def test_remove_nic_when_empty_string_in_name(self):
        """Test to remove network_interface when name is empty."""
        host = {
            'system_profile': {
                'network_interfaces': [
                    {'ipv4_addresses': [], 'ipv6_addresses': [], 'name': ''},
                    {'ipv4_addresses': [], 'ipv6_addresses': []},
                    {'ipv4_addresses': [],
                     'ipv6_addresses': [], 'name': 'eth0'}
                ]
            }}

        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {
                'system_profile': {
                    'network_interfaces': [
                        {'ipv4_addresses': [], 'ipv6_addresses': [],
                         'name': 'eth0'}]
                }
            })

    def test_remove_empty_strings_in_ipv6_addresses(self):
        """Test to verify transformation for 'ipv6 addresses' in host."""
        ipv6_address = '2021:0db8:85a3:0000:0000:8a2e:0370:7335'
        host = {
            'system_profile': {
                'network_interfaces': [
                    {'ipv4_addresses': [],
                     'ipv6_addresses': ['', ipv6_address, ''],
                     'name': 'eth0'},
                    {'ipv4_addresses': [],
                     'ipv6_addresses': [''], 'name': 'eth1'}]
            }}

        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {
                'system_profile': {
                    'network_interfaces': [
                        {'ipv4_addresses': [],
                         'ipv6_addresses': [ipv6_address],
                         'name': 'eth0'},
                        {'ipv4_addresses': [],
                         'ipv6_addresses': [], 'name': 'eth1'}]
                }
            })
        nics = host['system_profile']['network_interfaces']
        self.assertEqual(len(nics), 2)
        filtered_nics = [nic for nic in nics if nic.get('name') == 'eth0']
        self.assertTrue(len(filtered_nics))
        self.assertEqual(len(filtered_nics[0]['ipv6_addresses']), 1)

    def test_remove_invalid_bios_uuid(self):
        """Test remove invalid bios UUID."""
        host = {
            'fqdn': 'virt-who.example.com',
            'bios_uuid': '45AA7104-5CB0-4A75-945D-7173C8DC5744443'
        }
        host, _ = self.processor._remove_invalid_bios_uuid(host)
        self.assertEqual(host, {'fqdn': 'virt-who.example.com'})

    def test_remove_invalid_bios_uuid_of_boolean_type(self):
        """Test remove invalid bios UUID of boolean type."""
        host = {
            'fqdn': 'virt-who.example.com',
            'bios_uuid': True
        }
        host, _ = self.processor._remove_invalid_bios_uuid(host)
        self.assertEqual(host, {'fqdn': 'virt-who.example.com'})

    def test_remove_invalid_bios_uuid_of_number_type(self):
        """Test remove invalid bios UUID of number type."""
        host = {
            'fqdn': 'virt-who.example.com',
            'bios_uuid': 100
        }
        host, _ = self.processor._remove_invalid_bios_uuid(host)
        self.assertEqual(host, {'fqdn': 'virt-who.example.com'})

    def test_remove_empty_bios_uuid(self):
        """Test remove empty bios UUID field."""
        host = {
            'fqdn': 'virt-who.example.com',
            'bios_uuid': ''
        }
        host, _ = self.processor._remove_invalid_bios_uuid(host)
        self.assertEqual(host, {'fqdn': 'virt-who.example.com'})

    def test_do_not_remove_valid_bios_uuid(self):
        """Test do not remove valid bios UUID."""
        host = {
            'fqdn': 'virt-who.example.com',
            'bios_uuid': '123e4567-e89b-12d3-a456-426614174000'
        }
        new_host, _ = self.processor._remove_invalid_bios_uuid(host)
        self.assertEqual(new_host, host)

    def test_bios_uuid_validation_should_be_case_insensitive(self):
        """Test bios UUID validation should be case insensitive."""
        host = {
            'fqdn': 'virt-who.example.com',
            'bios_uuid': '801CA199-9402-41CE-98DC-F3AA6E5BC6B3'
        }
        new_host, _ = self.processor._remove_invalid_bios_uuid(host)
        self.assertEqual(new_host, host)

    def test_transform_tags_value_to_string(self):
        """Test tags transformation for host."""
        host = {'tags': [
            {
                'namespace': 'satellite_parameter',
                'key': 'host_registration_insights',
                'value': True
            },
            {
                'namespace': 'satellite_parameter',
                'key': 'host_registration_remote_execution',
                'value': False
            },
            {
                'namespace': 'satellite',
                'key': 'organization_id',
                'value': 1
            }
        ]}
        host = self.processor._transform_single_host(
            self.report_record.request_id, '123', host
        )
        self.assertEqual(
            host,
            {'tags': [
                {
                    'namespace': 'satellite_parameter',
                    'key': 'host_registration_insights',
                    'value': 'true'
                },
                {
                    'namespace': 'satellite_parameter',
                    'key': 'host_registration_remote_execution',
                    'value': 'false'
                },
                {
                    'namespace': 'satellite',
                    'key': 'organization_id',
                    'value': '1'
                }
            ]}
        )

    def test_remove_installed_packages(self):
        """Test that installed_packages is removed when the message size exceeds the Kafka limit."""
        host = {
            'system_profile': {
                'installed_packages': [
                    'pkg1', 'pkg2', 'pkg3'
                ]
            }
        }
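        # _remove_installed_packages only applies when the serialized host
        # would exceed the producer's max request size; this tiny host is
        # expected to take the else branch and stay unchanged.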
        host_payload = bytes(json.dumps(host), 'utf-8')
        if len(host_payload) >= KAFKA_PRODUCER_OVERRIDE_MAX_REQUEST_SIZE:
            host, _ = self.processor._remove_installed_packages(host)
            self.assertEqual(
                host,
                {
                    'system_profile': {}
                }
            )
        else:
            self.assertEqual(
                host,
                {
                    'system_profile': {
                        'installed_packages': ['pkg1', 'pkg2', 'pkg3']
                    }
                }
            )
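A minimal sketch of the size-guard pattern the last test exercises. The limit value and helper below are illustrative stand-ins, not the processor's actual implementation:

import json

# Assumed limit (1 MiB); the real KAFKA_PRODUCER_OVERRIDE_MAX_REQUEST_SIZE
# is configured elsewhere in the service.
MAX_REQUEST_SIZE = 1024 * 1024

def remove_installed_packages(host):
    """Drop installed_packages so the serialized host fits in one message."""
    host.get('system_profile', {}).pop('installed_packages', None)
    return host

host = {'system_profile': {'installed_packages': ['pkg1', 'pkg2', 'pkg3']}}
payload = bytes(json.dumps(host), 'utf-8')
if len(payload) >= MAX_REQUEST_SIZE:
    host = remove_installed_packages(host)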
Example #17
from api.models import User, Report, ImageReport, Comment
from api.serializers import UserSerializer, ReportSerializer, ImageReportSerializer, CommentSerializer
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser

user = User(name='Horacio',
            email='*****@*****.**',
            password='******',
            profile_picture='api/media/profile_pic.png')
user.save()

# get id
report = Report(user_fk=user,
                name_subject='Antonio Campos',
                status='encontrado',
                gender='masculino',
                birth_date='1993-02-02',
                state='Nayarit',
                city='San Blas',
                missing_date='2014-04-02',
                description='Le encanta la polla al mojo de ajo')
report.save()

imageReport = ImageReport(report_fk=report,
                          image='api/media/default/missing_pic.png')
imageReport.save()

comment = Comment(report_fk=report,
                  comment_date='2014-04-02',
                  content='Me encontre un calcetin mojado')
comment.save()
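The '# get id' note above refers to the auto primary key Django assigns on save(); a short illustration (report_set assumes Django's default related_name for the user_fk foreign key):

# user.id and report.id are populated once save() has run
print(user.id, report.id)
# forward foreign-key traversal
assert report.user_fk == user
# reverse traversal via the default related manager (assumed related_name)
assert report in user.report_set.all()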
Example #18
    def test_calculating_queued_reports(self):
        """Test the calculate_queued_reports method."""
        status_info = Status()
        current_time = datetime.now(pytz.utc)
        self.report_record.state = Report.NEW
        self.report_record.save()
        reports_to_process = self.processor.calculate_queued_objects(
            current_time, status_info)
        self.assertEqual(reports_to_process, 1)

        min_old_time = current_time - timedelta(hours=8)
        older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                              account='4321',
                              report_platform_id=self.uuid2,
                              state=Report.STARTED,
                              state_info=json.dumps([Report.NEW]),
                              last_update_time=min_old_time,
                              retry_count=1,
                              retry_type=Report.TIME)
        older_report.save()

        retry_commit_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                     account='4321',
                                     report_platform_id=self.uuid2,
                                     state=Report.DOWNLOADED,
                                     state_info=json.dumps([Report.NEW]),
                                     last_update_time=min_old_time,
                                     git_commit='3948384729',
                                     retry_type=Report.GIT_COMMIT,
                                     retry_count=1)
        retry_commit_report.save()

        # create some reports that should not be counted
        not_old_enough = current_time - timedelta(hours=1)
        too_young_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                  account='4321',
                                  report_platform_id=self.uuid2,
                                  state=Report.DOWNLOADED,
                                  state_info=json.dumps([Report.NEW]),
                                  last_update_time=not_old_enough,
                                  git_commit='3948384729',
                                  retry_type=Report.TIME,
                                  retry_count=1)
        too_young_report.save()

        same_commit_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                    account='4321',
                                    report_platform_id=self.uuid2,
                                    state=Report.DOWNLOADED,
                                    state_info=json.dumps([Report.NEW]),
                                    last_update_time=min_old_time,
                                    git_commit=status_info.git_commit,
                                    retry_type=Report.GIT_COMMIT,
                                    retry_count=1)
        same_commit_report.save()

        reports_to_process = self.processor.calculate_queued_objects(
            current_time, status_info)
        self.assertEqual(reports_to_process, 3)

        # delete the older report object
        Report.objects.get(id=older_report.id).delete()
        Report.objects.get(id=retry_commit_report.id).delete()
        Report.objects.get(id=too_young_report.id).delete()
        Report.objects.get(id=same_commit_report.id).delete()
Example #19
import random

from api.app import db
from api.models import Report, Profile, Image

LAT = 43.655305
LNG = -79.402269

if __name__ == "__main__":
    for i in range(2):
        p = Profile()
        p.license_plate = ''.join(random.choice('ABCDEFGHIJKLMNOP1234567890') for _ in range(8))
        p.vehicle = random.choice(['SUV', 'Sedan', 'Truck', 'Van'])

        for j in range(random.randint(2, 4)):
            r = Report(
                reporter_id=random.randint(0, 345245),
                report_type=random.choice(['hit', 'witness', 'red-light', 'collision']),
                timestamp=1566682828 + random.randint(-10000, 10000),
                longitude=LNG + random.random(),
                latitude=LAT + random.random(),
                analysis_complete=False,
                video_id=None
            )

            db.session.add(r)
            db.session.commit()

            p.reports.append(r)

        db.session.add(p)
        db.session.commit()
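A quick sanity check of the seeded rows, reusing db and Report from the imports above (standard SQLAlchemy query API):

# count everything that was just committed
total = db.session.query(Report).count()
# all seeded reports were created with analysis_complete=False
unanalyzed = db.session.query(Report).filter_by(analysis_complete=False).count()
print(total, unanalyzed)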
Example #20
from api.models import User, Report, ImageReport, Comment
from api.serializers import UserSerializer, ReportSerializer, ImageReportSerializer, CommentSerializer
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser

user = User(name='Horacio', email='*****@*****.**', password='******', profile_picture='api/media/profile_pic.png')
user.save()

# get id
report = Report(user_fk=user,
                name_subject='Antonio Campos',
                status='encontrado',
                gender='masculino',
                birth_date='1993-02-02',
                state='Nayarit',
                city='San Blas',
                missing_date='2014-04-02',
                description='Le encanta la polla al mojo de ajo')
report.save()

imageReport = ImageReport(report_fk=report, image='api/media/default/missing_pic.png')
imageReport.save()

comment = Comment(report_fk=report, comment_date='2014-04-02', content='Me encontre un calcetin mojado')
comment.save()

userSerializer = UserSerializer(user)
userSerializer.data

rs = ReportSerializer(report)
rs.data

irs = ImageReportSerializer(imageReport)
irs.data

cs = CommentSerializer(comment)
cs.data

content = JSONRenderer().render(userSerializer.data)
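JSONParser is imported above but never exercised; a minimal sketch of the reverse direction, parsing the rendered bytes back into native data and re-validating (standard DRF API):

import io

# bytes -> native Python data
data = JSONParser().parse(io.BytesIO(content))
# bind to the serializer for validation before reuse
incoming = UserSerializer(data=data)
if incoming.is_valid():
    print(incoming.validated_data)
else:
    print(incoming.errors)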
Example #21
class ReportProcessorTests(TransactionTestCase):
    """Test Cases for the Message processor."""

    def setUp(self):
        """Create test setup."""
        self.payload_url = 'http://insights-upload.com/q/file_to_validate'
        self.uuid = uuid.uuid4()
        self.uuid2 = uuid.uuid4()
        self.uuid3 = uuid.uuid4()
        self.fake_record = test_handler.KafkaMsg(msg_handler.MKT_TOPIC,
                                                 'http://internet.com')
        self.report_consumer = msg_handler.ReportConsumer()
        self.msg = self.report_consumer.unpack_consumer_record(
            self.fake_record)
        self.report_json = {
            'report_id': 1,
            'report_slice_id': str(self.uuid2),
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.report_record = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                    account='1234',
                                    state=Report.NEW,
                                    state_info=json.dumps([Report.NEW]),
                                    last_update_time=datetime.now(pytz.utc),
                                    retry_count=0,
                                    ready_to_archive=False,
                                    source=uuid.uuid4(),
                                    arrival_time=datetime.now(pytz.utc),
                                    processing_start_time=datetime.now(
                                        pytz.utc))
        self.report_record.save()

        self.report_slice = ReportSlice(
            report_platform_id=self.uuid,
            report_slice_id=self.uuid2,
            account='13423',
            report_json=json.dumps(self.report_json),
            state=ReportSlice.NEW,
            state_info=json.dumps([ReportSlice.NEW]),
            retry_count=0,
            last_update_time=datetime.now(pytz.utc),
            report=self.report_record,
            ready_to_archive=True,
            creation_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_slice.save()

        self.processor = report_processor.ReportProcessor()
        self.processor.report = self.report_record

    def check_variables_are_reset(self):
        """Check that report processor members have been cleared."""
        processor_attributes = [
            self.processor.report_platform_id, self.processor.report,
            self.processor.state, self.processor.account_number,
            self.processor.upload_message, self.processor.status,
            self.processor.report_json
        ]
        for attribute in processor_attributes:
            self.assertEqual(attribute, None)

    def test_archiving_report(self):
        """Test that archiving creates an archive, deletes the current report, and resets the processor."""
        report_to_archive = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                   account='4321',
                                   report_platform_id=self.uuid2,
                                   state=Report.NEW,
                                   state_info=json.dumps([Report.NEW]),
                                   last_update_time=datetime.now(pytz.utc),
                                   retry_count=0,
                                   ready_to_archive=True,
                                   arrival_time=datetime.now(pytz.utc),
                                   processing_start_time=datetime.now(
                                       pytz.utc))
        report_to_archive.save()
        self.processor.report_or_slice = report_to_archive
        self.processor.account_number = '4321'
        self.processor.upload_message = self.msg
        self.processor.state = report_to_archive.state
        self.processor.report_platform_id = self.uuid
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=report_to_archive.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(account='4321')
        self.assertEqual(json.loads(archived.state_info), [Report.NEW])
        self.assertIsNotNone(archived.processing_end_time)
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archiving_report_not_ready(self):
        """Test that archiving fails if report not ready to archive."""
        report_to_archive = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                   account='4321',
                                   report_platform_id=self.uuid2,
                                   state=Report.NEW,
                                   state_info=json.dumps([Report.NEW]),
                                   last_update_time=datetime.now(pytz.utc),
                                   retry_count=0,
                                   ready_to_archive=False)
        report_to_archive.save()
        self.processor.report_or_slice = report_to_archive
        self.processor.account_number = '4321'
        self.processor.upload_message = self.msg
        self.processor.state = report_to_archive.state
        self.processor.report_platform_id = self.uuid
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS

        self.processor.archive_report_and_slices()
        # assert the report still exists
        existing_report = Report.objects.get(id=report_to_archive.id)
        self.assertEqual(existing_report, report_to_archive)
        # assert the report archive does not exist
        with self.assertRaises(ReportArchive.DoesNotExist):
            ReportArchive.objects.get(account='4321')
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_deduplicating_report(self):
        """Test that deduplicating archives the duplicate report, deletes it, and resets the processor."""
        self.report_record.report_platform_id = self.uuid
        self.report_record.save()
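        # The pre-existing report above now shares report_platform_id, so
        # report_to_dedup below counts as its duplicate.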
        report_to_dedup = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                 account='4321',
                                 report_platform_id=self.uuid,
                                 state=Report.NEW,
                                 upload_ack_status='success',
                                 state_info=json.dumps([Report.NEW]),
                                 last_update_time=datetime.now(pytz.utc),
                                 retry_count=0,
                                 ready_to_archive=True,
                                 arrival_time=datetime.now(pytz.utc),
                                 processing_start_time=datetime.now(pytz.utc))
        report_to_dedup.save()
        self.processor.report_or_slice = report_to_dedup
        self.processor.account_number = '4321'
        self.processor.upload_message = self.msg
        self.processor.state = report_to_dedup.state
        self.processor.report_platform_id = self.uuid
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS

        self.processor.deduplicate_reports()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=report_to_dedup.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(account='4321')
        self.assertEqual(json.loads(archived.state_info), [Report.NEW])
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_determine_retry_limit(self):
        """Test the determine retry method when the retry is at the limit."""
        self.report_record.state = Report.STARTED
        self.report_record.retry_count = 4
        self.report_record.save()
        self.processor.report_or_slice = self.report_record
        self.processor.determine_retry(Report.FAILED_DOWNLOAD, Report.STARTED)
        self.assertEqual(self.report_record.state, Report.FAILED_DOWNLOAD)
        self.assertEqual(self.report_record.ready_to_archive, True)

    def test_update_report_state(self):
        """Test updating the report state."""
        # set the base line values
        self.report_record.retry_count = 0
        self.report_record.save()
        self.processor.next_state = Report.STARTED
        # set the values we will update with
        self.processor.report_or_slice = self.report_record
        options = {
            'retry': abstract_processor.RETRY.increment,
            'retry_type': Report.GIT_COMMIT,
            'report_platform_id': self.uuid3
        }
        self.processor.update_object_state(options=options)
        self.assertEqual(self.report_record.retry_count, 1)
        self.assertEqual(self.report_record.retry_type, Report.GIT_COMMIT)
        self.assertEqual(self.report_record.report_platform_id, self.uuid3)

    async def async_test_run_method(self):
        """Test the run method."""
        self.report_record.state = Report.NEW
        self.report_record.save()
        self.processor.report_or_slice = None
        self.processor.should_run = True

        def transition_side_effect():
            self.processor.should_run = False

        with patch(
                'processor.abstract_processor.'
                'AbstractProcessor.transition_to_started',
                side_effect=transition_side_effect):
            await self.processor.run()
            self.assertEqual(self.processor.report_or_slice,
                             self.report_record)

    def test_run_method(self):
        """Test the async run function."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
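        # asyncio.coroutine is a no-op for a native async def function and was
        # deprecated in Python 3.8; it is kept here in the legacy pre-await style.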
        coro = asyncio.coroutine(self.async_test_run_method)
        event_loop.run_until_complete(coro())
        event_loop.close()

    def test_assign_report_new(self):
        """Test the assign report function with only a new report."""
        self.report_record.state = Report.NEW
        self.report_record.save()
        self.processor.report = None
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, self.report_record)

    def test_assign_report_oldest_time(self):
        """Test the assign report function with older report."""
        current_time = datetime.now(pytz.utc)
        hours_old_time = current_time - timedelta(hours=9)
        older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                              account='4321',
                              report_platform_id=self.uuid2,
                              state=Report.NEW,
                              state_info=json.dumps([Report.NEW]),
                              last_update_time=hours_old_time,
                              retry_count=1)
        older_report.save()
        self.report_record.state = Report.NEW
        self.report_record.save()
        self.processor.report_or_slice = None
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, older_report)
        # delete the older report object
        Report.objects.get(id=older_report.id).delete()

    def test_assign_report_not_old_enough(self):
        """Test the assign report function with young report."""
        # delete the report record
        Report.objects.get(id=self.report_record.id).delete()
        self.processor.report_or_slice = None
        current_time = datetime.now(pytz.utc)
        min_old_time = current_time - timedelta(minutes=1)
        older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                              account='4321',
                              report_platform_id=self.uuid2,
                              state=Report.STARTED,
                              state_info=json.dumps([Report.NEW]),
                              last_update_time=min_old_time,
                              retry_count=1)
        older_report.save()
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, None)
        # delete the older report object
        Report.objects.get(id=older_report.id).delete()

    def test_assign_report_oldest_commit(self):
        """Test the assign report function with retry type as commit."""
        current_time = datetime.now(pytz.utc)
        twentyminold_time = current_time - timedelta(minutes=20)
        older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                              account='4321',
                              report_platform_id=self.uuid2,
                              state=Report.DOWNLOADED,
                              state_info=json.dumps(
                                  [Report.NEW, Report.DOWNLOADED]),
                              last_update_time=twentyminold_time,
                              retry_count=1,
                              retry_type=Report.GIT_COMMIT,
                              git_commit='1234')
        older_report.save()
        self.report_record.state = Report.DOWNLOADED
        self.report_record.save()
        self.processor.report_or_slice = None
        # the commit should always be different from 1234
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, older_report)
        self.assertEqual(self.processor.report_or_slice.state,
                         Report.DOWNLOADED)
        # delete the older report object
        Report.objects.get(id=older_report.id).delete()

    def test_assign_report_no_reports(self):
        """Test the assign report method with no reports."""
        # delete the report record
        Report.objects.get(id=self.report_record.id).delete()
        self.processor.report_or_slice = None
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, None)

    async def async_test_delegate_state(self):
        """Set up the test for delegate state."""
        self.report_record.state = Report.STARTED
        self.report_record.report_platform_id = self.uuid
        self.report_record.upload_ack_status = report_processor.SUCCESS_CONFIRM_STATUS
        self.report_record.save()
        self.processor.report_or_slice = self.report_record
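        # Patching transition_to_downloaded with a side effect lets the test
        # drive the report through the state machine without a real download.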

        def download_side_effect():
            """Transition the state to downloaded."""
            self.processor.state = Report.DOWNLOADED
            self.report_record.state = Report.DOWNLOADED
            self.report_record.save()

        with patch(
                'processor.report_processor.'
                'ReportProcessor.transition_to_downloaded',
                side_effect=download_side_effect):
            await self.processor.delegate_state()
            self.assertEqual(self.processor.report_platform_id,
                             self.report_record.report_platform_id)
            # self.assertEqual(self.processor.report_or_slice.state, Report.DOWNLOADED)
            self.assertEqual(self.processor.status,
                             self.processor.report.upload_ack_status)

        # test the async function call state
        self.report_record.state = Report.VALIDATED
        self.report_record.save()

        def validation_reported_side_effect():
            """Side effect for async transition method."""
            self.report_record.state = Report.VALIDATION_REPORTED
            self.report_record.save()

        self.processor.transition_to_validation_reported = CoroutineMock(
            side_effect=validation_reported_side_effect)
        await self.processor.delegate_state()

    def test_run_delegate(self):
        """Test the async function delegate state."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_delegate_state)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_delegate_state_exception(self):
        """Set up the test for delegate state with exception."""
        self.report_record.state = Report.STARTED
        self.report_record.report_platform_id = self.uuid
        self.report_record.upload_ack_status = report_processor.SUCCESS_CONFIRM_STATUS
        self.report_record.save()
        self.processor.report_or_slice = self.report_record

        def delegate_side_effect():
            """Transition the state to downloaded."""
            self.processor.should_run = False
            raise Exception('Test')

        with patch('processor.report_processor.ReportProcessor.delegate_state',
                   side_effect=delegate_side_effect):
            await self.processor.run()
            self.check_variables_are_reset()

    def test_run_delegate_exception(self):
        """Test the async function delegate state."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_delegate_state_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    def test_reinit_variables(self):
        """Test that reinitting the variables clears the values."""
        # make sure that the variables have values
        self.processor.report_platform_id = self.uuid
        self.processor.report_or_slice = self.report_record
        self.processor.state = Report.NEW
        self.processor.account_number = '1234'
        self.processor.upload_message = self.msg
        self.processor.report_json = {}
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS
        self.assertEqual(self.processor.report_or_slice, self.report_record)
        self.assertEqual(self.processor.report_platform_id, self.uuid)
        self.assertEqual(self.processor.state, Report.NEW)
        self.assertEqual(self.processor.account_number, '1234')
        self.assertEqual(self.processor.upload_message, self.msg)
        self.assertEqual(self.processor.status,
                         report_processor.SUCCESS_CONFIRM_STATUS)

        # check all of the variables are None after reinitting
        self.processor.reset_variables()
        self.check_variables_are_reset()

    def test_transition_to_started(self):
        """Test the transition to started state."""
        self.report_record.state = Report.NEW
        self.processor.report_or_slice = self.report_record
        self.processor.transition_to_started()
        self.assertEqual(self.report_record.state, Report.STARTED)
        self.assertEqual(json.loads(self.report_record.state_info),
                         [Report.NEW, Report.STARTED])

    def test_transition_to_downloaded(self):
        """Test that the transition to download works successfully."""
        metadata_json = {
            'report_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'source': str(uuid.uuid4()),
            'report_slices': {
                str(self.uuid): {}
            }
        }
        report_json = {
            'report_slice_id': str(self.uuid),
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }

        report_files = {
            '%s.json' % str(self.uuid): report_json,
            'metadata.json': metadata_json
        }
        self.processor.upload_message = {
            'url': self.payload_url,
            'rh_account': '00001'
        }
        self.processor.report_or_slice = self.report_record
        self.processor.account_number = '0001'
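        # create_tar_buffer packs {filename: content} into an in-memory
        # tar archive, standing in for the payload served at payload_url.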
        buffer_content = test_handler.create_tar_buffer(report_files)
        with requests_mock.mock() as mock_req:
            mock_req.get(self.payload_url, content=buffer_content)
            self.processor.transition_to_downloaded()
            self.assertEqual(self.report_record.state, Report.DOWNLOADED)

    def test_transition_to_downloaded_exception_retry(self):
        """Test that the transition to download with retry exception."""
        self.processor.upload_message = {
            'url': self.payload_url,
            'rh_account': '00001'
        }
        self.report_record.state = Report.STARTED
        self.report_record.save()
        self.processor.report_or_slice = self.report_record
        with requests_mock.mock() as mock_req:
            mock_req.get(self.payload_url, exc=requests.exceptions.HTTPError)
            self.processor.transition_to_downloaded()
            self.assertEqual(self.report_record.state, Report.STARTED)
            self.assertEqual(self.report_record.retry_count, 1)

    def test_transition_to_downloaded_exception_fail(self):
        """Test that the transition to download with fail exception."""
        self.processor.upload_message = {
            'url': self.payload_url,
            'rh_account': '00001'
        }
        self.report_record.state = Report.STARTED
        self.report_record.save()
        self.processor.report_or_slice = self.report_record

        def download_side_effect():
            """Raise a FailDownloadException."""
            raise report_processor.FailDownloadException()

        with patch(
                'processor.report_processor.ReportProcessor._download_report',
                side_effect=download_side_effect):
            self.processor.transition_to_downloaded()
            self.assertEqual(self.report_record.state, Report.FAILED_DOWNLOAD)

    def test_transition_to_validated_report_exception(self):
        """Test that a report with no report_slice_id is still marked as validated."""
        self.report_record.state = Report.DOWNLOADED
        self.report_record.save()
        report_json = {
            'report_id': 1,
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.report_slice.state = ReportSlice.PENDING
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_record
        self.processor.transition_to_validated()
        self.assertEqual(self.report_record.state, Report.VALIDATED)
        self.assertEqual(self.report_record.upload_ack_status,
                         report_processor.FAILURE_CONFIRM_STATUS)
        self.assertEqual(self.report_record.retry_count, 0)

    def test_transition_to_validated_general_exception(self):
        """Test that a general exception still marks the report validated, with failure status."""
        self.report_record.state = Report.DOWNLOADED
        self.report_record.save()
        self.processor.report_or_slice = self.report_record

        def validate_side_effect():
            """Transition the state to downloaded."""
            raise Exception('Test')

        with patch(
                'processor.report_processor.'
                'ReportProcessor._validate_report_details',
                side_effect=validate_side_effect):
            self.processor.transition_to_validated()
            self.assertEqual(self.report_record.state, Report.VALIDATED)
            self.assertEqual(self.processor.status,
                             report_processor.FAILURE_CONFIRM_STATUS)

    async def async_test_transition_to_validation_reported(self):
        """Set up the test for transitioning to validation reported."""
        self.report_record.state = Report.VALIDATED
        self.report_record.report_platform_id = self.uuid
        self.report_record.upload_ack_status = report_processor.SUCCESS_CONFIRM_STATUS
        self.report_record.save()
        self.processor.report_or_slice = self.report_record
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS
        self.processor.upload_message = {'request_id': self.uuid}

        self.processor._send_confirmation = CoroutineMock()
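        # CoroutineMock above is an asynctest-style mock, keeping the patched
        # _send_confirmation awaitable without sending a real confirmation.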
        await self.processor.transition_to_validation_reported()
        self.assertEqual(self.processor.report.state,
                         Report.VALIDATION_REPORTED)

    def test_transition_to_validation_reported(self):
        """Test the async function to transition to validation reported."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_transition_to_validation_reported)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_validation_reported_exception(self):
        """Set up the test for transitioning to validation reported."""
        self.report_record.state = Report.VALIDATED
        self.report_record.retry_count = 0
        self.report_record.report_platform_id = self.uuid
        self.report_record.upload_ack_status = report_processor.SUCCESS_CONFIRM_STATUS
        self.report_record.save()
        self.processor.report_or_slice = self.report_record
        self.processor.status = report_processor.SUCCESS_CONFIRM_STATUS
        self.processor.upload_message = {'hash': self.uuid}

        def report_side_effect():
            """Transition the state to validation_reported."""
            raise Exception('Test')

        self.processor._send_confirmation = CoroutineMock(
            side_effect=report_side_effect)
        await self.processor.transition_to_validation_reported()
        self.assertEqual(self.report_record.state, Report.VALIDATED)
        self.assertEqual(self.report_record.retry_count, 1)
        self.check_variables_are_reset()

    def test_transition_to_validation_reported_exception(self):
        """Test the async function to transition to validation reported."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_transition_to_validation_reported_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_transition_to_validation_reported_failure_status(self):
        """Set up the test for transitioning to validation reported failure status."""
        report_to_archive = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                   account='43214',
                                   report_platform_id=self.uuid2,
                                   state=Report.VALIDATED,
                                   state_info=json.dumps([Report.NEW]),
                                   last_update_time=datetime.now(pytz.utc),
                                   retry_count=0,
                                   retry_type=Report.TIME,
                                   ready_to_archive=True,
                                   arrival_time=datetime.now(pytz.utc),
                                   processing_start_time=datetime.now(
                                       pytz.utc))
        report_to_archive.upload_ack_status = report_processor.FAILURE_CONFIRM_STATUS
        report_to_archive.save()
        self.processor.report_or_slice = report_to_archive
        self.processor.report_platform_id = self.uuid2
        self.processor.account_number = '43214'
        self.processor.state = Report.VALIDATED
        self.processor.status = report_processor.FAILURE_CONFIRM_STATUS
        self.processor.upload_message = {'request_id': self.uuid}
        self.processor._send_confirmation = CoroutineMock()
        await self.processor.transition_to_validation_reported()
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=report_to_archive.id)
        archived = ReportArchive.objects.get(account='43214')
        self.assertEqual(archived.state, Report.VALIDATION_REPORTED)
        self.assertEqual(archived.upload_ack_status,
                         report_processor.FAILURE_CONFIRM_STATUS)
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_transition_to_validation_reported_failure(self):
        """Test the async function for reporting failure status."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_transition_to_validation_reported_failure_status)
        event_loop.run_until_complete(coro())
        event_loop.close()

    # Tests for the functions that carry out the work, i.e. download/upload
    def test_validate_report_success(self):
        """Test that an MKT report with the correct structure passes validation."""
        self.processor.account_number = '123'
        self.processor.report_or_slice = self.report_record
        self.processor.report_json = {
            'report_id': 1,
            'report_slice_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319-1',
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        valid = self.processor._validate_report_details()
        self.assertEqual(valid, True)

    def test_validate_report_missing_slice_id(self):
        """Test that an MKT report with no report_slice_id fails validation."""
        self.processor.report_json = {
            'report_id': 1,
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.processor.report_or_slice = self.report_record

        with self.assertRaises(msg_handler.MKTReportException):
            self.processor._validate_report_details()

    def test_update_slice_exception(self):
        """Test updating the slice with invalid data."""
        # test that not providing a state inside options causes
        # an exception to be raised and slice is not updated
        self.report_slice.state = ReportSlice.PENDING
        self.report_slice.save()
        self.processor.update_slice_state({}, self.report_slice)
        self.report_slice.refresh_from_db()
        self.assertEqual(self.report_slice.state, ReportSlice.PENDING)

    def test_extract_and_create_slices_success(self):
        """Testing the extract method with valid buffer content."""
        source_uuid = str(uuid.uuid4())
        metadata_json = {
            'report_id': 1,
            'source': source_uuid,
            'source_metadata': {
                'foo': 'bar'
            },
            'report_slices': {
                str(self.uuid): {}
            }
        }
        report_json = {'report_slice_id': str(self.uuid)}
        report_files = {
            'metadata.json': metadata_json,
            '%s.json' % str(self.uuid): report_json
        }
        self.processor.report_or_slice = self.report_record
        self.processor.account_number = '0001'
        buffer_content = test_handler.create_tar_buffer(report_files)
        result = self.processor._extract_and_create_slices(buffer_content)
        expected_result = {
            'report_platform_id': 1,
            'source': source_uuid,
            'source_metadata': {
                'foo': 'bar'
            }
        }
        self.assertEqual(result, expected_result)

    def test_extract_and_create_slices_mismatch(self):
        """Testing the extract method with mismatched metadata content."""
        metadata_json = {
            'report_id': 1,
            'source': str(uuid.uuid4()),
            'source_metadata': {
                'foo': 'bar'
            },
            'report_slices': {
                str(self.uuid): {
                    'number_hosts': 5
                }
            }
        }
        report_json = {'report_slice_id': '1234556'}
        report_files = {
            'metadata.json': metadata_json,
            '%s.json' % str(self.uuid): report_json
        }
        self.processor.report_or_slice = self.report_record
        self.processor.account_number = '0001'
        buffer_content = test_handler.create_tar_buffer(report_files)
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_extract_and_create_slices_metadata_fail(self):
        """Testing the extract method with invalid metadata buffer content."""
        metadata_json = 'myfakeencodedstring'
        slice_uuid = str(self.uuid)
        report_json = {'report_slice_id': slice_uuid}
        report_files = {
            'metadata.json': metadata_json,
            '%s.json' % slice_uuid: report_json
        }
        self.processor.report_or_slice = self.report_record
        self.processor.account_number = '0001'
        buffer_content = test_handler.create_tar_buffer(report_files,
                                                        meta_encoding='utf-16')
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_extract_and_create_slices_slice_fail(self):
        """Testing the extract method with bad slice."""
        metadata_json = {
            'report_id': 1,
            'source': str(uuid.uuid4()),
            'source_metadata': {
                'foo': 'bar'
            },
            'report_slices': {
                str(self.uuid): {}
            }
        }
        report_json = 'myfakeencodedstring'
        report_files = {
            'metadata.json': metadata_json,
            '%s.json' % str(self.uuid): report_json
        }
        self.processor.report_or_slice = self.report_record
        self.processor.account_number = '0001'
        buffer_content = test_handler.create_tar_buffer(report_files,
                                                        encoding='utf-16',
                                                        meta_encoding='utf-8')
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_create_slice_invalid(self):
        """Test the create slice method with an invalid slice."""
        report_json = None
        slice_id = '1234556'
        with self.assertRaises(Exception):
            options = {
                'report_json': report_json,
                'report_slice_id': slice_id,
                'source': str(uuid.uuid4()),
            }
            self.processor.create_report_slice(options)

    def test_extract_and_create_slices_two_reps(self):
        """Testing the extract method with valid buffer content."""
        source_uuid = str(uuid.uuid4())
        metadata_json = {
            'source': source_uuid,
            'report_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'report_slices': {
                str(self.uuid): {}
            }
        }
        report_json = {'report_slice_id': str(self.uuid)}

        report_files = {
            'metadata.json': metadata_json,
            '%s.json' % str(self.uuid): report_json
        }
        self.processor.report_or_slice = self.report_record
        self.processor.account_number = '0001'
        buffer_content = test_handler.create_tar_buffer(report_files)
        result = self.processor._extract_and_create_slices(buffer_content)
        expected_result = {
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'source': source_uuid,
        }
        self.assertEqual(result, expected_result)

    def test_extract_and_create_slices_failure(self):
        """Testing the extract method failure no matching report_slice."""
        metadata_json = {
            'report_id': 1,
            'report_type': 'insights',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'report_slices': {
                str(self.uuid): {}
            }
        }
        report_files = {'metadata.json': metadata_json}
        buffer_content = test_handler.create_tar_buffer(report_files)
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_extract_and_create_slices_failure_invalid_metadata(self):
        """Testing the extract method failure no valid metadata."""
        metadata_json = {
            'report_id': 1,
            'report_type': 'deployments',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'report_slices': {
                str(self.uuid): {}
            }
        }
        report_json = {
            'report_slice_id': str(self.uuid),
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        report_files = {
            'metadata.json': metadata_json,
            '%s.json' % str(self.uuid): report_json
        }
        buffer_content = test_handler.create_tar_buffer(report_files)
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_extract_and_create_slices_failure_no_metadata(self):
        """Testing the extract method failure when metadata.json is missing."""
        report_json = {
            'report_slice_id': '2345322',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        report_files = {'2345322.json': report_json}
        buffer_content = test_handler.create_tar_buffer(report_files)
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test__extract_and_create_slices_failure_invalid_json(self):
        """Testing the extract method failure invalid json."""
        metadata_json = {
            'report_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'source': str(uuid.uuid4()),
            'report_slices': {
                '2345322': {}
            }
        }
        report_json = 'This is not JSON.'
        report_files = {
            '2345322.json': report_json,
            'metadata.json': metadata_json
        }
        buffer_content = test_handler.create_tar_buffer(report_files)
        with self.assertRaises(report_processor.RetryExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test__extract_and_create_slices_failure_no_json(self):
        """Testing the extract method failure when the slice file has no json content."""
        metadata_json = {
            'report_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'source': str(uuid.uuid4()),
            'report_slices': {
                '2345322': {}
            }
        }
        report_json = None
        report_files = {
            '2345322.json': report_json,
            'metadata.json': metadata_json
        }
        buffer_content = test_handler.create_tar_buffer(report_files)
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_download_response_content_bad_url(self):
        """Test to verify download exceptions are handled."""
        with requests_mock.mock() as mock_req:
            mock_req.get(self.payload_url, exc=requests.exceptions.HTTPError)
            with self.assertRaises(report_processor.RetryDownloadException):
                self.processor.upload_message = {'url': self.payload_url}
                self.processor._download_report()

    def test_download_response_content_missing_url(self):
        """Test case where url is missing."""
        with self.assertRaises(report_processor.FailDownloadException):
            self.processor.upload_message = {}
            self.processor._download_report()

    def test_download_report_success(self):
        """Test to verify extracting contents is successful."""
        metadata_json = {
            'report_id': 1,
            'report_type': 'insights',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'report_slices': {
                '2345322': {}
            }
        }
        report_json = {
            'report_slice_id': '2345322',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        report_files = {
            'metadata.json': metadata_json,
            '2345322.json': report_json
        }
        self.processor.upload_message = {
            'url': self.payload_url,
            'rh_account': '00001'
        }
        buffer_content = test_handler.create_tar_buffer(report_files)
        with requests_mock.mock() as mock_req:
            mock_req.get(self.payload_url, content=buffer_content)
            content = self.processor._download_report()
            self.assertEqual(buffer_content, content)
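
    # For orientation, a hedged sketch of the download behavior the tests
    # above and below exercise: _download_report is assumed to require a
    # 'url' key in upload_message (FailDownloadException when missing) and
    # to map HTTP errors and bad status codes to RetryDownloadException.
    # This is an illustration; only the exception names come from the tests.
    def _example_download_report(self, upload_message):
        """Sketch of the assumed _download_report behavior."""
        url = upload_message.get('url')
        if not url:
            raise report_processor.FailDownloadException(
                'upload message has no url')
        try:
            response = requests.get(url)
        except requests.exceptions.RequestException as err:
            raise report_processor.RetryDownloadException(str(err))
        if response.status_code != 200:
            raise report_processor.RetryDownloadException(
                'unexpected status code %s' % response.status_code)
        return response.content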

    def test_download_and_validate_contents_invalid_report(self):
        """Test to verify extracting contents fails when report is invalid."""
        self.processor.report_json = {
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.processor.report_or_slice = self.report_record
        with self.assertRaises(msg_handler.MKTReportException):
            _, _ = self.processor._validate_report_details()

    def test_download_contents_raises_error(self):
        """Test to verify downloading contents fails when error is raised."""
        report_json = {
            'report_id': 1,
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.processor.upload_message = {
            'url': self.payload_url,
            'rh_account': '00001'
        }
        report_files = {'report.json': report_json}
        buffer_content = test_handler.create_tar_buffer(report_files)
        with requests_mock.mock() as mock_req:
            mock_req.get(self.payload_url, content=buffer_content)
            with patch('requests.get',
                       side_effect=requests.exceptions.HTTPError):
                with self.assertRaises(
                        report_processor.RetryDownloadException):
                    self.processor._download_report()

    def test_download_with_404(self):
        """Test downloading a URL and getting 404."""
        with requests_mock.mock() as mock_req:
            mock_req.get(self.payload_url, status_code=404)
            with self.assertRaises(report_processor.RetryDownloadException):
                self.processor.upload_message = {'url': self.payload_url}
                self.processor._download_report()

    def test_value_error__extract_and_create_slices(self):
        """Testing value error when extracting json from tar.gz."""
        invalid_json = '["report_id": 1]'
        tar_buffer = io.BytesIO()
        with tarfile.open(fileobj=tar_buffer, mode='w:gz') as tar_file:
            file_name = 'file.json'
            file_content = invalid_json
            file_buffer = io.BytesIO(file_content.encode('utf-8'))
            info = tarfile.TarInfo(name=file_name)
            info.size = len(file_buffer.getvalue())
            tar_file.addfile(tarinfo=info, fileobj=file_buffer)
        tar_buffer.seek(0)
        buffer_content = tar_buffer.getvalue()
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test_no_json_files__extract_and_create_slices(self):
        """Testing no json files found in tar.gz."""
        invalid_json = '["report_id": 1]'
        tar_buffer = io.BytesIO()
        with tarfile.open(fileobj=tar_buffer, mode='w:gz') as tar_file:
            file_name = 'file.csv'
            file_content = invalid_json
            file_buffer = io.BytesIO(file_content.encode('utf-8'))
            info = tarfile.TarInfo(name=file_name)
            info.size = len(file_buffer.getvalue())
            tar_file.addfile(tarinfo=info, fileobj=file_buffer)
        tar_buffer.seek(0)
        buffer_content = tar_buffer.getvalue()
        with self.assertRaises(report_processor.FailExtractException):
            self.processor._extract_and_create_slices(buffer_content)

    def test__extract_and_create_slices_general_except(self):
        """Testing general exception raises retry exception."""
        def extract_side_effect():
            """Raise general error."""
            raise Exception('Test')

        with patch('processor.report_processor.tarfile.open',
                   side_effect=extract_side_effect):
            with self.assertRaises(report_processor.RetryExtractException):
                self.processor._extract_and_create_slices(None)

    def test_calculating_queued_reports(self):
        """Test the calculate_queued_reports method."""
        status_info = Status()
        current_time = datetime.now(pytz.utc)
        self.report_record.state = Report.NEW
        self.report_record.save()
        reports_to_process = self.processor.calculate_queued_objects(
            current_time, status_info)
        self.assertEqual(reports_to_process, 1)

        min_old_time = current_time - timedelta(hours=8)
        older_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                              account='4321',
                              report_platform_id=self.uuid2,
                              state=Report.STARTED,
                              state_info=json.dumps([Report.NEW]),
                              last_update_time=min_old_time,
                              retry_count=1,
                              retry_type=Report.TIME)
        older_report.save()

        retry_commit_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                     account='4321',
                                     report_platform_id=self.uuid2,
                                     state=Report.DOWNLOADED,
                                     state_info=json.dumps([Report.NEW]),
                                     last_update_time=min_old_time,
                                     git_commit='3948384729',
                                     retry_type=Report.GIT_COMMIT,
                                     retry_count=1)
        retry_commit_report.save()

        # create some reports that should not be counted
        not_old_enough = current_time - timedelta(hours=1)
        too_young_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                  account='4321',
                                  report_platform_id=self.uuid2,
                                  state=Report.DOWNLOADED,
                                  state_info=json.dumps([Report.NEW]),
                                  last_update_time=not_old_enough,
                                  git_commit='3948384729',
                                  retry_type=Report.TIME,
                                  retry_count=1)
        too_young_report.save()

        same_commit_report = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                    account='4321',
                                    report_platform_id=self.uuid2,
                                    state=Report.DOWNLOADED,
                                    state_info=json.dumps([Report.NEW]),
                                    last_update_time=min_old_time,
                                    git_commit=status_info.git_commit,
                                    retry_type=Report.GIT_COMMIT,
                                    retry_count=1)
        same_commit_report.save()

        reports_to_process = self.processor.calculate_queued_objects(
            current_time, status_info)
        self.assertEqual(reports_to_process, 3)

        # delete the older report object
        Report.objects.get(id=older_report.id).delete()
        Report.objects.get(id=retry_commit_report.id).delete()
        Report.objects.get(id=too_young_report.id).delete()
        Report.objects.get(id=same_commit_report.id).delete()
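
        # A hedged reading of the counting rule asserted above:
        # calculate_queued_objects is assumed to count NEW reports, TIME
        # retries older than the retry window, and GIT_COMMIT retries whose
        # git_commit differs from status_info.git_commit, which is why only
        # the first three reports above are counted.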

    def test_state_to_metric(self):
        """Test the state_to_metric function."""
        self.processor.state = Report.FAILED_DOWNLOAD
        self.processor.account_number = '1234'
        failed_download_before = \
            REGISTRY.get_sample_value(
                'failed_download', {'account_number': '1234'}) or 0.0
        self.processor.record_failed_state_metrics()
        failed_download_after = REGISTRY.get_sample_value(
            'failed_download', {'account_number': '1234'})
        self.assertEqual(1.0,
                         float(failed_download_after) - failed_download_before)
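
    # A hedged sketch of the metric read above: record_failed_state_metrics
    # is assumed to increment a prometheus_client Counter labeled by account
    # number, e.g.
    #   FAILED_DOWNLOAD = Counter('failed_download',
    #                             'Reports that failed to download',
    #                             ['account_number'])
    #   FAILED_DOWNLOAD.labels(account_number='1234').inc()
    # so REGISTRY.get_sample_value exposes a per-account sample. The counter
    # name and label come from the assertions; the rest is an assumption.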
# Example #22
class ReportSliceProcessorTests(TestCase):
    """Test Cases for the Message processor."""
    def setUp(self):
        """Create test setup."""
        self.payload_url = 'http://insights-upload.com/q/file_to_validate'
        self.uuid = uuid.uuid4()
        self.uuid2 = uuid.uuid4()
        self.uuid3 = uuid.uuid4()
        self.uuid4 = uuid.uuid4()
        self.uuid5 = uuid.uuid4()
        self.uuid6 = uuid.uuid4()
        self.uuid7 = uuid.uuid4()
        self.fake_record = test_handler.KafkaMsg(msg_handler.MKT_TOPIC,
                                                 'http://internet.com')
        self.report_consumer = msg_handler.ReportConsumer()
        self.msg = self.report_consumer.unpack_consumer_record(
            self.fake_record)
        self.report_json = {
            'report_id': 1,
            'report_type': 'insights',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.report_record = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                    account='1234',
                                    state=Report.NEW,
                                    state_info=json.dumps([Report.NEW]),
                                    last_update_time=datetime.now(pytz.utc),
                                    retry_count=0,
                                    ready_to_archive=False,
                                    source=uuid.uuid4(),
                                    arrival_time=datetime.now(pytz.utc),
                                    processing_start_time=datetime.now(
                                        pytz.utc))
        self.report_record.save()

        self.report_slice = ReportSlice(
            report_platform_id=self.uuid,
            report_slice_id=self.uuid2,
            account='13423',
            report_json=json.dumps(self.report_json),
            state=ReportSlice.NEW,
            state_info=json.dumps([ReportSlice.NEW]),
            retry_count=0,
            last_update_time=datetime.now(pytz.utc),
            report=self.report_record,
            ready_to_archive=True,
            source=uuid.uuid4(),
            creation_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_slice.save()
        self.report_record.save()
        self.processor = report_slice_processor.ReportSliceProcessor()
        self.processor.report = self.report_slice

    def check_variables_are_reset(self):
        """Check that report processor members have been cleared."""
        processor_attributes = [
            self.processor.report_platform_id, self.processor.report,
            self.processor.state, self.processor.account_number,
            self.processor.upload_message, self.processor.status,
            self.processor.report_json
        ]
        for attribute in processor_attributes:
            self.assertEqual(attribute, None)

    def test_assign_report_slice_new(self):
        """Test the assign report slice function with only a new report slice."""
        self.report_slice.state = ReportSlice.NEW
        self.report_slice.save()
        self.processor.report_or_slice = None
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, self.report_slice)
        queued_slices = REGISTRY.get_sample_value('queued_report_slices')
        self.assertEqual(queued_slices, 1)

    def test_update_slice_state(self):
        """Test updating the slice state."""
        self.report_slice.save()
        report_json = {
            'report_id': 1,
            'report_type': 'deployments',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.processor.report_or_slice = self.report_slice
        self.processor.next_state = ReportSlice.VALIDATED
        options = {'report_json': report_json}
        self.processor.update_object_state(options=options)
        self.assertEqual(json.loads(self.report_slice.report_json),
                         report_json)

    def test_transition_to_validated_general_exception(self):
        """Test that when a general exception is raised, we don't pass validation."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice

        def validate_side_effect():
            """Transition the state to downloaded."""
            raise Exception('Test')

        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor._validate_report_details',
                side_effect=validate_side_effect):
            self.processor.transition_to_validated()
            self.assertEqual(self.report_slice.state,
                             ReportSlice.RETRY_VALIDATION)
            self.assertEqual(self.report_slice.retry_count, 1)

    def test_transition_to_validated(self):
        """Test that when a general exception is raised, we don't pass validation."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        report_json = {'report_slice_id': '384794738'}
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.transition_to_validated()
        self.assertEqual(self.report_slice.state, ReportSlice.VALIDATED)
        self.assertEqual(self.report_slice.retry_count, 0)

    def test_transition_to_validated_failed(self):
        """Test report missing slice id."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        report_json = {
            'report_id': 1,
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319'
        }
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.transition_to_validated()
        self.assertEqual(self.report_slice.state,
                         ReportSlice.FAILED_VALIDATION)
        self.assertEqual(self.report_slice.retry_count, 0)
        self.assertEqual(self.report_slice.ready_to_archive, True)

    def test_determine_retry_limit(self):
        """Test the determine retry method when the retry is at the limit."""
        self.report_slice.state = ReportSlice.VALIDATED
        self.report_slice.retry_count = 4
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.determine_retry(ReportSlice.FAILED_METRICS_UPLOAD,
                                       ReportSlice.VALIDATED)
        self.assertEqual(self.report_slice.state,
                         ReportSlice.FAILED_METRICS_UPLOAD)
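
    # A hedged sketch of the rule asserted above: determine_retry is assumed
    # to return the object to the retry state and bump retry_count until the
    # count passes a retry limit (4 here), after which the object is moved
    # to the given failure state. The exact limit constant is an assumption
    # drawn from retry_count=4 in this test.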

    def test_archive_report_and_slices_in_failed_state(self):
        """Test the archive method in a failed state."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = True
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.state = ReportSlice.FAILED_METRICS_UPLOAD
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(
            account=self.report_record.account)
        archived_slice = ReportSliceArchive.objects.get(
            report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(archived.report_platform_id), str(self.uuid))
        self.assertEqual(str(archived_slice.report_platform_id),
                         str(self.uuid))
        self.assertIsNotNone(archived_slice.processing_end_time)
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archive_report_and_slices_in_success_state(self):
        """Test the archive method in a failed state."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = True
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.state = ReportSlice.METRICS_UPLOADED
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(
            account=self.report_record.account)
        archived_slice = ReportSliceArchive.objects.get(
            report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(archived.report_platform_id), str(self.uuid))
        self.assertEqual(str(archived_slice.report_platform_id),
                         str(self.uuid))
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archive_report_and_slices_not_ready(self):
        """Test the archive method with slice not ready."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = False
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report still exists
        existing = Report.objects.get(id=self.report_record.id)
        # assert the report archive does not exist
        with self.assertRaises(ReportArchive.DoesNotExist):
            ReportArchive.objects.get(account=self.report_record.account)
        with self.assertRaises(ReportSliceArchive.DoesNotExist):
            ReportSliceArchive.objects.get(
                report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(existing.report_platform_id), str(self.uuid))
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_get_minio_client_not_configured(self):
        """Test getting minio client when not configured."""
        report_slice_processor.MINIO_ENDPOINT = None
        report_slice_processor.MINIO_ACCESS_KEY = None
        report_slice_processor.MINIO_SECRET_KEY = None
        processor = report_slice_processor.ReportSliceProcessor()
        minio_client = processor.get_minio_client()
        self.assertIsNone(minio_client)

    def test_get_minio_client_configured(self):
        """Test get minio client when configured."""
        report_slice_processor.MINIO_ENDPOINT = 'minio:9001'
        report_slice_processor.MINIO_ACCESS_KEY = 'access'
        report_slice_processor.MINIO_SECRET_KEY = 'secret'
        processor = report_slice_processor.ReportSliceProcessor()
        minio_client = processor.get_minio_client()
        self.assertIsNotNone(minio_client)

        # call again for branch path where already created
        self.assertIsNotNone(processor.get_minio_client())
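
    # A hedged sketch of the accessor under test: get_minio_client is
    # assumed to return None when any MINIO_* setting is missing and to
    # lazily build and cache a minio.Minio client otherwise. Illustrative
    # only; the minio import and the caching attribute are assumptions.
    def _example_get_minio_client(self):
        """Sketch of the assumed get_minio_client behavior."""
        if (report_slice_processor.MINIO_ENDPOINT is None
                or report_slice_processor.MINIO_ACCESS_KEY is None
                or report_slice_processor.MINIO_SECRET_KEY is None):
            return None
        if getattr(self, '_cached_minio_client', None) is None:
            from minio import Minio  # assumed dependency
            self._cached_minio_client = Minio(
                report_slice_processor.MINIO_ENDPOINT,
                access_key=report_slice_processor.MINIO_ACCESS_KEY,
                secret_key=report_slice_processor.MINIO_SECRET_KEY)
        return self._cached_minio_client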

    def test_upload_to_object_storage_none_client(self):
        """Test error raised when client is not configured."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_objectstore_none_client)
        event_loop.run_until_complete(coro())
        event_loop.close()
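
    # The event-loop boilerplate above (repeated by the tests below) wraps
    # an async def helper with the legacy asyncio.coroutine shim and drives
    # it on a fresh loop; on Python 3.7+ the same effect could be written
    # as asyncio.run(self.async_test_upload_to_objectstore_none_client()).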

    async def async_test_upload_to_objectstore_none_client(self):
        """Async setup for none client test."""
        report_slice_processor.MINIO_ENDPOINT = None
        report_slice_processor.MINIO_ACCESS_KEY = None
        report_slice_processor.MINIO_SECRET_KEY = None
        with self.assertRaises(
                report_slice_processor.RetryUploadTimeException):
            await self.processor._upload_to_object_storage()

    def test_upload_to_object_storage_no_bucket(self):
        """Test error raised when bucket does not exist."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_objectstore_no_bucket)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_objectstore_no_bucket(self):
        """Async setup for no bucket test."""
        mock_minio = Mock()
        mock_minio.bucket_exists.return_value = False
        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor.get_minio_client',
                return_value=mock_minio):
            with self.assertRaises(
                    report_slice_processor.RetryUploadTimeException):
                await self.processor._upload_to_object_storage()

    def test_upload_to_object_storage_upload_error(self):
        """Test error raised an upload error occurs."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_upload_to_objectstore_error)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_objectstore_error(self):
        """Async setup for upload error test."""
        mock_minio = Mock()
        mock_minio.bucket_exists.return_value = True

        # test a general upload exception
        def raise_error():
            """Raise a general error."""
            raise Exception('Test')

        mock_minio.fput_object.side_effect = raise_error
        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor.get_minio_client',
                return_value=mock_minio):
            with self.assertRaises(Exception):
                await self.processor._upload_to_object_storage()

    def test_upload_to_object_storage_upload_success(self):
        """Test upload success pass."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_upload_to_objectstore)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_objectstore(self):
        """Async setup for upload error test."""
        mock_minio = Mock()
        mock_minio.bucket_exists.return_value = True
        mock_minio.fput_object.return_value = True
        report_json = {'report_slice_id': '384794738'}
        self.processor.report_or_slice = self.report_slice
        self.processor.report_or_slice.report_json = json.dumps(report_json)
        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor.get_minio_client',
                return_value=mock_minio):
            try:
                await self.processor._upload_to_object_storage()
            except Exception as err:  # pylint: disable=broad-except
                self.fail(f'Unexpected exception {err}')
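
    # A hedged sketch of the path exercised by the object-storage tests:
    # _upload_to_object_storage is assumed to fetch the minio client, check
    # the target bucket, and fput_object the slice json, mapping a missing
    # client or bucket to RetryUploadTimeException. The bucket and object
    # names here are illustrative assumptions.
    async def _example_upload_to_object_storage(self, bucket='insights-data'):
        """Sketch of the assumed _upload_to_object_storage behavior."""
        import tempfile  # stdlib; local import for this sketch
        minio_client = self.processor.get_minio_client()
        if minio_client is None:
            raise report_slice_processor.RetryUploadTimeException(
                'minio client is not configured')
        if not minio_client.bucket_exists(bucket):
            raise report_slice_processor.RetryUploadTimeException(
                'bucket %s does not exist' % bucket)
        with tempfile.NamedTemporaryFile('w', suffix='.json') as tmp:
            tmp.write(self.processor.report_or_slice.report_json)
            tmp.flush()
            minio_client.fput_object(bucket, 'report_slice.json', tmp.name)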
# Example #23
class ReportSliceProcessorTests(TestCase):
    """Test Cases for the Message processor."""
    def setUp(self):
        """Create test setup."""
        self.payload_url = 'http://insights-upload.com/q/file_to_validate'
        self.uuid = uuid.uuid4()
        self.uuid2 = uuid.uuid4()
        self.uuid3 = uuid.uuid4()
        self.uuid4 = uuid.uuid4()
        self.uuid5 = uuid.uuid4()
        self.uuid6 = uuid.uuid4()
        self.uuid7 = uuid.uuid4()
        self.fake_record = test_handler.KafkaMsg(msg_handler.QPC_TOPIC,
                                                 'http://internet.com')
        self.report_consumer = msg_handler.ReportConsumer()
        self.msg = self.report_consumer.unpack_consumer_record(
            self.fake_record)
        self.report_json = {
            'report_id': 1,
            'report_type': 'insights',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': [{
                'bios_uuid': 'value'
            }, {
                'invalid': 'value'
            }]
        }
        self.report_record = Report(upload_srv_kafka_msg=json.dumps(self.msg),
                                    account='1234',
                                    state=Report.NEW,
                                    state_info=json.dumps([Report.NEW]),
                                    last_update_time=datetime.now(pytz.utc),
                                    retry_count=0,
                                    ready_to_archive=False,
                                    source='satellite',
                                    arrival_time=datetime.now(pytz.utc),
                                    processing_start_time=datetime.now(
                                        pytz.utc))
        self.report_record.save()

        self.report_slice = ReportSlice(
            report_platform_id=self.uuid,
            report_slice_id=self.uuid2,
            account='13423',
            report_json=json.dumps(self.report_json),
            state=ReportSlice.NEW,
            state_info=json.dumps([ReportSlice.NEW]),
            retry_count=0,
            last_update_time=datetime.now(pytz.utc),
            failed_hosts=[],
            candidate_hosts=[],
            report=self.report_record,
            ready_to_archive=True,
            hosts_count=2,
            source='satellite',
            creation_time=datetime.now(pytz.utc),
            processing_start_time=datetime.now(pytz.utc))
        self.report_slice.save()
        self.report_record.save()
        self.processor = report_slice_processor.ReportSliceProcessor()
        self.processor.report = self.report_slice

    def check_variables_are_reset(self):
        """Check that report processor members have been cleared."""
        processor_attributes = [
            self.processor.report_platform_id, self.processor.report,
            self.processor.state, self.processor.account_number,
            self.processor.upload_message, self.processor.status,
            self.processor.report_json, self.processor.candidate_hosts,
            self.processor.failed_hosts
        ]
        for attribute in processor_attributes:
            self.assertEqual(attribute, None)

    def test_assign_report_slice_new(self):
        """Test the assign report slice function with only a new report slice."""
        self.report_slice.state = ReportSlice.NEW
        self.report_slice.save()
        self.processor.report_or_slice = None
        self.processor.assign_object()
        self.assertEqual(self.processor.report_or_slice, self.report_slice)
        queued_slices = REGISTRY.get_sample_value('queued_report_slices')
        self.assertEqual(queued_slices, 1)

    async def async_test_delegate_state(self):
        """Set up the test for delegate state."""
        self.report_slice.state = ReportSlice.VALIDATED
        self.report_slice.report_platform_id = self.uuid
        self.report_slice.candidate_hosts = json.dumps([{
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'value'
            },
            'cause':
            report_slice_processor.FAILED_UPLOAD
        }])
        self.report_slice.failed_hosts = json.dumps([{
            str(self.uuid2): {
                'ip_addresses': 'value',
                'name': 'value'
            },
            'cause':
            abstract_processor.FAILED_VALIDATION
        }])
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice

        def upload_side_effect():
            """Transition the state to uploaded."""
            self.processor.state = ReportSlice.HOSTS_UPLOADED
            self.report_slice.state = ReportSlice.HOSTS_UPLOADED
            self.report_slice.save()

        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor.transition_to_hosts_uploaded',
                side_effect=upload_side_effect):
            await self.processor.delegate_state()
            self.check_variables_are_reset()

        # test pending state for delegate
        self.report_slice.state = ReportSlice.PENDING
        self.processor.report_or_slice = self.report_slice
        await self.processor.delegate_state()
        self.check_variables_are_reset()

    def test_run_delegate(self):
        """Test the async function delegate state."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_delegate_state)
        event_loop.run_until_complete(coro())
        event_loop.close()

    def test_update_slice_state(self):
        """Test updating the slice state."""
        self.report_slice.failed_hosts = json.dumps([])
        self.report_slice.save()
        report_json = {
            'report_id': 1,
            'report_type': 'deployments',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': {
                str(self.uuid): {
                    'key': 'value'
                }
            }
        }
        failed_hosts = [{
            str(self.uuid6): {
                'etc_machine_id': 'value'
            }
        }, {
            str(self.uuid7): {
                'subscription_manager_id': 'value'
            }
        }]
        self.processor.report_or_slice = self.report_slice
        self.processor.next_state = ReportSlice.VALIDATED
        options = {'report_json': report_json, 'failed_hosts': failed_hosts}
        self.processor.update_object_state(options=options)
        self.assertEqual(json.loads(self.report_slice.report_json),
                         report_json)
        self.assertEqual(json.loads(self.report_slice.failed_hosts),
                         failed_hosts)
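
    # A hedged reading of the update exercised above: update_object_state
    # is assumed to move the slice to next_state, append that state to the
    # state_info history, and persist any recognized option fields such as
    # report_json and failed_hosts. The history bookkeeping is an
    # assumption; the persisted fields follow from the assertions.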

    def test_transition_to_validated_general_exception(self):
        """Test that when a general exception is raised, we don't pass validation."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice

        def validate_side_effect():
            """Transition the state to downloaded."""
            raise Exception('Test')

        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor._validate_report_details',
                side_effect=validate_side_effect):
            self.processor.transition_to_validated()
            self.assertEqual(self.report_slice.state,
                             ReportSlice.RETRY_VALIDATION)
            self.assertEqual(self.report_slice.retry_count, 1)

    def test_transition_to_validated(self):
        """Test that when a general exception is raised, we don't pass validation."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        report_json = {
            'report_slice_id': '384794738',
            'hosts': [{
                'ip_addresses': 'value'
            }]
        }
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.transition_to_validated()
        self.assertEqual(self.report_slice.state, ReportSlice.VALIDATED)
        self.assertEqual(self.report_slice.retry_count, 0)

    def test_transition_to_validated_failed(self):
        """Test report missing slice id."""
        self.report_slice.state = ReportSlice.RETRY_VALIDATION
        report_json = {
            'report_id': 1,
            'report_type': 'insights',
            'report_version': '1.0.0.1b025b8',
            'status': 'completed',
            'report_platform_id': '5f2cc1fd-ec66-4c67-be1b-171a595ce319',
            'hosts': {
                str(self.uuid): {
                    'ip_addresses': 'value'
                }
            }
        }
        self.report_slice.report_json = json.dumps(report_json)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.transition_to_validated()
        self.assertEqual(self.report_slice.state,
                         ReportSlice.FAILED_VALIDATION)
        self.assertEqual(self.report_slice.retry_count, 0)
        self.assertEqual(self.report_slice.ready_to_archive, True)

    def test_moved_candidates_to_failed(self):
        """Test that we reset candidates after moving them to failed."""
        candidates = [{self.uuid: {'bios_uuid': 'value', 'name': 'value'}}]
        self.processor.candidate_hosts = candidates
        self.processor.failed_hosts = [{
            self.uuid2: {
                'bios_uuid': 'value',
                'name': 'value'
            },
            'cause':
            abstract_processor.FAILED_VALIDATION
        }]
        self.processor.move_candidates_to_failed()
        self.assertEqual(self.processor.candidate_hosts, [])
        for host in candidates:
            self.assertIn(host, self.processor.failed_hosts)
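
    # A hedged sketch of the move asserted above; the method is assumed to
    # be equivalent to:
    #   self.failed_hosts.extend(self.candidate_hosts)
    #   self.candidate_hosts = []
    # so candidates drain into failed_hosts and the candidate list resets.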

    def test_determine_retry_limit(self):
        """Test the determine retry method when the retry is at the limit."""
        candidates = [{
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'value'
            },
            'cause': report_slice_processor.FAILED_UPLOAD
        }]
        self.report_slice.state = ReportSlice.VALIDATED
        self.report_slice.retry_count = 4
        self.report_slice.candidate_hosts = json.dumps(candidates)
        self.report_slice.failed_hosts = json.dumps([])
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.candidate_hosts = candidates
        self.processor.failed_hosts = []
        self.processor.determine_retry(ReportSlice.FAILED_HOSTS_UPLOAD,
                                       ReportSlice.VALIDATED)
        self.assertEqual(self.report_slice.state,
                         ReportSlice.FAILED_HOSTS_UPLOAD)
        self.assertEqual(json.loads(self.report_slice.candidate_hosts), [])
        for host in candidates:
            self.assertIn(host, json.loads(self.report_slice.failed_hosts))

    async def async_test_transition_to_hosts_uploaded(self):
        """Test the transition to hosts being uploaded."""
        hosts = [{
            str(self.uuid): {
                'bios_uuid': 'value',
                'name': 'value',
                'system_platform_id': str(self.uuid)
            }
        }, {
            str(self.uuid2): {
                'insights_client_id': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid2)
            }
        }, {
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid3)
            }
        }, {
            str(self.uuid4): {
                'mac_addresses': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid4)
            }
        }, {
            str(self.uuid5): {
                'vm_uuid': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid5)
            }
        }, {
            str(self.uuid6): {
                'etc_machine_id': 'value',
                'system_platform_id': str(self.uuid6)
            }
        }, {
            str(self.uuid7): {
                'subscription_manager_id': 'value',
                'system_platform_id': str(self.uuid7)
            }
        }]
        self.report_slice.failed_hosts = json.dumps([])
        self.report_slice.candidate_hosts = json.dumps(hosts)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.candidate_hosts = hosts
        self.processor._upload_to_host_inventory_via_kafka = CoroutineMock(
            return_value=[])
        await self.processor.transition_to_hosts_uploaded()
        self.assertEqual(json.loads(self.report_slice.candidate_hosts), [])
        self.assertEqual(self.report_slice.state, ReportSlice.HOSTS_UPLOADED)

    def test_transition_to_hosts_uploaded(self):
        """Test the async hosts uploaded successful."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(self.async_test_transition_to_hosts_uploaded)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_hosts_uploaded_kafka_mode(self):
        """Test the transition to hosts being uploaded."""
        hosts = [{
            str(self.uuid): {
                'bios_uuid': 'value',
                'name': 'value',
                'system_platform_id': str(self.uuid)
            }
        }, {
            str(self.uuid2): {
                'insights_client_id': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid2)
            }
        }, {
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid3)
            }
        }, {
            str(self.uuid4): {
                'mac_addresses': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid4)
            }
        }, {
            str(self.uuid5): {
                'vm_uuid': 'value',
                'name': 'foo',
                'system_platform_id': str(self.uuid5)
            }
        }, {
            str(self.uuid6): {
                'etc_machine_id': 'value',
                'system_platform_id': str(self.uuid6)
            }
        }, {
            str(self.uuid7): {
                'subscription_manager_id': 'value',
                'system_platform_id': str(self.uuid7)
            }
        }]
        self.report_slice.failed_hosts = json.dumps([])
        self.report_slice.candidate_hosts = json.dumps(hosts)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.candidate_hosts = hosts
        self.processor._upload_to_host_inventory_via_kafka = CoroutineMock(
            return_value=[])
        await self.processor.transition_to_hosts_uploaded()
        self.assertEqual(json.loads(self.report_slice.candidate_hosts), [])
        self.assertEqual(self.report_slice.state, ReportSlice.HOSTS_UPLOADED)

    def test_transition_to_hosts_uploaded_kafka_mode(self):
        """Test the async hosts uploaded successful."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_transition_to_hosts_uploaded_kafka_mode)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_hosts_uploaded_no_candidates(self):
        """Test the transition to hosts being uploaded."""
        self.report_record.ready_to_archive = True
        self.report_record.save()
        faulty_report = ReportSlice(
            account='987',
            report_platform_id=str(self.uuid2),
            report_slice_id=str(self.uuid),
            state=ReportSlice.NEW,
            report_json=json.dumps(self.report_json),
            state_info=json.dumps([ReportSlice.PENDING, ReportSlice.NEW]),
            last_update_time=datetime.now(pytz.utc),
            candidate_hosts=json.dumps({}),
            failed_hosts=json.dumps([]),
            hosts_count=10,
            retry_count=0)
        faulty_report.save()
        self.processor.report_or_slice = faulty_report
        self.processor.account_number = '987'
        self.processor.state = faulty_report.state
        self.processor.report_platform_id = self.uuid2
        self.processor.report_json = self.report_json
        self.processor.candidate_hosts = {}
        await self.processor.transition_to_hosts_uploaded()
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_transition_to_hosts_uploaded_no_candidates(self):
        """Test the async hosts uploaded no candidates."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_transition_to_hosts_uploaded_no_candidates)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_transition_to_hosts_uploaded_exception(self):
        """Test the transition to hosts being uploaded."""
        hosts = {
            str(self.uuid): {
                'bios_uuid': 'value',
                'name': 'value'
            },
            str(self.uuid2): {
                'insights_client_id': 'value',
                'name': 'foo'
            },
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid4): {
                'mac_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid5): {
                'vm_uuid': 'value',
                'name': 'foo'
            },
            str(self.uuid6): {
                'etc_machine_id': 'value'
            },
            str(self.uuid7): {
                'subscription_manager_id': 'value'
            }
        }
        self.processor.candidate_hosts = hosts
        self.processor.report_or_slice = self.report_slice

        def hosts_upload_side_effect():
            raise Exception('Test')

        with patch(
                'processor.report_slice_processor.'
                'ReportSliceProcessor._upload_to_host_inventory_via_kafka',
                side_effect=hosts_upload_side_effect):
            await self.processor.transition_to_hosts_uploaded()
            self.assertEqual(self.report_slice.state, ReportSlice.VALIDATED)
            self.assertEqual(self.report_slice.retry_count, 1)

    def test_transition_to_hosts_uploaded_exception(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_transition_to_hosts_uploaded_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_host_inventory_via_kafka(self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {
            str(self.uuid): {
                'bios_uuid': 'value',
                'name': 'value'
            },
            str(self.uuid2): {
                'insights_client_id': 'value',
                'name': 'foo'
            },
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid4): {
                'mac_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid5): {
                'vm_uuid': 'value',
                'name': 'foo'
            },
            str(self.uuid6): {
                'etc_machine_id': 'value'
            },
            str(self.uuid7): {
                'subscription_manager_id': 'value'
            }
        }
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS)
        test_producer.start = CoroutineMock()
        test_producer.send = CoroutineMock()
        test_producer.stop = CoroutineMock()
        with patch('processor.report_slice_processor.AIOKafkaProducer',
                   return_value=test_producer):
            with patch('processor.report_slice_processor.asyncio.wait',
                       side_effect=None):
                # although we are not asserting any results, the test here
                # is that no error was raised
                await self.processor._upload_to_host_inventory_via_kafka(hosts)
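
    # A hedged sketch of the producer flow these kafka tests patch around:
    # _upload_to_host_inventory_via_kafka is assumed to start an
    # AIOKafkaProducer, send one message per host, and translate connect or
    # send failures into KafkaMsgHandlerError. The topic name and payload
    # shape below are illustrative assumptions.
    async def _example_upload_via_kafka(self, hosts):
        """Sketch of the assumed _upload_to_host_inventory_via_kafka flow."""
        producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS)
        try:
            await producer.start()
        except KafkaConnectionError as err:
            raise msg_handler.KafkaMsgHandlerError(
                'unable to connect to kafka: %s' % err)
        try:
            for host_id, host_facts in hosts.items():
                message = json.dumps(
                    {'id': host_id, 'facts': host_facts}).encode('utf-8')
                await producer.send('platform.inventory.host-ingress',
                                    message)
        except Exception as err:  # pylint: disable=broad-except
            raise msg_handler.KafkaMsgHandlerError(
                'unable to send host: %s' % err)
        finally:
            await producer.stop()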

    def test_upload_to_host_inventory_via_kafka(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_host_inventory_via_kafka)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_host_inventory_via_kafka_exception(self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {
            str(self.uuid): {
                'bios_uuid': 'value',
                'name': 'value'
            },
            str(self.uuid2): {
                'insights_client_id': 'value',
                'name': 'foo'
            },
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid4): {
                'mac_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid5): {
                'vm_uuid': 'value',
                'name': 'foo'
            },
            str(self.uuid6): {
                'etc_machine_id': 'value'
            },
            str(self.uuid7): {
                'subscription_manager_id': 'value'
            }
        }
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS)

        # test KafkaConnectionError
        def raise_kafka_error():
            """Raise a kafka error."""
            raise KafkaConnectionError('Test')

        test_producer.start = CoroutineMock(side_effect=raise_kafka_error)
        test_producer.send = CoroutineMock()
        test_producer.stop = CoroutineMock()
        with self.assertRaises(msg_handler.KafkaMsgHandlerError):
            with patch('processor.report_slice_processor.AIOKafkaProducer',
                       return_value=test_producer):
                await self.processor._upload_to_host_inventory_via_kafka(hosts)

    def test_upload_to_host_inventory_via_kafka_exception(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_host_inventory_via_kafka_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    async def async_test_upload_to_host_inventory_via_kafka_send_exception(
            self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {
            str(self.uuid): {
                'bios_uuid': 'value',
                'name': 'value'
            },
            str(self.uuid2): {
                'insights_client_id': 'value',
                'name': 'foo'
            },
            str(self.uuid3): {
                'ip_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid4): {
                'mac_addresses': 'value',
                'name': 'foo'
            },
            str(self.uuid5): {
                'vm_uuid': 'value',
                'name': 'foo'
            },
            str(self.uuid6): {
                'etc_machine_id': 'value'
            },
            str(self.uuid7): {
                'subscription_manager_id': 'value'
            }
        }
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS)

        # test a general send exception
        def raise_error():
            """Raise a general error."""
            raise Exception('Test')

        test_producer.start = CoroutineMock()
        test_producer.send = CoroutineMock(side_effect=raise_error)
        test_producer.stop = CoroutineMock()
        with self.assertRaises(msg_handler.KafkaMsgHandlerError):
            with patch('processor.report_slice_processor.AIOKafkaProducer',
                       return_value=test_producer):
                await self.processor._upload_to_host_inventory_via_kafka(hosts)

    def test_upload_to_host_inventory_via_kafka_send_exception(self):
        """Test the async hosts uploaded exception."""
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        coro = asyncio.coroutine(
            self.async_test_upload_to_host_inventory_via_kafka_send_exception)
        event_loop.run_until_complete(coro())
        event_loop.close()

    def test_archive_report_and_slices_in_failed_state(self):
        """Test the archive method in a failed state."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = True
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.state = ReportSlice.FAILED_HOSTS_UPLOAD
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(
            account=self.report_record.account)
        archived_slice = ReportSliceArchive.objects.get(
            report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(archived.report_platform_id), str(self.uuid))
        self.assertEqual(str(archived_slice.report_platform_id),
                         str(self.uuid))
        self.assertIsNotNone(archived_slice.processing_end_time)
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archive_report_and_slices_in_success_state(self):
        """Test the archive method in a failed state."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = True
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.state = ReportSlice.HOSTS_UPLOADED
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report doesn't exist
        with self.assertRaises(Report.DoesNotExist):
            Report.objects.get(id=self.report_record.id)
        # assert the report archive does exist
        archived = ReportArchive.objects.get(
            account=self.report_record.account)
        archived_slice = ReportSliceArchive.objects.get(
            report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(archived.report_platform_id), str(self.uuid))
        self.assertEqual(str(archived_slice.report_platform_id),
                         str(self.uuid))
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_archive_report_and_slices_not_ready(self):
        """Test the archive method with slice not ready."""
        self.report_record.ready_to_archive = True
        self.report_record.report_platform_id = str(self.uuid)
        self.report_record.save()
        self.report_slice.ready_to_archive = False
        self.report_slice.report_platform_id = str(self.uuid)
        self.report_slice.report_slice_id = str(self.uuid2)
        self.report_slice.save()
        self.processor.report_or_slice = self.report_slice
        self.processor.report_platform_id = str(self.uuid)

        self.processor.archive_report_and_slices()
        # assert the report still exists
        existing = Report.objects.get(id=self.report_record.id)
        # assert the report archive does not exist
        with self.assertRaises(ReportArchive.DoesNotExist):
            ReportArchive.objects.get(account=self.report_record.account)
        with self.assertRaises(ReportSliceArchive.DoesNotExist):
            ReportSliceArchive.objects.get(
                report_slice_id=self.report_slice.report_slice_id)
        self.assertEqual(str(existing.report_platform_id), str(self.uuid))
        # assert the processor was reset
        self.check_variables_are_reset()

    def test_get_stale_time(self):
        """Test the get stale date method."""
        self.processor.report_or_slice = self.report_record
        self.processor.report_or_slice.source = 'satellite'
        self.processor.report_or_slice.save()
        current_time = datetime.utcnow()
        stale_time = current_time + timedelta(days=int(SATELLITE_HOST_TTL))
        expected = stale_time.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
        actual = self.processor.get_stale_date()
        # the format looks like this: 2019-11-14T19:58:13.037Z
        # by cutting off the last 13 characters I am comparing
        # 2019-11-14T, which is the year/month/day
        self.assertEqual(expected[:-13], actual[:-13])
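
    # A hedged sketch of the computation asserted above: get_stale_date is
    # assumed to add SATELLITE_HOST_TTL days to the current UTC time and
    # format it with millisecond precision and a trailing 'Z', exactly as
    # the expected value is built in the test.
    def _example_get_stale_date(self):
        """Sketch of the assumed get_stale_date behavior."""
        stale_time = datetime.utcnow() + timedelta(
            days=int(SATELLITE_HOST_TTL))
        return stale_time.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'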