def test_couch_synclogs(self):
        synclog = SyncLog(domain='test',
                          user_id='user1',
                          date=datetime.datetime(2015, 7, 1, 0, 0))
        SyncLog.get_db().save_doc(synclog)
        self.assertEqual(self._sql_count(), 0)
        self.assertEqual(self._couch_count(), 1)

        delete_synclog(synclog._id)
        self.assertEqual(self._sql_count(), 0)
        self.assertEqual(self._couch_count(), 0)

        with self.assertRaises(MissingSyncLog):
            delete_synclog(synclog._id)
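A small defensive wrapper can absorb the double-delete case the test exercises. A minimal sketch, assuming delete_synclog and MissingSyncLog are importable from the same modules the test uses:

def delete_synclog_if_exists(synclog_id):
    # Hypothetical helper: returns False instead of raising when the
    # synclog has already been deleted.
    try:
        delete_synclog(synclog_id)
        return True
    except MissingSyncLog:
        return False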
Example #2
def get_all_sync_logs_docs():
    assert settings.UNIT_TESTING
    all_sync_log_ids = [row['id'] for row in SyncLog.view(
        "phone/sync_logs_by_user",
        reduce=False,
    )]
    return iter_docs(SyncLog.get_db(), all_sync_log_ids)
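A minimal usage sketch (the function asserts settings.UNIT_TESTING, so it only makes sense against a test database); the per-user tally is purely illustrative:

from collections import Counter

# Tally synclog documents per user_id across the whole test database.
sync_counts = Counter(doc.get('user_id') for doc in get_all_sync_logs_docs())
print(sync_counts.most_common(5))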
Example #3
    def rows(self):
        base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))

        user_id = self.request.GET.get('individual')
        if not user_id:
            return []

        # security check
        get_document_or_404(CommCareUser, self.domain, user_id)

        sync_log_ids = [row['id'] for row in SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id, {}],
            endkey=[user_id],
            descending=True,
            reduce=False,
            limit=10
        )]

        def _sync_log_to_row(sync_log):
            def _fmt_duration(duration):
                if isinstance(duration, int):
                    return format_datatables_data(
                        '<span class="{cls}">{text}</span>'.format(
                            cls=_bootstrap_class(duration or 0, 60, 20),
                            text=_('{} seconds').format(duration),
                        ),
                        duration
                    )
                else:
                    return format_datatables_data(
                        '<span class="label">{text}</span>'.format(
                            text=_("Unknown"),
                        ),
                        -1,
                    )

            def _fmt_id(sync_log_id):
                href = base_link_url.format(id=sync_log_id)
                return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                    href=href,
                    id=sync_log_id
                )

            num_cases = len(sync_log.cases_on_phone)
            columns = [
                _fmt_date(sync_log.date),
                format_datatables_data(num_cases, num_cases),
                _fmt_duration(sync_log.duration),
            ]
            if self.show_extra_columns:
                columns.append(_fmt_id(sync_log.get_id))

            return columns

        return [
            _sync_log_to_row(SyncLog.wrap(sync_log_json))
            for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
        ]
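The call _bootstrap_class(duration or 0, 60, 20) passes the severe threshold first, then the warning threshold. The helper itself is not shown on this page; a plausible sketch of its contract (an assumption, not the actual implementation):

def _bootstrap_class(obj, severe, warn):
    # Red label above the severe threshold, yellow above the warning
    # threshold, green otherwise (Bootstrap 2 label classes).
    if obj > severe:
        return "label label-important"
    elif obj > warn:
        return "label label-warning"
    else:
        return "label label-success"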
Example #4
def get_all_sync_logs_docs():
    all_sync_log_ids = [
        row['id'] for row in SyncLog.view(
            "phone/sync_logs_by_user",
            reduce=False,
        )
    ]
    return iter_docs(SyncLog.get_db(), all_sync_log_ids)
Example #5
def delete_sync_logs(before_date, limit=1000):
    from casexml.apps.phone.dbaccessors.sync_logs_by_user import get_synclog_ids_before_date
    from casexml.apps.phone.models import SyncLog
    from dimagi.utils.couch.database import iter_bulk_delete_with_doc_type_verification
    sync_log_ids = get_synclog_ids_before_date(before_date, limit)
    return iter_bulk_delete_with_doc_type_verification(SyncLog.get_db(),
                                                       sync_log_ids,
                                                       'SyncLog',
                                                       chunksize=5)
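A usage sketch, assuming a Django context where the imports inside the function resolve; the 60-day cutoff is an arbitrary illustration:

import datetime

# Delete synclogs older than roughly 60 days, at most 1000 per call.
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=60)
num_deleted = delete_sync_logs(cutoff, limit=1000)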
Example #6
def delete_sync_logs(before_date, limit=1000, num_tries=10):
    from casexml.apps.phone.dbaccessors.sync_logs_by_user import get_synclog_ids_before_date
    from casexml.apps.phone.models import SyncLog
    from dimagi.utils.couch.database import iter_bulk_delete_with_doc_type_verification

    for i in range(num_tries):
        try:
            sync_log_ids = get_synclog_ids_before_date(before_date, limit)
            return iter_bulk_delete_with_doc_type_verification(
                SyncLog.get_db(), sync_log_ids, 'SyncLog', chunksize=25)
        except BulkSaveError:
            pass

    raise CouldNotPruneSyncLogs()
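Compared with Example #5, this variant retries up to num_tries times when the bulk delete hits a BulkSaveError (typically raised on document conflicts during bulk operations), re-fetching the candidate ids on each attempt, and raises CouldNotPruneSyncLogs only after every attempt fails.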
Example #7
def get_user_sync_history_pillow(pillow_id='UpdateUserSyncHistoryPillow',
                                 **kwargs):
    """
    This gets a pillow which iterates through all synclogs
    """
    couch_db = SyncLog.get_db()
    change_feed = CouchChangeFeed(couch_db, include_docs=True)
    checkpoint = PillowCheckpoint('synclog', change_feed.sequence_format)
    form_processor = UserSyncHistoryProcessor()
    return ConstructedPillow(
        name=pillow_id,
        checkpoint=checkpoint,
        change_feed=change_feed,
        processor=form_processor,
        change_processed_event_handler=PillowCheckpointEventHandler(
            checkpoint=checkpoint, checkpoint_frequency=100),
    )
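A minimal sketch of driving the pillow, assuming pillowtop's ConstructedPillow exposes the usual blocking run() loop (an assumption; the exact entry point may differ by version):

# Build the pillow and consume the Couch change feed; the event handler
# above checkpoints every 100 processed changes.
pillow = get_user_sync_history_pillow()
pillow.run()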
Example #8
    def rows(self):
        user_id = self.request.GET.get('individual')
        if not user_id:
            return []

        # security check
        get_document_or_404(CommCareUser, self.domain, user_id)

        sync_log_ids = [row['id'] for row in SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id, {}],
            endkey=[user_id],
            descending=True,
            reduce=False,
        )]

        def _sync_log_to_row(sync_log):
            def _fmt_duration(duration):
                if isinstance(duration, int):
                    return format_datatables_data(
                        '<span class="{cls}">{text}</span>'.format(
                            cls=_bootstrap_class(duration or 0, 60, 20),
                            text=_('{} seconds').format(duration),
                        ),
                        duration
                    )
                else:
                    return format_datatables_data(
                        '<span class="label">{text}</span>'.format(
                            text=_("Unknown"),
                        ),
                        -1,
                    )

            num_cases = len(sync_log.cases_on_phone)
            return [
                _fmt_date(sync_log.date),
                format_datatables_data(num_cases, num_cases),
                _fmt_duration(sync_log.duration),
            ]

        return [
            _sync_log_to_row(SyncLog.wrap(sync_log_json))
            for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
        ]
Example #9
    def handle(self, filename, *args, **kwargs):
        database = SyncLog.get_db()
        all_sync_log_ids = [
            row['id'] for row in database.view(
                'phone/sync_logs_by_user', reduce=False, include_docs=False)
        ]
        total_count = len(all_sync_log_ids)

        headers = [
            'date',
            'user',
            'cases',
            'dependent cases',
            'total cases',
            'initial',
            'duration',
            'duration per case (ms/case)',
        ]

        with open(filename, 'w', newline='') as f:
            writer = csv.writer(f, dialect=csv.excel)
            writer.writerow(headers)
            for i, sync_log_dict in enumerate(
                    iter_docs(database, all_sync_log_ids, 500)):
                duration = sync_log_dict.get('duration')
                cases = len(sync_log_dict.get('cases_on_phone', []))
                dependent_cases = len(
                    sync_log_dict.get('dependent_cases_on_phone', []))
                total_cases = cases + dependent_cases
                if duration and total_cases:
                    average_time = float(duration) * 1000 / float(total_cases)
                    writer.writerow([
                        (sync_log_dict.get('date')
                         or '1980-01-01')[:10],  # strip times off of the dates
                        sync_log_dict.get('user_id'),
                        cases,
                        dependent_cases,
                        total_cases,
                        bool(sync_log_dict.get('previous_log_id')),
                        duration,
                        '{0:.2f}'.format(average_time)
                    ])
                if i % 500 == 0:
                    print('processed {}/{} logs'.format(i, total_count))
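The handle(self, filename, ...) signature marks this as a Django management command that takes a positional output path, so it would be invoked as python manage.py <command> sync_logs.csv, where <command> is the module's registered command name (not shown on this page).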
Example #10
    def handle(self, filename, *args, **kwargs):
        # Doesn't work since this queries from Couch
        # Todo: Migrate to SQL
        raise CommandError("This doesn't work since the synclogs are now migrated to SQL")
        database = SyncLog.get_db()
        all_sync_log_ids = [
            row['id'] for row in
            database.view('phone/sync_logs_by_user', reduce=False, include_docs=False)
        ]
        total_count = len(all_sync_log_ids)

        headers = [
            'date', 'user', 'cases', 'dependent cases', 'total cases', 'initial', 'duration',
            'duration per case (ms/case)',
        ]

        with open(filename, 'w', newline='') as f:
            writer = csv.writer(f, dialect=csv.excel)
            writer.writerow(headers)
            for i, sync_log_dict in enumerate(iter_docs(database, all_sync_log_ids, 500)):
                duration = sync_log_dict.get('duration')
                cases = len(sync_log_dict.get('cases_on_phone', []))
                dependent_cases = len(sync_log_dict.get('dependent_cases_on_phone', []))
                total_cases = cases + dependent_cases
                if duration and total_cases:
                    average_time = float(duration) * 1000 / float(total_cases)
                    writer.writerow([
                        (sync_log_dict.get('date') or '1980-01-01')[:10],  # strip times off of the dates
                        sync_log_dict.get('user_id'),
                        cases,
                        dependent_cases,
                        total_cases,
                        bool(sync_log_dict.get('previous_log_id')),
                        duration,
                        '{0:.2f}'.format(average_time)
                    ])
                if i % 500 == 0:
                    print('processed {}/{} logs'.format(i, total_count))
Example #11
def delete_all_sync_logs():
    # handle with care
    _delete_all(SyncLog.get_db(), 'phone/sync_logs_by_user')
Example #12
 def delete_all_sync_logs(cls):
     logger.debug("Deleting all synclogs")
     cls._delete_all_from_view(SyncLog.get_db(), 'phone/sync_logs_by_user')
Example #13
def delete_all_sync_logs():
    # handle with care
    _delete_all(SyncLog.get_db(), 'phone/sync_logs_by_user')
Example #14
 def delete_all_sync_logs(cls):
     cls._delete_all(SyncLog.get_db(), 'phone/sync_logs_by_user')
Example #15
 def delete_all_sync_logs(cls):
     logger.debug("Deleting all synclogs")
     cls._delete_all(SyncLog.get_db(), 'phone/sync_logs_by_user')
Example #16
 def record_iter(cls, start_datetime, end_datetime):
     synclog_ids = get_synclog_ids_by_date(start_datetime, end_datetime)
     return iter_docs(SyncLog.get_db(), synclog_ids)
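A usage sketch, assuming record_iter is a classmethod on a dump/export helper (SomeDumper below is a placeholder name) and that get_synclog_ids_by_date expects naive UTC datetimes:

import datetime

start = datetime.datetime(2019, 1, 1)
end = start + datetime.timedelta(days=7)
# Stream one week of synclog docs without loading them all at once.
for doc in SomeDumper.record_iter(start, end):
    print(doc['_id'], doc.get('date'))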
Example #17
def delete_all_sync_logs():
    _delete_all(SyncLog.get_db(), 'phone/sync_logs_by_user')
Example #18
def synclog_view(view_name, **params):
    return combine_views([SyncLog.get_db(), get_db(None)], view_name, **params)
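Here combine_views (not shown) appears to run the same view against both the dedicated SyncLog database and the default database (get_db(None)) and merge the results, presumably bridging a period when synclog documents lived in more than one database.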
Example #19
 def delete_all_sync_logs(cls):
     SyncLogSQL.objects.all().delete()
     cls._delete_all_from_view(SyncLog.get_db(), 'phone/sync_logs_by_user')
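Unlike the purely Couch-backed variants above, this teardown helper wipes the SyncLogSQL rows as well as the Couch view, consistent with the migration of synclogs to SQL noted in Example #10.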
Example #20
    def rows(self):
        base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))

        user_id = self.request.GET.get('individual')
        if not user_id:
            return []

        # security check
        get_document_or_404(CommCareUser, self.domain, user_id)

        sync_log_ids = [
            row['id'] for row in SyncLog.view(
                "phone/sync_logs_by_user",
                startkey=[user_id, {}],
                endkey=[user_id],
                descending=True,
                reduce=False,
                limit=self.limit,
            )
        ]

        def _sync_log_to_row(sync_log):
            def _fmt_duration(duration):
                if isinstance(duration, int):
                    return format_datatables_data(
                        '<span class="{cls}">{text}</span>'.format(
                            cls=_bootstrap_class(duration or 0, 60, 20),
                            text=_('{} seconds').format(duration),
                        ),
                        duration
                    )
                else:
                    return format_datatables_data(
                        '<span class="label">{text}</span>'.format(
                            text=_("Unknown"), ),
                        -1,
                    )

            def _fmt_id(sync_log_id):
                href = base_link_url.format(id=sync_log_id)
                return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                    href=href, id=sync_log_id)

            def _fmt_error_info(sync_log):
                if not sync_log.had_state_error:
                    return u'<span class="label label-success">&#10003;</span>'
                else:
                    return (
                        u'<span class="label label-important">X</span>'
                        u'State error {}<br>Expected hash: {:.10}...').format(
                            _naturaltime_with_hover(sync_log.error_date),
                            sync_log.error_hash,
                        )

            num_cases = sync_log.case_count()
            columns = [
                _fmt_date(sync_log.date),
                format_datatables_data(num_cases, num_cases),
                _fmt_duration(sync_log.duration),
            ]
            if self.show_extra_columns:
                columns.append(_fmt_id(sync_log.get_id))
                columns.append(sync_log.log_format)
                columns.append(_fmt_id(sync_log.previous_log_id)
                               if sync_log.previous_log_id else '---')
                columns.append(_fmt_error_info(sync_log))
                columns.append('{:.10}...'.format(sync_log.get_state_hash()))
                columns.append(_naturaltime_with_hover(
                    sync_log.last_submitted))
                columns.append(u'{}<br>{:.10}'.format(
                    _naturaltime_with_hover(sync_log.last_cached),
                    sync_log.hash_at_last_cached))

            return columns

        return [
            _sync_log_to_row(properly_wrap_sync_log(sync_log_json))
            for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
        ]
Example #21
    def rows(self):
        base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))

        user_id = self.request.GET.get('individual')
        if not user_id:
            return []

        # security check
        get_document_or_404(CommCareUser, self.domain, user_id)

        sync_log_ids = [row['id'] for row in SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id, {}],
            endkey=[user_id],
            descending=True,
            reduce=False,
            limit=self.limit,
        )]

        def _sync_log_to_row(sync_log):
            def _fmt_duration(duration):
                if isinstance(duration, int):
                    return format_datatables_data(
                        '<span class="{cls}">{text}</span>'.format(
                            cls=_bootstrap_class(duration or 0, 60, 20),
                            text=_('{} seconds').format(duration),
                        ),
                        duration
                    )
                else:
                    return format_datatables_data(
                        '<span class="label">{text}</span>'.format(
                            text=_("Unknown"),
                        ),
                        -1,
                    )

            def _fmt_id(sync_log_id):
                href = base_link_url.format(id=sync_log_id)
                return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                    href=href,
                    id=sync_log_id
                )

            def _fmt_error_info(sync_log):
                if not sync_log.had_state_error:
                    return u'<span class="label label-success">&#10003;</span>'
                else:
                    return (u'<span class="label label-important">X</span>'
                            u'State error {}<br>Expected hash: {:.10}...').format(
                        _naturaltime_with_hover(sync_log.error_date),
                        sync_log.error_hash,
                    )

            num_cases = sync_log.case_count()
            columns = [
                _fmt_date(sync_log.date),
                format_datatables_data(num_cases, num_cases),
                _fmt_duration(sync_log.duration),
            ]
            if self.show_extra_columns:
                columns.append(_fmt_id(sync_log.get_id))
                columns.append(sync_log.log_format)
                columns.append(_fmt_id(sync_log.previous_log_id) if sync_log.previous_log_id else '---')
                columns.append(_fmt_error_info(sync_log))
                columns.append('{:.10}...'.format(sync_log.get_state_hash()))
                columns.append(_naturaltime_with_hover(sync_log.last_submitted))
                columns.append(u'{}<br>{:.10}'.format(
                    _naturaltime_with_hover(sync_log.last_cached),
                    sync_log.hash_at_last_cached))

            return columns

        return [
            _sync_log_to_row(properly_wrap_sync_log(sync_log_json))
            for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
        ]