Example #1
    def handle(self, *args, **options):
        from casexml.apps.phone.models import properly_wrap_sync_log, SyncLog, SimplifiedSyncLog

        if len(args) < 1:
            print "Usage: ./manage.py sync_log_debugger <filename1> [<filename2>] [<filename3>]..."
            sys.exit(0)

        logs = []
        log_names = []
        for filename in args:
            if os.path.isdir(filename):
                filenames = [os.path.join(filename, item) for item in sorted(os.listdir(filename))]
            else:
                filenames = [filename]

            for filename in filenames:
                log_name = os.path.basename(filename)
                log_names.append(log_name)
                with open(filename) as f:
                    wrapped_log = properly_wrap_sync_log(json.loads(f.read()))
                    logs.append(wrapped_log)
                    if isinstance(wrapped_log, SyncLog):
                        log_names.append("migrated-{}".format(log_name))
                        logs.append(SimplifiedSyncLog.from_other_format(wrapped_log))
                    elif getattr(wrapped_log, "migrated_from", None):
                        log_names.append("migrated_from-{}".format(log_name))
                        logs.append(properly_wrap_sync_log(wrapped_log.to_json()["migrated_from"]))

        print "state hashes"
        for i in range(len(log_names)):
            print "{} ({}): {}".format(log_names[i], logs[i]._id, logs[i].get_state_hash())

        print "\ncase diffs"
        for i in range(len(log_names)):
            for j in range(len(log_names)):
                if i != j:
                    case_diff = set(logs[i].get_footprint_of_cases_on_phone()) - set(
                        logs[j].get_footprint_of_cases_on_phone()
                    )
                    if case_diff:
                        print "cases on {} and not {}: {}".format(
                            log_names[i], log_names[j], ", ".join(sorted(case_diff))
                        )

        if options["debugger"]:
            union_of_ids = set().union(*[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            intersection_of_ids = set().intersection(*[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            import pdb

            pdb.set_trace()

        if options["check_hash"]:
            log_to_check = logs[int(options["index"])]
            result = _brute_force_search(
                log_to_check.case_ids_on_phone, options["check_hash"], depth=int(options["depth"])
            )
            if result:
                print "check successful - missing ids {}".format(result)
            else:
                print "no match found"
Example #2
    def handle(self, sync_logs, **options):
        from casexml.apps.phone.models import properly_wrap_sync_log, SyncLog, SimplifiedSyncLog

        logs = []
        log_names = []
        for filename in sync_logs:
            if os.path.isdir(filename):
                filenames = [os.path.join(filename, item) for item in sorted(os.listdir(filename))]
            else:
                filenames = [filename]

            for filename in filenames:
                log_name = os.path.basename(filename)
                log_names.append(log_name)
                with open(filename, encoding='utf-8') as f:
                    wrapped_log = properly_wrap_sync_log(json.loads(f.read()))
                    logs.append(wrapped_log)
                    if isinstance(wrapped_log, SyncLog):
                        log_names.append('migrated-{}'.format(log_name))
                        logs.append(SimplifiedSyncLog.from_other_format(wrapped_log))
                    elif getattr(wrapped_log, 'migrated_from', None):
                        log_names.append('migrated_from-{}'.format(log_name))
                        logs.append(properly_wrap_sync_log(wrapped_log.to_json()['migrated_from']))

        print('state hashes')
        for i in range(len(log_names)):
            print('{} ({}): {}'.format(log_names[i], logs[i]._id, logs[i].get_state_hash()))

        print('\ncase diffs')
        for i in range(len(log_names)):
            for j in range(len(log_names)):
                if i != j:
                    case_diff = set(logs[i].get_footprint_of_cases_on_phone()) - \
                        set(logs[j].get_footprint_of_cases_on_phone())
                    if case_diff:
                        print('cases on {} and not {}: {}'.format(
                            log_names[i],
                            log_names[j],
                            ', '.join(sorted(case_diff))
                        ))

        if options['debugger']:
            union_of_ids = set().union(*[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            intersection_of_ids = set().intersection(*[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            import pdb
            pdb.set_trace()

        if options['check_hash']:
            log_to_check = logs[int(options['index'])]
            result = _brute_force_search(
                log_to_check.case_ids_on_phone, options['check_hash'], depth=int(options['depth'])
            )
            if result:
                print('check successful - missing ids {}'.format(result))
            else:
                print('no match found')
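
The command above boils down to a single reusable pattern: read a sync log's JSON, wrap it with properly_wrap_sync_log, then compare state hashes and case footprints. Below is a minimal standalone sketch of that pattern, not part of the original command; the file names are hypothetical and a configured CommCare HQ Django environment is assumed.

import json

from casexml.apps.phone.models import properly_wrap_sync_log


def load_wrapped(synclog_path):
    # Hypothetical helper: read a sync log JSON file and wrap it.
    with open(synclog_path, encoding='utf-8') as f:
        return properly_wrap_sync_log(json.loads(f.read()))


log_a = load_wrapped('synclog_a.json')  # hypothetical file names
log_b = load_wrapped('synclog_b.json')

print(log_a.get_state_hash(), log_b.get_state_hash())
# Case IDs in one log's footprint but not the other's.
print(set(log_a.get_footprint_of_cases_on_phone())
      - set(log_b.get_footprint_of_cases_on_phone()))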
Example #3
def get_synclogs_for_user(user_id, limit=10, wrap=True):
    synclogs = SyncLogSQL.objects.filter(user_id=user_id).order_by('date')[:limit]
    docs = [synclog.doc for synclog in synclogs]

    if wrap:
        return [properly_wrap_sync_log(doc) for doc in docs]
    else:
        return [doc for doc in docs]
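
A hedged usage sketch for the helper above (the user ID is made up): with wrap=True it returns wrapped sync log documents, with wrap=False the raw JSON dicts. Note that order_by('date') yields the oldest logs first.

wrapped = get_synclogs_for_user('user-id-123', limit=5)  # hypothetical user ID
for log in wrapped:
    print(log._id, log.get_state_hash())

raw_docs = get_synclogs_for_user('user-id-123', limit=5, wrap=False)  # plain JSON dicts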
Example #4
def get_synclogs_for_user(user_id, limit=10, wrap=True):
    synclogs = SyncLogSQL.objects.filter(user_id=user_id).order_by('date')[:limit]
    docs = [synclog.doc for synclog in synclogs]

    if wrap:
        return [properly_wrap_sync_log(doc) for doc in docs]
    else:
        return [doc for doc in docs]
Example #5
 def handle(self, user_id, date, **options):
     # SQL
     synclogs_sql = SyncLogSQL.objects.filter(
         user_id=user_id, date=date, log_format=LOG_FORMAT_SIMPLIFY)
     for synclog in synclogs_sql:
         doc = properly_wrap_sync_log(synclog.doc)
         doc.case_ids_on_phone = {'broken to force 412'}
     bulk_update_helper(synclogs_sql)
Example #6
 def handle(self, user_id, date, **options):
     # SQL
     synclogs_sql = SyncLogSQL.objects.filter(
         user_id=user_id,
         date=date,
         log_format=LOG_FORMAT_SIMPLIFY
     )
     for synclog in synclogs_sql:
         doc = properly_wrap_sync_log(synclog.doc)
         doc.case_ids_on_phone = {'broken to force 412'}
     bulk_update_helper(synclogs_sql)
Example #7
def get_last_synclog_for_user(user_id):
    result = SyncLog.view("phone/sync_logs_by_user",
                          startkey=[user_id, {}],
                          endkey=[user_id],
                          descending=True,
                          limit=1,
                          reduce=False,
                          include_docs=True,
                          wrap_doc=False)
    if result:
        row, = result
        return properly_wrap_sync_log(row['doc'])
Example #8
def get_sync_logs_for_user(user_id, limit):
    rows = synclog_view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        reduce=False,
        limit=limit,
        include_docs=True,
        stale=stale_ok()
    )
    sync_log_jsons = (row['doc'] for row in rows)
    return [properly_wrap_sync_log(sync_log_json) for sync_log_json in sync_log_jsons]
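
A short usage sketch for the view-backed helper above; the user ID is hypothetical. Because the view is queried with descending=True from startkey=[user_id, {}], the newest logs come back first.

recent = get_sync_logs_for_user('user-id-123', limit=10)  # hypothetical user ID
for log in recent:
    print(log._id, log.date, log.get_state_hash())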
Example #9
    def handle(self, *args, **options):
        from casexml.apps.phone.models import properly_wrap_sync_log, SyncLog, SimplifiedSyncLog

        if len(args) < 1:
            print('Usage: ./manage.py sync_log_debugger <filename1> [<filename2>] [<filename3>]...')
            sys.exit(0)

        logs = []
        log_names = []
        for filename in args:
            if os.path.isdir(filename):
                filenames = [
                    os.path.join(filename, item)
                    for item in sorted(os.listdir(filename))
                ]
            else:
                filenames = [filename]

            for filename in filenames:
                log_name = os.path.basename(filename)
                log_names.append(log_name)
                with open(filename) as f:
                    wrapped_log = properly_wrap_sync_log(json.loads(f.read()))
                    logs.append(wrapped_log)
                    if isinstance(wrapped_log, SyncLog):
                        log_names.append('migrated-{}'.format(log_name))
                        logs.append(
                            SimplifiedSyncLog.from_other_format(wrapped_log))

        print('state hashes')
        for i in range(len(log_names)):
            print('{} ({}): {}'.format(log_names[i], logs[i]._id,
                                        logs[i].get_state_hash()))

        print('\ncase diffs')
        for i in range(len(log_names)):
            for j in range(len(log_names)):
                if i != j:
                    case_diff = set(logs[i].get_footprint_of_cases_on_phone()) - \
                        set(logs[j].get_footprint_of_cases_on_phone())
                    if case_diff:
                        print('cases on {} and not {}: {}'.format(
                            log_names[i], log_names[j], ', '.join(case_diff)))

        if options['debugger']:
            union_of_ids = set().union(
                *[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            intersection_of_ids = set().intersection(
                *[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            import pdb
            pdb.set_trace()
Example #10
 def handle(self, user_id, date, **options):
     # SQL
     synclogs_sql = SyncLogSQL.objects.filter(
         user_id=user_id, date=date, log_format=LOG_FORMAT_SIMPLIFY)
     for synclog in synclogs_sql:
         doc = properly_wrap_sync_log(synclog.doc)
         doc.case_ids_on_phone = {'broken to force 412'}
     bulk_update_helper(synclogs_sql)
     # Couch - ToDo - delete after Synclog SQL migration is over
     synclogs_couch = SimplifiedSyncLog.view("phone/sync_logs_by_user",
                                             startkey=[user_id, {}],
                                             endkey=[user_id, date],
                                             descending=True,
                                             reduce=False,
                                             include_docs=True)
     logs = []
     for log in synclogs_couch:
         log.case_ids_on_phone = {'broken to force 412'}
         logs.append(log)
     SimplifiedSyncLog.bulk_save(logs)
Example #11
    def handle(self, *args, **options):
        from casexml.apps.phone.models import properly_wrap_sync_log, SyncLog, SimplifiedSyncLog

        if len(args) < 1:
            print(
                "Usage:\n"
                "./manage.py sync_log_debugger <synclog1> [synclog2 synclog3]...\n"
                "    <synclog> is a json file of the synclog you are trying to compare. Passing\n"
                "    in a folder will compare all of the files in that folder.\n"
            )
            sys.exit(0)

        logs = []
        log_names = []
        for filename in args:
            if os.path.isdir(filename):
                filenames = [
                    os.path.join(filename, item)
                    for item in sorted(os.listdir(filename))
                ]
            else:
                filenames = [filename]

            for filename in filenames:
                log_name = os.path.basename(filename)
                log_names.append(log_name)
                with open(filename) as f:
                    wrapped_log = properly_wrap_sync_log(json.loads(f.read()))
                    logs.append(wrapped_log)
                    if isinstance(wrapped_log, SyncLog):
                        log_names.append('migrated-{}'.format(log_name))
                        logs.append(
                            SimplifiedSyncLog.from_other_format(wrapped_log))
                    elif getattr(wrapped_log, 'migrated_from', None):
                        log_names.append('migrated_from-{}'.format(log_name))
                        logs.append(
                            properly_wrap_sync_log(
                                wrapped_log.to_json()['migrated_from']))

        print('state hashes')
        for i in range(len(log_names)):
            print('{} ({}): {}'.format(log_names[i], logs[i]._id,
                                        logs[i].get_state_hash()))

        print('\ncase diffs')
        for i in range(len(log_names)):
            for j in range(len(log_names)):
                if i != j:
                    case_diff = set(logs[i].get_footprint_of_cases_on_phone()) - \
                        set(logs[j].get_footprint_of_cases_on_phone())
                    if case_diff:
                        print('cases on {} and not {}: {}'.format(
                            log_names[i], log_names[j],
                            ', '.join(sorted(case_diff))))

        if options['debugger']:
            union_of_ids = set().union(
                *[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            intersection_of_ids = set().intersection(
                *[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            import pdb
            pdb.set_trace()

        if options['check_hash']:
            log_to_check = logs[int(options['index'])]
            result = _brute_force_search(log_to_check.case_ids_on_phone,
                                         options['check_hash'],
                                         depth=int(options['depth']))
            if result:
                print('check successful - missing ids {}'.format(result))
            else:
                print('no match found')
Example #12
 def _get_latest_synclog(self):
     return properly_wrap_sync_log(SyncLogSQL.objects.order_by('date').last().doc)
Example #13
def get_last_synclog_for_user(user_id):
    result = SyncLogSQL.objects.filter(user_id=user_id).order_by('date').last()
    if result:
        return properly_wrap_sync_log(result.doc)
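
A minimal usage sketch (hypothetical user ID); the helper returns None when the user has no sync logs, so guard the result before using it.

last_log = get_last_synclog_for_user('user-id-123')  # hypothetical user ID
if last_log is not None:
    print(last_log._id, last_log.get_state_hash())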
Example #14
    def rows(self):
        base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))

        user_id = self.request.GET.get('individual')
        if not user_id:
            return []

        # security check
        get_document_or_404(CommCareUser, self.domain, user_id)

        sync_log_ids = [
            row['id'] for row in SyncLog.view(
                "phone/sync_logs_by_user",
                startkey=[user_id, {}],
                endkey=[user_id],
                descending=True,
                reduce=False,
                limit=self.limit,
            )
        ]

        def _sync_log_to_row(sync_log):
            def _fmt_duration(duration):
                if isinstance(duration, int):
                    return format_datatables_data(
                        '<span class="{cls}">{text}</span>'.format(
                            cls=_bootstrap_class(duration or 0, 60, 20),
                            text=_('{} seconds').format(duration),
                        ), duration)
                else:
                    return format_datatables_data(
                        '<span class="label">{text}</span>'.format(
                            text=_("Unknown"),
                        ),
                        -1,
                    )

            def _fmt_id(sync_log_id):
                href = base_link_url.format(id=sync_log_id)
                return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                    href=href, id=sync_log_id)

            def _fmt_error_info(sync_log):
                if not sync_log.had_state_error:
                    return u'<span class="label label-success">&#10003;</span>'
                else:
                    return (
                        u'<span class="label label-important">X</span>'
                        u'State error {}<br>Expected hash: {:.10}...').format(
                            _naturaltime_with_hover(sync_log.error_date),
                            sync_log.error_hash,
                        )

            num_cases = sync_log.case_count()
            columns = [
                _fmt_date(sync_log.date),
                format_datatables_data(num_cases, num_cases),
                _fmt_duration(sync_log.duration),
            ]
            if self.show_extra_columns:
                columns.append(_fmt_id(sync_log.get_id))
                columns.append(sync_log.log_format)
                columns.append(
                    _fmt_id(sync_log.previous_log_id)
                    if sync_log.previous_log_id else '---'
                )
                columns.append(_fmt_error_info(sync_log))
                columns.append('{:.10}...'.format(sync_log.get_state_hash()))
                columns.append(_naturaltime_with_hover(
                    sync_log.last_submitted))
                columns.append(u'{}<br>{:.10}'.format(
                    _naturaltime_with_hover(sync_log.last_cached),
                    sync_log.hash_at_last_cached))

            return columns

        return [
            _sync_log_to_row(properly_wrap_sync_log(sync_log_json))
            for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
        ]
Example #15
 def get_all_syncslogs():
     return [
         properly_wrap_sync_log(log.doc)
         for log in SyncLogSQL.objects.all()
     ]
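
A quick usage sketch for the helper above. Loading and wrapping every stored sync log is fine in tests, but could be expensive on a large table (an assumption, not something the source states).

for log in get_all_syncslogs():
    print(log._id, log.user_id)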
Example #16
def get_last_synclog_for_user(user_id):
    result = SyncLogSQL.objects.filter(user_id=user_id).order_by('date').last()
    if result:
        return properly_wrap_sync_log(result.doc)
Example #17
 def _get_the_first_synclog(self):
     return properly_wrap_sync_log(SyncLogSQL.objects.first().doc)
Example #18
 def _get_latest_synclog(self):
     return properly_wrap_sync_log(SyncLogSQL.objects.order_by('date').last().doc)
Example #19
 def _get_the_first_synclog(self):
     return properly_wrap_sync_log(SyncLogSQL.objects.first().doc)
Example #20
    def handle(self, *args, **options):
        from casexml.apps.phone.models import properly_wrap_sync_log, SyncLog, SimplifiedSyncLog

        if len(args) < 1:
            print(
                "Usage:\n"
                "./manage.py sync_log_debugger <synclog1> [synclog2 synclog3]...\n"
                "    <synclog> is a json file of the synclog you are trying to compare. Passing\n"
                "    in a folder will compare all of the files in that folder.\n"
            )
            sys.exit(0)

        logs = []
        log_names = []
        for filename in args:
            if os.path.isdir(filename):
                filenames = [os.path.join(filename, item) for item in sorted(os.listdir(filename))]
            else:
                filenames = [filename]

            for filename in filenames:
                log_name = os.path.basename(filename)
                log_names.append(log_name)
                with open(filename) as f:
                    wrapped_log = properly_wrap_sync_log(json.loads(f.read()))
                    logs.append(wrapped_log)
                    if isinstance(wrapped_log, SyncLog):
                        log_names.append('migrated-{}'.format(log_name))
                        logs.append(SimplifiedSyncLog.from_other_format(wrapped_log))
                    elif getattr(wrapped_log, 'migrated_from', None):
                        log_names.append('migrated_from-{}'.format(log_name))
                        logs.append(properly_wrap_sync_log(wrapped_log.to_json()['migrated_from']))

        print('state hashes')
        for i in range(len(log_names)):
            print('{} ({}): {}'.format(log_names[i], logs[i]._id, logs[i].get_state_hash()))

        print('\ncase diffs')
        for i in range(len(log_names)):
            for j in range(len(log_names)):
                if i != j:
                    case_diff = set(logs[i].get_footprint_of_cases_on_phone()) - \
                        set(logs[j].get_footprint_of_cases_on_phone())
                    if case_diff:
                        print('cases on {} and not {}: {}'.format(
                            log_names[i],
                            log_names[j],
                            ', '.join(sorted(case_diff))
                        ))

        if options['debugger']:
            union_of_ids = set().union(*[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            intersection_of_ids = set().intersection(*[set(log.get_footprint_of_cases_on_phone()) for log in logs])
            import pdb
            pdb.set_trace()

        if options['check_hash']:
            log_to_check = logs[int(options['index'])]
            result = _brute_force_search(
                log_to_check.case_ids_on_phone, options['check_hash'], depth=int(options['depth'])
            )
            if result:
                print('check successful - missing ids {}'.format(result))
            else:
                print('no match found')
Example #21
 def get_all_syncslogs():
     return [properly_wrap_sync_log(log.doc) for log in SyncLogSQL.objects.all()]
Example #22
 def _oldest_synclog(self, user_id):
     result = SyncLogSQL.objects.filter(
         user_id=user_id).order_by('date').first()
     if result:
         return properly_wrap_sync_log(result.doc)
Example #23
    def rows(self):
        base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))

        user_id = self.request.GET.get('individual')
        if not user_id:
            return []

        # security check
        get_document_or_404(CommCareUser, self.domain, user_id)

        sync_log_ids = [row['id'] for row in SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id, {}],
            endkey=[user_id],
            descending=True,
            reduce=False,
            limit=self.limit,
        )]

        def _sync_log_to_row(sync_log):
            def _fmt_duration(duration):
                if isinstance(duration, int):
                    return format_datatables_data(
                        '<span class="{cls}">{text}</span>'.format(
                            cls=_bootstrap_class(duration or 0, 60, 20),
                            text=_('{} seconds').format(duration),
                        ),
                        duration
                    )
                else:
                    return format_datatables_data(
                        '<span class="label">{text}</span>'.format(
                            text=_("Unknown"),
                        ),
                        -1,
                    )

            def _fmt_id(sync_log_id):
                href = base_link_url.format(id=sync_log_id)
                return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                    href=href,
                    id=sync_log_id
                )

            def _fmt_error_info(sync_log):
                if not sync_log.had_state_error:
                    return u'<span class="label label-success">&#10003;</span>'
                else:
                    return u'<span class="label label-important">X</span> State error {}'.format(
                        naturaltime(sync_log.error_date),
                    )

            num_cases = sync_log.case_count()
            columns = [
                _fmt_date(sync_log.date),
                format_datatables_data(num_cases, num_cases),
                _fmt_duration(sync_log.duration),
            ]
            if self.show_extra_columns:
                columns.append(_fmt_id(sync_log.get_id))
                columns.append(sync_log.log_format)
                columns.append(_fmt_id(sync_log.previous_log_id) if sync_log.previous_log_id else '---')
                columns.append(_fmt_error_info(sync_log))

            return columns

        return [
            _sync_log_to_row(properly_wrap_sync_log(sync_log_json))
            for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
        ]