Example No. 1
    def get_data(self):
        # todo: this will probably have to paginate eventually
        if self.all_relevant_forms:
            sp_ids = get_relevant_supply_point_ids(
                self.domain,
                self.active_location,
            )

            form_xmlnses = [form['xmlns'] for form in self.all_relevant_forms.values()]
            spoint_loc_map = {
                doc['_id']: doc['location_id']
                for doc in iter_docs(SupplyPointCase.get_db(), sp_ids)
            }
            locations = {
                doc['_id']: Location.wrap(doc)
                for doc in iter_docs(Location.get_db(), spoint_loc_map.values())
            }

            for spoint_id, loc_id in spoint_loc_map.items():
                loc = locations[loc_id]

                form_ids = StockReport.objects.filter(
                    stocktransaction__case_id=spoint_id
                ).exclude(
                    date__lte=self.start_date
                ).exclude(
                    date__gte=self.end_date
                ).values_list(
                    'form_id', flat=True
                ).order_by('-date').distinct()  # not truly distinct due to ordering
                matched = False
                for form_id in form_ids:
                    try:
                        if XFormInstance.get(form_id).xmlns in form_xmlnses:
                            yield {
                                'loc_id': loc._id,
                                'loc_path': loc.path,
                                'name': loc.name,
                                'type': loc.location_type,
                                'reporting_status': 'reporting',
                                'geo': loc._geopoint,
                            }
                            matched = True
                            break
                    except ResourceNotFound:
                        logging.error('Stock report for location {} in {} references non-existent form {}'.format(
                            loc._id, loc.domain, form_id
                        ))
                if not matched:
                    yield {
                        'loc_id': loc._id,
                        'loc_path': loc.path,
                        'name': loc.name,
                        'type': loc.location_type,
                        'reporting_status': 'nonreporting',
                        'geo': loc._geopoint,
                    }
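Every example on this page follows the same basic pattern: collect a list of document ids up front, then stream the raw documents through iter_docs, which fetches them from Couch in bulk chunks instead of issuing one request per id, and finally wrap each dict in its document class where needed. The snippet below is a minimal, self-contained sketch of that chunked-fetch idea, not the real dimagi helper: chunked, iter_docs_sketch and fetch_chunk are illustrative names, and the bulk lookup is stubbed with a plain callable.

from itertools import islice


def chunked(iterable, size):
    # yield successive lists of at most `size` items
    iterator = iter(iterable)
    while True:
        chunk = list(islice(iterator, size))
        if not chunk:
            return
        yield chunk


def iter_docs_sketch(fetch_chunk, doc_ids, chunksize=100):
    # fetch_chunk(ids) stands in for one bulk lookup per chunk;
    # ids that resolve to nothing are skipped, so callers can assume
    # every yielded value is an existing document dict
    for ids in chunked(doc_ids, chunksize):
        for doc in fetch_chunk(ids):
            if doc is not None:
                yield doc


# toy usage against an in-memory "database"
fake_db = {'a': {'_id': 'a'}, 'c': {'_id': 'c'}}
found = list(iter_docs_sketch(lambda ids: [fake_db.get(i) for i in ids],
                              ['a', 'b', 'c'], chunksize=2))
# found == [{'_id': 'a'}, {'_id': 'c'}]

Fetching in chunks keeps memory bounded while still avoiding a round trip per document, which is presumably why several of the examples below pass chunksize=500 for large id lists.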
Example No. 2
def get_data(ids):
    """
    returns the data in the format:
    {
        '2015-03': {
            'domain1': {
                'KOOKOO': {'calls': 40, 'minutes': 45}
            },
            'domain2': {
                'KOOKOO': {'calls': 20, 'minutes': 25},
                'TELERIVET': {'calls': 5, 'minutes': 0}
            }
        }
    }
    """
    data = {}
    for doc in iter_docs(CallLog.get_db(), ids):
        call = CallLog.wrap(doc)
        month_data = get_month_data(data, call.date)
        domain_data = get_domain_data(month_data, call.domain)
        backend_api = get_backend_api(call)
        backend_data = get_backend_data(domain_data, backend_api)
        backend_data['calls'] += 1
        duration = (call.duration or 0) / 60.0
        duration = int(ceil(duration))
        backend_data['minutes'] += duration
    return data
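The get_month_data, get_domain_data and get_backend_data helpers are not shown in this example. Judging by the format in the docstring they are presumably small setdefault-style accumulators along the following lines; the bodies are an assumption, only the names and the nesting come from the snippet, and get_backend_api is omitted because it depends on CallLog internals.

def get_month_data(data, date):
    # one bucket per 'YYYY-MM' key, e.g. '2015-03'
    return data.setdefault(date.strftime('%Y-%m'), {})


def get_domain_data(month_data, domain):
    # one bucket per domain inside the month
    return month_data.setdefault(domain, {})


def get_backend_data(domain_data, backend_api):
    # counters start at zero; get_data() increments them per call
    return domain_data.setdefault(backend_api, {'calls': 0, 'minutes': 0})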
Example No. 3
 def by_domain(cls, domain):
     relevant_ids = set([r['id'] for r in cls.get_db().view('locations/by_type',
         reduce=False,
         startkey=[domain],
         endkey=[domain, {}],
     ).all()])
     return (cls.wrap(l) for l in iter_docs(cls.get_db(), list(relevant_ids)))
Example No. 4
    def handle(self, ids_file, **options):
        with open(ids_file, encoding='utf-8') as f:
            doc_ids = [line.strip() for line in f]
        total_doc_ids = len(doc_ids)
        doc_ids = set(doc_ids)
        print("{} total doc ids, {} unique".format(total_doc_ids, len(doc_ids)))

        db = XFormInstance.get_db()  # Both forms and cases are in here
        with IterDB(db) as iter_db:
            for doc in iter_docs(db, with_progress_bar(doc_ids)):
                iter_db.save(doc)

        print("{} docs saved".format(len(iter_db.saved_ids)))
        print("{} docs errored".format(len(iter_db.error_ids)))
        not_found = len(doc_ids) - len(iter_db.saved_ids) - len(iter_db.error_ids)
        print("{} docs not found".format(not_found))

        filename = '{}_{}.csv'.format(ids_file.split('/')[-1],
                                      datetime.datetime.now().isoformat())
        with open(filename, 'w', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(['doc_id', 'status'])
            for doc_id in doc_ids:
                if doc_id in iter_db.saved_ids:
                    status = "saved"
                elif doc_id in iter_db.error_ids:
                    status = "errored"
                else:
                    status = "not_found"
                writer.writerow([doc_id, status])

        print("Saved results to {}".format(filename))
Example No. 5
def export_events(request):
    """
    Nothing fancy, just a simple csv dump of all the WisePill event
    data stored for debugging. This can't really be a domain-specific
    report because we may not be able to tie an event to a domain if
    the device was not configured properly in CommCareHQ.
    """
    attrs = [
        '_id',
        'domain',
        'data',
        'received_on',
        'case_id',
        'processed',
        'serial_number',
        'timestamp',
    ]
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="device_events.csv"'
    writer = csv.writer(response)
    writer.writerow(attrs)

    ids = WisePillDeviceEvent.get_all_ids()
    for doc in iter_docs(WisePillDeviceEvent.get_db(), ids):
        event = WisePillDeviceEvent.wrap(doc)
        writer.writerow([getattr(event, attr) for attr in attrs])

    return response
Example No. 6
    def handle(self, *args, **options):
        for domain in Domain.get_all_names():
            fields_definition = cdm.CustomDataFieldsDefinition.get_or_create(
                domain,
                'UserFields'
            )
            had_fields = bool(fields_definition.fields)

            user_ids = (CommCareUser.ids_by_domain(domain) +
                        CommCareUser.ids_by_domain(domain, is_active=False))

            existing_field_slugs = set([field.slug for field in fields_definition.fields])
            for user in iter_docs(CommCareUser.get_db(), user_ids):
                user_data = user.get('user_data', {})
                for key in user_data.keys():
                    if (key and key not in existing_field_slugs
                        and not cdm.is_system_key(key)):
                        existing_field_slugs.add(key)
                        fields_definition.fields.append(cdm.CustomDataField(
                            slug=key,
                            label=key,
                            is_required=False,
                        ))

            for field in fields_definition.fields:
                if cdm.is_system_key(field.slug):
                    fields_definition.fields.remove(field)
            # Only save a definition for domains which use custom user data
            if fields_definition.fields or had_fields:
                fields_definition.save()
            print 'finished domain "{}"'.format(domain)
Example No. 7
def archive_forms(domain, user, uploaded_data):
    response = {"errors": [], "success": []}

    form_ids = [row.get("form_id") for row in uploaded_data]
    missing_forms = set(form_ids)

    for xform_doc in iter_docs(XFormInstance.get_db(), form_ids):
        xform = XFormInstance.wrap(xform_doc)
        missing_forms.discard(xform["_id"])

        if xform["domain"] != domain:
            response["errors"].append(
                _(u"XFORM {form_id} does not belong to domain {domain}").format(
                    form_id=xform["_id"], domain=xform["domain"]
                )
            )
            continue

        xform_string = _(u"XFORM {form_id} for domain {domain} by user '{username}'").format(
            form_id=xform["_id"], domain=xform["domain"], username=user.username
        )

        try:
            xform.archive(user=user.username)
            response["success"].append(_(u"Successfully archived {form}").format(form=xform_string))
        except Exception as e:
            response["errors"].append(_(u"Could not archive {form}: {error}").format(form=xform_string, error=e))

    for missing_form_id in missing_forms:
        response["errors"].append(_(u"Could not find XForm {form_id}").format(form_id=missing_form_id))

    return response
Example No. 8
    def handle(self, *args, **options):
        self.stdout.write("Processing locations...\n")

        relevant_ids = set([r['id'] for r in Location.get_db().view(
            'commtrack/locations_by_code',
            reduce=False,
        ).all()])

        to_save = []

        for location in iter_docs(Location.get_db(), relevant_ids):
            # exclude any psi domain to make this take a realistic
            # amount of time
            if (
                not location.get('last_modified', False) and
                'psi' not in location.get('domain', '')
            ):
                location['last_modified'] = datetime.now().isoformat()
                to_save.append(location)

                if len(to_save) > 500:
                    Location.get_db().bulk_save(to_save)
                    to_save = []

        if to_save:
            Location.get_db().bulk_save(to_save)
Example No. 9
    def handle(self, *args, **options):
        start = options['startdate']
        end = options['enddate']

        print 'Starting...\n'
        ids = get_build_ids(start, end)

        print 'Checking {} builds\n'.format(len(ids))
        case_types_by_domain = {}
        all_form_xmlns = set()
        for build in iter_docs(Application.get_db(), ids):
            domain = build.get('domain')
            errors = forms_with_empty_case_block(build)
            if not errors:
                continue

            case_types, form_xmlns = errors
            all_form_xmlns |= form_xmlns
            domain_case_counts = case_types_by_domain.setdefault(domain, {})
            case_counts = {
                case_type: get_number_of_cases_in_domain(domain, case_type)
                for case_type in case_types
                if case_type not in domain_case_counts
            }
            domain_case_counts.update(case_counts)

        import pprint
        pprint.pprint(case_types_by_domain)

        print

        print all_form_xmlns
Example No. 10
    def handle(self, **options):
        roles = UserRole.view(
            'users/roles_by_domain',
            include_docs=False,
            reduce=False
        ).all()
        for role_doc in iter_docs(UserRole.get_db(), [r['id'] for r in roles]):
            role = UserRole.wrap(role_doc)
            save_role = False

            if role.permissions.edit_web_users:
                role.permissions.view_web_users = True
                role.permissions.view_roles = True
                save_role = True

            if role.permissions.edit_commcare_users:
                role.permissions.view_commcare_users = True
                role.permissions.edit_groups = True
                role.permissions.view_groups = True
                save_role = True

            if role.permissions.edit_locations:
                role.permissions.view_locations = True
                save_role = True

            if save_role:
                role.save()
Example No. 11
 def get_latest_schema(self):
     last_export = self.last_checkpoint()
     schema = self.cleanup(dict(last_export.schema) if last_export else None)
     doc_ids = last_export.get_new_ids(self.database) if last_export else self.all_doc_ids
     for doc in iter_docs(self.database, doc_ids):
         schema = extend_schema(schema, self.cleanup(doc))
     return schema
Example No. 12
    def handle(self, *args, **options):
        self.stdout.write("...\n")

        relevant_ids = set([r['id'] for r in CouchUser.get_db().view(
            'users/by_username',
            reduce=False,
        ).all()])

        to_save = []

        for user_doc in iter_docs(CouchUser.get_db(), relevant_ids):
            if 'commtrack_location' in user_doc:
                user = CommCareUser.get(user_doc['_id'])

                try:
                    original_location_object = Location.get(user['commtrack_location'])
                except ResourceNotFound:
                    # if there was bad data in there before, we can ignore it
                    continue
                user.set_locations([original_location_object])

                del user_doc['commtrack_location']

                to_save.append(user_doc)

                if len(to_save) > 500:
                    CouchUser.get_db().bulk_save(to_save)
                    to_save = []

        if to_save:
            CouchUser.get_db().bulk_save(to_save)
Example No. 13
    def response_cloudcare(self):
        """
        CloudCare enabled apps will have cloudcare_enabled set to false on downgrade.
        """
        key = [self.domain.name]
        db = Application.get_db()
        domain_apps = db.view(
            'app_manager/applications_brief',
            reduce=False,
            startkey=key,
            endkey=key + [{}],
        ).all()

        cloudcare_enabled_apps = []
        for app_doc in iter_docs(db, [a['id'] for a in domain_apps]):
            if app_doc.get('cloudcare_enabled', False):
                cloudcare_enabled_apps.append((app_doc['_id'], app_doc['name']))

        if not cloudcare_enabled_apps:
            return None

        num_apps = len(cloudcare_enabled_apps)
        return self._fmt_alert(
            ungettext(
                "You have %(num_apps)d application that will lose CloudCare access if you select this plan.",
                "You have %(num_apps)d applications that will lose CloudCare access if you select this plan.",
                num_apps
            ) % {
                'num_apps': num_apps,
            },
            [mark_safe('<a href="%(url)s">%(title)s</a>') % {
                'title': a[1],
                'url': reverse('view_app', args=[self.domain.name, a[0]])
            } for a in cloudcare_enabled_apps],
        )
Example No. 14
    def handle(self, *args, **options):
        if len(args) < 2:
            print "please specify a filepath and an archiving_user"
            return
        filepath = args[0]
        archiving_user = args[1]

        try:
            form_ids = open(filepath)
        except Exception as e:
            print "there was an issue opening the file: %s" % e
            return

        try:
            form_ids = [f[0] for f in csv.reader(form_ids)]
        except Exception as e:
            print "there was an issue reading the file %s" % e
            return

        for xform_doc in iter_docs(XFormInstance.get_db(), form_ids):
            try:
                xform = XFormInstance.wrap(xform_doc)
                xform.archive(user_id=archiving_user)
                print "Archived form %s in domain %s" % (
                    xform._id, xform.domain
                )
            except Exception as e:
                print "Issue archiving XFORM %s for domain %s: %s" % (
                    xform_doc['_id'], xform_doc['domain'], e
                )
Example No. 15
    def copy_docs(self, sourcedb, domain, simulate, startkey=None, endkey=None, doc_ids=None,
                  type=None, since=None, exclude_types=None, postgres_db=None, exclude_attachments=False):

        if not doc_ids:
            doc_ids = [result["id"] for result in sourcedb.view("domain/docs", startkey=startkey,
                                                                endkey=endkey, reduce=False)]
        total = len(doc_ids)
        count = 0
        msg = "Found %s matching documents in domain: %s" % (total, domain)
        msg += " of type: %s" % (type) if type else ""
        msg += " since: %s" % (since) if since else ""
        print msg

        err_log = self._get_err_log()

        queue = Queue(150)
        for i in range(NUM_PROCESSES):
            Worker(queue, sourcedb, self.targetdb, exclude_types, total, simulate, err_log, exclude_attachments).start()

        for doc in iter_docs(sourcedb, doc_ids, chunksize=100):
            count += 1
            queue.put((doc, count))

        # shutdown workers
        for i in range(NUM_PROCESSES):
            queue.put(None)

        err_log.close()
        if os.stat(err_log.name)[6] == 0:
            os.remove(err_log.name)
        else:
            print 'Failed document IDs written to %s' % err_log.name

        if postgres_db:
            copy_postgres_data_for_docs(postgres_db, doc_ids=doc_ids, simulate=simulate)
Example No. 16
 def get_forms(self):
     """
     Gets the form docs associated with a case. If it can't find a form
     it won't be included.
     """
     forms = iter_docs(self.get_db(), self.xform_ids)
     return [XFormInstance(form) for form in forms]
Example No. 17
 def get_cases(cls, ids):
     return [
         CommCareCase.wrap(doc) for doc in iter_docs(
             CommCareCase.get_db(),
             ids
         )
     ]
Example No. 18
def _build_indicators(indicator_config_id, relevant_ids):
    config = _get_config_by_id(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
            redis_client.srem(redis_key, doc.get('_id'))
        except Exception as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static(indicator_config_id):
        redis_client.delete(redis_key)
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Example No. 19
    def update_schema(self):
        key = [self.domain, self.app_id]
        all_apps = Application.get_db().view(
            'app_manager/saved_app',
            startkey=key + [self.last_processed_version],
            endkey=key + [{}],
            reduce=False,
            include_docs=False,
            skip=(1 if self.last_processed_version else 0)).all()

        all_seen_apps = self.apps_with_errors | self.processed_apps
        to_process = [
            app['id'] for app in all_apps if app['id'] not in all_seen_apps
        ]
        if self.app_id not in all_seen_apps:
            to_process.append(self.app_id)

        for app_doc in iter_docs(Application.get_db(), to_process):
            if app_doc['doc_type'] == 'RemoteApp':
                continue
            app = Application.wrap(app_doc)
            try:
                self.update_for_app(app)
            except AppManagerException:
                self.apps_with_errors.add(app.get_id)
                self.last_processed_version = app.version

        if to_process:
            self.save()
Example No. 20
    def handle(self, *args, **options):
        domain, group_name = args
        group = Group.by_name(domain, name=group_name)
        owner_ids = get_all_owner_ids_from_group(group)
        pillow = CareBiharFluffPillow()
        db = CommCareCase.get_db()

        greenlets = []

        def process_case(case):
            pillow.change_transport(pillow.change_transform(case))


        for i, owner_id in enumerate(owner_ids):
            print '{0}/{1} owner_ids'.format(i, len(owner_ids))
            rows = CommCareCase.view(
                'hqcase/by_owner',
                startkey=[domain, owner_id],
                endkey=[domain, owner_id, {}],
                reduce=False,
            ).all()
            case_ids = [row['id'] for row in rows]
            print '{0} case_ids'.format(len(case_ids))
            for case in iter_docs(db, case_ids):
                g = gevent.Greenlet.spawn(process_case, case)
                greenlets.append(g)
        gevent.joinall(greenlets)
Example No. 21
 def get_cases(case_ids, ordered=False):
     return [
         CommCareCase.wrap(doc) for doc in iter_docs(
             CommCareCase.get_db(),
             case_ids
         )
     ]
Example No. 22
 def all(cls):
     ids = [
         res["id"]
         for res in cls.get_db().view("userreports/data_sources_by_domain", reduce=False, include_docs=False)
     ]
     for result in iter_docs(cls.get_db(), ids):
         yield cls.wrap(result)
Example No. 23
    def get_rows(self, datespan):
        def get_awc_filter(awcs):
            return get_nested_terms_filter("awc_name.#value", awcs)

        def get_gp_filter(gp):
            owner_ids = [user._id for user in self.users
                         if getattr(user, 'user_data', {}).get('gp') in self.gp]
            return es_filters.term("owner_id", owner_ids)

        def get_block_filter(block):
            return es_filters.term("block_name.#value", block.lower())

        query = case_es.CaseES().domain(self.domain)\
                .fields([])\
                .opened_range(lte=self.datespan.enddate_utc)\
                .term("type.exact", self.default_case_type)
        query.index = 'report_cases'

        if self.display_open_cases_only:
            query = query.filter(es_filters.OR(
                case_es.is_closed(False),
                case_es.closed_range(gte=self.datespan.enddate_utc)
            ))
        elif self.display_closed_cases_only:
            query = query.filter(case_es.closed_range(lte=self.datespan.enddate_utc))

        if self.awcs:
            query = query.filter(get_awc_filter(self.awcs))
        elif self.gp:
            query = query.filter(get_gp_filter(self.gp))
        elif self.block:
            query = query.filter(get_block_filter(self.block))
        result = query.run()
        return map(CommCareCase, iter_docs(CommCareCase.get_db(), result.ids))
Example No. 24
    def handle_one(self, domain, case_type, chunk_size):
        self.log('Copying {case_type} cases in {domain}'
                 .format(case_type=case_type, domain=domain))
        old_db = CommCareCase.get_db()
        new_db = IndicatorCase.get_db()
        assert old_db.uri != new_db.uri
        # this dbaccessor pulls from old_db
        case_ids = get_case_ids_in_domain(domain, case_type)
        self.delete_bad_doc_types(case_ids, chunk_size)
        case_dict_chunks = chunked(iter_docs(old_db, case_ids, chunk_size),
                                   chunk_size)

        for case_dicts in case_dict_chunks:
            for case_dict in case_dicts:
                del case_dict['_rev']
                case_dict.pop('_attachments', None)
                case_dict['doc_type'] = "IndicatorCase"
            try:
                results = new_db.bulk_save(case_dicts)
            except BulkSaveError as error:
                results = error.results
            for result in results:
                if result.get('error') == 'conflict':
                    self.log('- OK: [{id}] is already in the indicator db'
                             .format(id=result.get('id')))
                elif 'error' in result:
                    self.log('- ERROR: [{id}] ({result})'.format(
                        id=result.get('id'),
                        result=json.dumps(result)
                    ))
                else:
                    self.log('- ADDED: [{id}] saved to indicator db'.format(
                        id=result.get('id')
                    ))
Example No. 25
 def cache_users_at_location(self, selected_users):
     user_cache_list = []
     for doc in iter_docs(CommCareUser.get_db(), selected_users):
         display_username = user_display_string(
             doc['username'], doc.get('first_name', ''), doc.get('last_name', ''))
         user_cache_list.append({'text': display_username, 'id': doc['_id']})
     self.get_users_at_location.set_cached_value(self).to(user_cache_list)
Example No. 26
    def _delete_couch_data(self):
        for doc_class, doc_ids in get_doc_ids_to_dump(self.domain_name):
            db = doc_class.get_db()
            for docs in chunked(iter_docs(db, doc_ids), 100):
                db.bulk_delete(docs)

            self.assertEqual(0, len(get_docs(db, doc_ids)))
Example No. 27
    def generate_schema_from_builds(domain, case_type):
        """Builds a schema from Application builds for a given identifier

        :param domain: The domain that the export belongs to
        :param case_type: The case type of the cases being exported
        :returns: Returns an ExportDataSchema instance
        """
        app_build_ids = get_all_app_ids(domain)
        all_case_schema = CaseExportDataSchema()

        for app_doc in iter_docs(Application.get_db(), app_build_ids):
            app = Application.wrap(app_doc)
            case_property_mapping = get_case_properties(
                app,
                [case_type],
                include_parent_properties=False
            )
            case_schema = CaseExportDataSchema._generate_schema_from_case_property_mapping(
                case_property_mapping,
                app.version,
            )
            case_history_schema = CaseExportDataSchema._generate_schema_for_case_history(
                case_property_mapping,
                app.version,
            )

            all_case_schema = CaseExportDataSchema._merge_schemas(
                all_case_schema,
                case_schema,
                case_history_schema
            )

        return all_case_schema
Example No. 28
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(CustomDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = CustomDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type,
                               database=couchdb)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static:
        config.meta.build.finished = True
        config.save()
Example No. 29
    def cases(self):
        if 'debug_case' in self.request.GET:
            case = CommCareCase.get(self.request.GET['debug_case'])
            if case.domain != DOMAIN:
                raise Http404()
            return [case]

        query = case_es.CaseES().domain(self.domain)\
                .fields([])\
                .opened_range(lte=self.datespan.enddate_utc)\
                .case_type(self.default_case_type)
        query.index = 'report_cases'

        if self.case_status == 'open':
            query = query.filter(es_filters.OR(
                case_es.is_closed(False),
                case_es.closed_range(gte=self.datespan.enddate_utc)
            ))
        elif self.case_status == 'closed':
            query = query.filter(case_es.closed_range(lte=self.datespan.enddate_utc))

        query = query.owner([user['doc_id'] for user in self.users_matching_filter])

        result = query.run()

        return [
            CommCareCase.wrap(doc)
            for doc in iter_docs(CommCareCase.get_db(), result.ids)
        ]
Example No. 30
 def get_media_objects(self, languages=None):
     """
         Gets all the media objects stored in the multimedia map.
         If passed a list of languages, will only get those that are
         used in one of those languages.
     """
     found_missing_mm = False
     filter_multimedia = languages and self.media_language_map
     if filter_multimedia:
         requested_media = set()
         for lang in languages:
             requested_media.update(self.media_language_map[lang].media_refs)
     # preload all the docs to avoid excessive couch queries.
     # these will all be needed in memory anyway so this is ok.
     expected_ids = [map_item.multimedia_id for map_item in self.multimedia_map.values()]
     raw_docs = dict((d["_id"], d) for d in iter_docs(CommCareMultimedia.get_db(), expected_ids))
     for path, map_item in self.multimedia_map.items():
         if not filter_multimedia or not map_item.form_media or path in requested_media:
             media_item = raw_docs.get(map_item.multimedia_id)
             if media_item:
                 media_cls = CommCareMultimedia.get_doc_class(map_item.media_type)
                 yield path, media_cls.wrap(media_item)
             else:
                 # delete media reference from multimedia map so this doesn't pop up again!
                 del self.multimedia_map[path]
                 found_missing_mm = True
     if found_missing_mm:
         self.save()
Example No. 31
 def get_cases(case_ids, ordered=False, prefetched_indices=None):
     # prefetched_indices is ignored since cases already have them
     return [
         CommCareCase.wrap(doc)
         for doc in iter_docs(CommCareCase.get_db(), case_ids)
     ]
Example No. 32
 def _get_products(domain):
     for p_doc in iter_docs(Product.get_db(), Product.ids_by_domain(domain)):
         # filter out archived products from export
         if not ('is_archived' in p_doc and p_doc['is_archived']):
             yield Product.wrap(p_doc)
Example No. 33
    def record_iter(cls, start_datetime, end_datetime):
        group_ids = get_group_ids_by_last_modified(start_datetime,
                                                   end_datetime)

        return iter_docs(Group.get_db(), group_ids)
Example No. 34
 def all(cls):
     for result in iter_docs(cls.get_db(), cls.all_ids()):
         yield cls.wrap(result)
Example No. 35
 def assign_users(self, users):
     for doc in iter_docs(CommCareUser.get_db(), users):
         CommCareUser.wrap(doc).add_to_assigned_locations(self.location)
Example No. 36
 def unassign_users(self, users):
     for doc in iter_docs(CommCareUser.get_db(), users):
         # This could probably be sped up by bulk saving, but there's a lot
         # of stuff going on - seems tricky.
         CommCareUser.wrap(doc).unset_location_by_id(
             self.location.location_id, fall_back_to_next=True)
Example No. 37
 def get_new_docs(self, database=None):
     return iter_docs(self.get_new_ids(database))
Example No. 38
def filter_cases(request, domain, app_id, module_id):
    app = Application.get(app_id)
    module = app.get_module(module_id)
    delegation = request.GET.get('task-list') == 'true'
    auth_cookie = request.COOKIES.get('sessionid')

    suite_gen = SuiteGenerator(app)
    xpath = suite_gen.get_filter_xpath(module, delegation=delegation)
    extra_instances = [{
        'id': inst.id,
        'src': inst.src
    } for inst in suite_gen.get_extra_instances(module)]

    # touchforms doesn't like this to be escaped
    xpath = HTMLParser.HTMLParser().unescape(xpath)
    if delegation:
        case_type = DELEGATION_STUB_CASE_TYPE
    else:
        case_type = module.case_type

    if xpath:
        # if we need to do a custom filter, send it to touchforms for processing
        additional_filters = {
            "properties/case_type": case_type,
            "footprint": True
        }

        helper = SessionDataHelper(domain, request.couch_user)
        result = helper.filter_cases(xpath,
                                     additional_filters,
                                     DjangoAuth(auth_cookie),
                                     extra_instances=extra_instances)
        if result.get('status', None) == 'error':
            return HttpResponseServerError(
                result.get("message",
                           _("Something went wrong filtering your cases.")))

        case_ids = result.get("cases", [])
    else:
        # otherwise just use our built in api with the defaults
        case_ids = [
            res.id
            for res in get_filtered_cases(domain,
                                          status=CASE_STATUS_OPEN,
                                          case_type=case_type,
                                          user_id=request.couch_user._id,
                                          ids_only=True)
        ]

    cases = [
        CommCareCase.wrap(doc)
        for doc in iter_docs(CommCareCase.get_db(), case_ids)
    ]
    # refilter these because we might have accidentally included footprint cases
    # in the results from touchforms. this is a little hacky but the easiest
    # (quick) workaround. should be revisited when we optimize the case list.
    cases = filter(lambda c: c.type == case_type, cases)
    cases = [c.get_json(lite=True) for c in cases if c]
    parents = []
    if delegation:
        for case in cases:
            parent_id = case['indices']['parent']['case_id']
            parents.append(CommCareCase.get(parent_id))
        return json_response({'cases': cases, 'parents': parents})
    else:
        return json_response(cases)
Example No. 39
def get_all_commcare_users_by_domain(domain):
    """Returns all CommCareUsers by domain regardless of their active status"""
    ids = get_all_user_ids_by_domain(domain, include_web_users=False)
    return map(CommCareUser.wrap, iter_docs(CommCareUser.get_db(), ids))
Example No. 40
    def get_data(self):
        # todo: this will probably have to paginate eventually
        if self.all_relevant_forms:
            sp_ids = get_relevant_supply_point_ids(
                self.domain,
                self.active_location,
            )

            form_xmlnses = [
                form['xmlns'] for form in self.all_relevant_forms.values()
            ]
            spoint_loc_map = {
                doc['_id']: doc['location_id']
                for doc in iter_docs(SupplyPointCase.get_db(), sp_ids)
            }
            locations = _location_map(spoint_loc_map.values())

            for spoint_id, loc_id in spoint_loc_map.items():
                if loc_id not in locations:
                    continue  # it's archived, skip
                loc = locations[loc_id]

                results = StockReport.objects.filter(
                    stocktransaction__case_id=spoint_id).filter(
                        date__gte=self.converted_start_datetime,
                        date__lte=self.converted_end_datetime).values_list(
                            'form_id', 'date').distinct(
                            )  # not truly distinct due to ordering

                matched = False
                for form_id, date in results:
                    try:
                        if XFormInstance.get(form_id).xmlns in form_xmlnses:
                            yield {
                                'parent_name':
                                loc.parent.name if loc.parent else '',
                                'loc_id': loc.location_id,
                                'loc_path': loc.path,
                                'name': loc.name,
                                'type': loc.location_type.name,
                                'reporting_status': 'reporting',
                                'geo': geopoint(loc),
                                'last_reporting_date': date,
                            }
                            matched = True
                            break
                    except ResourceNotFound:
                        logging.error(
                            'Stock report for location {} in {} references non-existent form {}'
                            .format(loc.location_id, loc.domain, form_id))

                if not matched:
                    result = StockReport.objects.filter(
                        stocktransaction__case_id=spoint_id).values_list(
                            'date').order_by('-date')[:1]
                    yield {
                        'parent_name': loc.parent.name if loc.parent else '',
                        'loc_id': loc.location_id,
                        'loc_path': loc.path,
                        'name': loc.name,
                        'type': loc.location_type.name,
                        'reporting_status': 'nonreporting',
                        'geo': geopoint(loc),
                        'last_reporting_date': result[0][0] if result else ''
                    }
Example No. 41
 def choices_by_domain(cls, domain):
     group_ids = cls.ids_by_domain(domain)
     group_choices = []
     for group_doc in iter_docs(cls.get_db(), group_ids):
         group_choices.append((group_doc['_id'], group_doc['name']))
     return group_choices
Example No. 42
 def iter_documents(self, ids):
     return iter_docs(self._couch_db, ids, chunksize=500)
Example No. 43
def iter_cases(case_ids, wrap=True):
    from casexml.apps.case.models import CommCareCase
    for doc in iter_docs(CommCareCase.get_db(), case_ids):
        yield CommCareCase.wrap(doc) if wrap else doc
Example No. 44
 def get_users(self):
     user_ids = CommCareUser.ids_by_domain(self.domain)
     for user_doc in iter_docs(CommCareUser.get_db(), user_ids):
         yield CommCareUser.wrap(user_doc)
Example No. 45
 def _get_related_cases(results):
     ids = filter(None, [_related_id(res) for res in results])
     return dict((c['_id'], c) for c in iter_docs(CommCareCase.get_db(), ids))
Example No. 46
def generate_invoices(based_on_date=None):
    """
    Generates all invoices for the past month.
    """
    today = based_on_date or datetime.date.today()
    invoice_start, invoice_end = get_previous_month_date_range(today)
    log_accounting_info("Starting up invoices for %(start)s - %(end)s" % {
        'start': invoice_start.strftime(USER_DATE_FORMAT),
        'end': invoice_end.strftime(USER_DATE_FORMAT),
    })
    all_domain_ids = [d['id'] for d in Domain.get_all(include_docs=False)]
    for domain_doc in iter_docs(Domain.get_db(), all_domain_ids):
        domain_obj = Domain.wrap(domain_doc)
        if not domain_obj.is_active:
            continue
        try:
            invoice_factory = DomainInvoiceFactory(invoice_start, invoice_end, domain_obj)
            invoice_factory.create_invoices()
            log_accounting_info("Sent invoices for domain %s" % domain_obj.name)
        except CreditLineError as e:
            log_accounting_error(
                "There was an error utilizing credits for "
                "domain %s: %s" % (domain_obj.name, e),
                show_stack_trace=True,
            )
        except InvoiceError as e:
            log_accounting_error(
                "Could not create invoice for domain %s: %s" % (domain_obj.name, e),
                show_stack_trace=True,
            )
        except Exception as e:
            log_accounting_error(
                "Error occurred while creating invoice for "
                "domain %s: %s" % (domain_obj.name, e),
                show_stack_trace=True,
            )
    all_customer_billing_accounts = BillingAccount.objects.filter(is_customer_billing_account=True)
    for account in all_customer_billing_accounts:
        try:
            if account.invoicing_plan == InvoicingPlan.QUARTERLY:
                customer_invoice_start = invoice_start - relativedelta(months=2)
            elif account.invoicing_plan == InvoicingPlan.YEARLY:
                customer_invoice_start = invoice_start - relativedelta(months=11)
            else:
                customer_invoice_start = invoice_start
            invoice_factory = CustomerAccountInvoiceFactory(
                account=account,
                date_start=customer_invoice_start,
                date_end=invoice_end
            )
            invoice_factory.create_invoice()
        except CreditLineError as e:
            log_accounting_error(
                "There was an error utilizing credits for "
                "domain %s: %s" % (domain_obj.name, e),
                show_stack_trace=True,
            )
        except InvoiceError as e:
            log_accounting_error(
                "Could not create invoice for domain %s: %s" % (domain_obj.name, e),
                show_stack_trace=True,
            )
        except Exception as e:
            log_accounting_error(
                "Error occurred while creating invoice for "
                "domain %s: %s" % (domain_obj.name, e),
                show_stack_trace=True,
            )

    if not settings.UNIT_TESTING:
        _invoicing_complete_soft_assert(False, "Invoicing is complete!")
Example No. 47
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id, export_id, zip_name, download_id):

    def find_question_id(form, value):
        for k, v in form.iteritems():
            if isinstance(v, dict):
                ret = find_question_id(v, value)
                if ret:
                    return [k] + ret
            else:
                if v == value:
                    return [k]

        return None

    def filename(form_info, question_id, extension):
        fname = u"%s-%s-%s-%s%s"
        if form_info['cases']:
            fname = u'-'.join(form_info['cases']) + u'-' + fname
        return fname % (form_info['name'],
                        unidecode(question_id),
                        form_info['user'],
                        form_info['id'], extension)

    case_ids = set()

    def extract_form_info(form, properties=None, case_ids=case_ids):
        unknown_number = 0
        meta = form['form'].get('meta', dict())
        # get case ids
        case_blocks = extract_case_blocks(form)
        cases = {c['@case_id'] for c in case_blocks}
        case_ids |= cases

        form_info = {
            'form': form,
            'attachments': list(),
            'name': form['form'].get('@name', 'unknown form'),
            'user': meta.get('username', 'unknown_user'),
            'cases': cases,
            'id': form['_id']
        }
        for k, v in form['_attachments'].iteritems():
            if v['content_type'] == 'text/xml':
                continue
            try:
                question_id = unicode(u'-'.join(find_question_id(form['form'], k)))
            except TypeError:
                question_id = unicode(u'unknown' + unicode(unknown_number))
                unknown_number += 1

            if not properties or question_id in properties:
                extension = unicode(os.path.splitext(k)[1])
                form_info['attachments'].append({
                    'size': v['length'],
                    'name': k,
                    'question_id': question_id,
                    'extension': extension,
                    'timestamp': parse(form['received_on']).timetuple(),
                })

        return form_info

    key = [domain, app_id, xmlns]
    form_ids = {f['id'] for f in XFormInstance.get_db().view("attachments/attachments",
                                                             start_key=key + [startdate],
                                                             end_key=key + [enddate, {}],
                                                             reduce=False)}

    properties = set()
    if export_id:
        schema = FormExportSchema.get(export_id)
        for table in schema.tables:
            # - in question id is replaced by . in excel exports
            properties |= {c.display.replace('.', '-') for c in table.columns}

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in iter_docs(XFormInstance.get_db(), form_ids):
        if not zip_name:
            zip_name = unidecode(form['form'].get('@name', 'unknown form'))
        forms_info.append(extract_form_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    # get case names
    case_id_to_name = {c: c for c in case_ids}
    for case in iter_docs(CommCareCase.get_db(), case_ids):
        if case['name']:
            case_id_to_name[case['_id']] = case['name']

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        params = '_'.join(map(str, [xmlns, startdate, enddate, export_id, num_forms]))
        fname = '{}-{}'.format(app_id, hashlib.md5(params).hexdigest())
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, fname)
    else:
        _, fpath = tempfile.mkstemp()

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, 'wb') as zfile:
            with zipfile.ZipFile(zfile, 'w') as z:
                for form_number, form_info in enumerate(forms_info):
                    f = XFormInstance.wrap(form_info['form'])
                    form_info['cases'] = {case_id_to_name[case_id] for case_id in form_info['cases']}
                    for a in form_info['attachments']:
                        fname = filename(form_info, a['question_id'], a['extension'])
                        zi = zipfile.ZipInfo(fname, a['timestamp'])
                        z.writestr(zi, f.fetch_attachment(a['name'], stream=True).read(), zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip, form_number + 1, num_forms)

    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )

    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath)),
            expiry=(1 * 60 * 60),
            **common_kwargs
        )

    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
Example No. 48
 def all(cls):
     ids = [res['id'] for res in cls.get_db().view('userreports/data_sources_by_build_info',
                                                   reduce=False, include_docs=False)]
     for result in iter_docs(cls.get_db(), ids):
         yield cls.wrap(result)
Example No. 49
def get_open_case_docs_in_domain(domain, type=None, owner_id=None):
    from casexml.apps.case.models import CommCareCase
    case_ids = get_open_case_ids_in_domain(domain, type, owner_id)
    for doc in iter_docs(CommCareCase.get_db(), case_ids):
        yield doc
Example No. 50
    def rendered_content(self):
        from corehq.apps.users.views.mobile.users import EditCommCareUserView
        users = get_users_by_location_id(self.config['domain'],
                                         self.config['location_id'])
        in_charges = FacilityInCharge.objects.filter(
            location=self.location).values_list('user_id', flat=True)
        if self.location.parent.location_type.name == 'district':
            children = self.location.parent.get_descendants()
            availaible_in_charges = list(
                chain.from_iterable([
                    filter(
                        lambda u: 'In Charge' in u.user_data.get('role', []),
                        get_users_by_location_id(self.config['domain'],
                                                 child.location_id))
                    for child in children
                ]))
        else:
            availaible_in_charges = filter(
                lambda u: 'In Charge' in u.user_data.get('role', []),
                get_users_by_location_id(self.domain, self.location_id))
        user_to_dict = lambda sms_user: {
            'id':
            sms_user.get_id,
            'full_name':
            sms_user.full_name,
            'phone_numbers':
            sms_user.phone_numbers,
            'in_charge':
            sms_user.get_id in in_charges,
            'location_name':
            sms_user.location.sql_location.name,
            'url':
            reverse(EditCommCareUserView.urlname,
                    args=[self.config['domain'], sms_user.get_id])
        }

        web_users_from_extension = list(
            iter_docs(
                WebUser.get_db(),
                EWSExtension.objects.filter(
                    domain=self.domain,
                    location_id=self.location_id).values_list('user_id',
                                                              flat=True)))

        WebUserInfo = collections.namedtuple('WebUserInfo',
                                             'id first_name last_name email')

        web_users = {
            WebUserInfo(id=web_user['_id'],
                        first_name=web_user['first_name'],
                        last_name=web_user['last_name'],
                        email=web_user['email'])
            for web_user in (UserES().web_users().domain(
                self.config['domain']).term("domain_memberships.location_id",
                                            self.location_id).run().hits +
                             web_users_from_extension)
        }

        return render_to_string(
            'ewsghana/partials/users_tables.html', {
                'users': [user_to_dict(user) for user in users],
                'domain':
                self.domain,
                'location_id':
                self.location_id,
                'web_users':
                web_users,
                'district_in_charges':
                [user_to_dict(user) for user in availaible_in_charges]
            })
Example No. 51
 def _get_products(domain):
     product_ids = SQLProduct.objects.filter(domain=domain).product_ids()
     for p_doc in iter_docs(Product.get_db(), product_ids):
         # filter out archived products from export
         if not ('is_archived' in p_doc and p_doc['is_archived']):
             yield Product.wrap(p_doc)
Example No. 52
    def record_iter(cls, start_datetime, end_datetime):
        domain_ids = get_domain_ids_by_last_modified(start_datetime,
                                                     end_datetime)

        return iter_docs(Domain.get_db(), domain_ids)
Example No. 53
    def record_iter(cls, start_datetime, end_datetime):
        application_ids = get_application_ids_by_last_modified(
            start_datetime, end_datetime)

        return iter_docs(Application.get_db(), application_ids)
Example No. 54
 def get_all_data_sources(self):
     active_ids = get_all_registry_data_source_ids(is_active=True)
     for result in iter_docs(RegistryDataSourceConfiguration.get_db(),
                             active_ids):
         yield RegistryDataSourceConfiguration.wrap(result)
Example No. 55
    def record_iter(cls, start_datetime, end_datetime):
        user_ids = get_user_ids_by_last_modified(start_datetime, end_datetime)

        return iter_docs(CouchUser.get_db(), user_ids)