def get_translations(cls, lang, key=None, one=False):
    """Return translations for ``lang`` from the popularity view.

    :param lang: language code to look up
    :param key: if given, restrict results to this translation key
    :param one: return only the most popular translation(s)
    :returns: with ``key``: a list (or, with ``one``, the single best
        translation or None); without ``key``: a mapping of translation
        key -> list of translations (or, with ``one``, key -> best).
    """
    from corehq.apps.app_manager.models import Application
    if key:
        rows = Application.get_db().view('app_translations_by_popularity/view',
            startkey=[lang, key],
            endkey=[lang, key, {}],
            group=True
        ).all()
        # most popular first
        rows.sort(key=lambda x: -x['value'])
        # view key is [lang, key, translation]
        translations = [row['key'][2] for row in rows]
        if one:
            return translations[0] if translations else None
        return translations
    else:
        translations = defaultdict(list)
        rows = Application.get_db().view('app_translations_by_popularity/view',
            startkey=[lang],
            endkey=[lang, {}],
            group=True
        ).all()
        # group by translation key, most popular first within each key
        rows.sort(key=lambda x: (x['key'][1], -x['value']))
        for row in rows:
            _, translation_key, translation = row['key']
            translations[translation_key].append(translation)
        if one:
            # dict comprehension instead of dict([(k, v) ...])
            return {k: val[0] for k, val in translations.items()}
        return translations
def update_schema(self):
    """Fold any app versions saved since the last processed version into
    this schema, skipping remote apps and recording apps that error."""
    key = [self.domain, self.app_id]
    saved_rows = Application.get_db().view(
        'app_manager/saved_app',
        startkey=key + [self.last_processed_version],
        endkey=key + [{}],
        reduce=False,
        include_docs=False,
        # the startkey row was handled on a previous run
        skip=(1 if self.last_processed_version else 0)).all()
    already_seen = self.apps_with_errors | self.processed_apps
    to_process = [row['id'] for row in saved_rows if row['id'] not in already_seen]
    if self.app_id not in already_seen:
        to_process.append(self.app_id)
    for app_doc in iter_docs(Application.get_db(), to_process):
        # remote apps carry no schema information
        if app_doc['doc_type'] == 'RemoteApp':
            continue
        app = Application.wrap(app_doc)
        try:
            self.update_for_app(app)
        except AppManagerException:
            self.apps_with_errors.add(app.get_id)
        self.last_processed_version = app.version
    if to_process:
        self.save()
def update_analytics_indexes():
    """
    Mostly for testing; wait until analytics data sources are up to date
    so that calls to analytics functions return up-to-date
    """
    from corehq.apps.app_manager.models import Application
    # hitting each view with limit=1 forces couch to bring its index current
    form_views = (
        'couchforms/all_submissions_by_domain',
        'all_forms/view',
        'exports_forms_by_xform/view',
    )
    form_db = XFormInstance.get_db()
    for view_name in form_views:
        form_db.view(view_name, limit=1).all()
    Application.get_db().view('exports_forms_by_app/view', limit=1).all()
def setUp(self):
    # Build a domain, a mobile worker, and an app whose form carries a
    # success message, then submit forms at 6-hour intervals over the
    # past 8 days so the day/week counts in the message can be verified.
    # NOTE(review): this is Python-2-era code (xrange, int division in
    # ``24/spacing``, bytes in StringIO) — confirm the target runtime.
    create_domain(self.domain)
    couch_user = CommCareUser.create(self.domain, self.username, self.password)
    userID = couch_user.user_id
    couch_user.first_name = self.first_name
    couch_user.last_name = self.last_name
    couch_user.save()
    self.sm = SuccessMessage(self.message, userID, tz=self.tz)
    c = Client()
    app = Application.new_app(self.domain, "Test App", application_version=APP_V1)
    app.add_module(Module.new_module("Test Module", "en"))
    form = app.new_form(0, "Test Form", "en")
    form.xmlns = self.xmlns
    app.success_message = {"en": self.message}
    app.save()
    # hack: prime the view once so the success message takes even though we use stale queries in submissions
    Application.get_db().view('exports_forms/by_xmlns', limit=1).one()

    def fake_form_submission(userID=userID, username=self.username, xmlns=self.xmlns, time=None):
        # POST a canned XForm to the receiver, optionally back-dating it
        # via the X-SUBMIT-TIME header.
        submission = submission_template % {
            "userID": userID,
            "username": username,
            "xmlns": xmlns
        }
        f = StringIO(submission.encode('utf-8'))
        f.name = "tempfile.xml"
        kwargs = dict(HTTP_X_SUBMIT_TIME=json_format_datetime(time)) if time else {}
        response = c.post("/a/{self.domain}/receiver/".format(self=self), {
            'xml_submission_file': f,
        }, **kwargs)
        return response

    self.num_forms_today = 0
    self.num_forms_this_week = 0
    now = datetime.utcnow()
    tznow = now + self.tz
    # Monday 00:00 of the current (local) week and midnight today, both
    # shifted back to UTC for comparison against submission times
    week_start = tznow - timedelta(days=tznow.weekday())
    week_start = datetime(week_start.year, week_start.month, week_start.day) - self.tz
    day_start = datetime(tznow.year, tznow.month, tznow.day) - self.tz
    spacing = 6
    # one submission every ``spacing`` hours covering 8 days back
    for h in xrange((24/spacing)*8):
        time = now-timedelta(hours=spacing*h)
        response = fake_form_submission(time=time)
        if time > week_start:
            self.num_forms_this_week += 1
        if time > day_start:
            self.num_forms_today += 1
    # ``response`` here is from the last (oldest) submission in the loop
    self.assertEqual(
        response.content,
        get_simple_response_xml(("Thanks {self.first_name} ({self.first_name} {self.last_name})! "
                                 "You have submitted {self.num_forms_today} forms today "
                                 "and {self.num_forms_this_week} forms since Monday.").format(self=self),
                                nature=ResponseNature.SUBMIT_SUCCESS)
    )
def get_pre_migration_copy(app):
    """Walk the saved builds of ``app`` newest-first and return the first
    copy built before ORIGINAL_MIGRATION_DATE with a lower version, or
    None when no such build exists."""
    from corehq.apps.app_manager.util import get_correct_app_class

    mindate = json_format_datetime(datetime(1980, 1, 1))
    migrate_date = json_format_datetime(ORIGINAL_MIGRATION_DATE)

    def date_key(doc):
        # builds with no built_on sort as very old
        return doc.get("built_on") or mindate

    skip = 0
    docs = None
    # page through saved builds five at a time until we reach builds
    # older than the migration cutoff
    while docs is None or date_key(docs[-1]) > migrate_date:
        saved_apps = [row['doc'] for row in Application.get_db().view(
            'app_manager/saved_app',
            startkey=[app.domain, app._id, {}],
            endkey=[app.domain, app._id],
            descending=True,
            skip=skip,
            limit=5,
            include_docs=True,
        )]
        if not saved_apps:
            break
        skip += len(saved_apps)
        docs = sorted(saved_apps, key=date_key, reverse=True)
        for doc in docs:
            if date_key(doc) < migrate_date:
                copy = get_correct_app_class(doc).wrap(doc)
                if copy.version < app.version:
                    return copy
    return None
def test_prune_autogenerated_builds(self):
    """Pruning removes auto-generated builds and keeps manual ones."""
    kafka_seq = get_topic_offset(topics.APP)
    couch_seq = get_current_seq(Application.get_db())
    app = self._create_app('test-prune-app')

    # Build #1: manually generated
    build1 = app.make_build()
    build1.save()
    self.assertFalse(build1.is_auto_generated)

    # Build #2: auto-generated
    app.save()
    build2 = make_async_build(app, 'someone')

    # Build #3: manually generated
    app.save()
    build3 = app.make_build()
    build3.save()

    # all three builds should be indexed in ES
    self.refresh_elasticsearch(kafka_seq, couch_seq)
    es_ids = AppES().domain(self.domain).is_build().values_list('_id', flat=True)
    self.assertItemsEqual(es_ids, [build1._id, build2._id, build3._id])

    # pruning drops only the auto-generated build
    prune_auto_generated_builds(self.domain, app.id)
    self.refresh_elasticsearch(kafka_seq, couch_seq)
    es_ids = AppES().domain(self.domain).is_build().values_list('_id', flat=True)
    self.assertItemsEqual(es_ids, [build1._id, build3._id])
def test_app_pillow_kafka(self):
    """An app save should flow couch -> kafka -> elasticsearch intact."""
    consumer = get_test_kafka_consumer(topics.APP)
    # have to get the seq id before the change is processed
    kafka_seq = get_topic_offset(topics.APP)
    couch_seq = get_current_seq(Application.get_db())

    app_name = 'app-{}'.format(uuid.uuid4().hex)
    app = self._create_app(app_name)
    app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
    app_db_pillow.process_changes(couch_seq, forever=False)

    # confirm change made it to kafka
    kafka_message = next(consumer)
    change_meta = change_meta_from_kafka_message(kafka_message.value)
    self.assertEqual(app._id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    app_pillow = get_app_to_elasticsearch_pillow()
    app_pillow.process_changes(since=kafka_seq, forever=False)
    self.es.indices.refresh(APP_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = AppES().run()
    self.assertEqual(1, results.total)
    indexed_doc = results.hits[0]
    self.assertEqual(self.domain, indexed_doc['domain'])
    self.assertEqual(app['_id'], indexed_doc['_id'])
    self.assertEqual(app_name, indexed_doc['name'])
def get_all_app_ids(cls):
    """Return the set of ids of all applications across all domains."""
    view_rows = Application.get_db().view(
        "app_manager/applications",
        startkey=[None],
        endkey=[None, {}],
        reduce=False,
    ).all()
    return {row["id"] for row in view_rows}
def get_unknown_form_name(self, xmlns, app_id=None, none_if_not_found=False):
    """Best-effort lookup of a human-readable form name for ``xmlns``.

    First checks the app doc (when ``app_id`` is usable), then falls back
    to a cached couch view over submitted form names.

    :param none_if_not_found: when True return None instead of the
        "Name Unknown" placeholder when nothing matches
    """
    if app_id is not None and app_id != '_MISSING_APP_ID':
        try:
            app = Application.get_db().get(app_id)
        except ResourceNotFound:
            # must have been a weird app id, don't fail hard
            pass
        else:
            for module in app.get('modules', []):
                for form in module['forms']:
                    if form['xmlns'] == xmlns:
                        # form['name'] maps language -> name; any one will do.
                        # list(...) so this also works on Python 3 dict views
                        # (``.values()[0]`` raised TypeError there)
                        return list(form['name'].values())[0]
    key = ["xmlns", self.domain, xmlns]
    results = cache_core.cached_view(
        XFormInstance.get_db(),
        'reports_forms/name_by_xmlns',
        reduce=False,
        startkey=key,
        endkey=key + [{}],
        limit=1,
        cache_expire=60
    )
    try:
        data = list(results)[0]
    except IndexError:
        data = None
    if data:
        return data['value']
    return None if none_if_not_found else "Name Unknown"
def test_edit_commcare_profile(self, mock):
    """Posting custom properties twice should report the latest values."""
    app2 = Application.new_app(self.project.name, "TestApp2")
    app2.save()
    self.addCleanup(lambda: Application.get_db().delete_doc(app2.id))

    def post_properties(props):
        # POST a custom_properties payload and return the parsed JSON response
        resp = self.client.post(
            reverse('edit_commcare_profile', args=[self.project.name, app2._id]),
            json.dumps({"custom_properties": props}),
            content_type='application/json')
        return json.loads(resp.content)

    content = post_properties({"random": "value", "another": "value"})
    custom_properties = content["changed"]["custom_properties"]
    self.assertEqual(custom_properties["random"], "value")
    self.assertEqual(custom_properties["another"], "value")

    content = post_properties({"random": "changed"})
    custom_properties = content["changed"]["custom_properties"]
    self.assertEqual(custom_properties["random"], "changed")
def iter_attachments(self):
    """Yield (doc, filename, info) for each attachment on every app doc
    of the migration's doc types, showing a progress bar."""
    app_docs = get_all_docs_with_doc_types(
        Application.get_db(), apps_migration.doc_types)
    for app_doc in with_progress_bar(app_docs, length=self.docs_count):
        attachments = app_doc.get('_attachments')
        if attachments:
            for name, info in attachments.items():
                yield app_doc, name, info
def handle(self, *args, **options):
    """Scan builds saved between startdate and enddate for forms with
    empty case blocks; report affected case types (with case counts per
    domain) and the set of offending form xmlns values."""
    import pprint

    start = options['startdate']
    end = options['enddate']
    # print as a function: the original ``print ...`` statements were
    # Python-2-only syntax and fail to parse under Python 3
    print('Starting...\n')
    ids = get_build_ids(start, end)
    print('Checking {} builds\n'.format(len(ids)))
    case_types_by_domain = {}
    all_form_xmlns = set()
    for build in iter_docs(Application.get_db(), ids):
        domain = build.get('domain')
        errors = forms_with_empty_case_block(build)
        if not errors:
            continue
        case_types, form_xmlns = errors
        all_form_xmlns |= form_xmlns
        domain_case_counts = case_types_by_domain.setdefault(domain, {})
        # only count case types not already counted for this domain
        case_counts = {
            case_type: get_number_of_cases_in_domain(domain, case_type)
            for case_type in case_types if case_type not in domain_case_counts
        }
        domain_case_counts.update(case_counts)
    pprint.pprint(case_types_by_domain)
    print()
    print(all_form_xmlns)
def handle(self, commit, **options):
    """Backfill ``created_from_template`` on FFX apps created between the
    initial FFX release and the analytics release."""
    logger.setLevel('DEBUG')
    start_date = datetime(2020, 3, 19)  # Initial release of FFX app
    end_date = datetime(2020, 4, 4)  # Release of analytics
    app_query = (
        AppES()
        .term('doc_type', 'Application')
        .missing('created_from_template')
        .date_range('date_created', gt=start_date, lt=end_date)
    )
    hits = app_query.run().hits
    logger.info(f"Pulled {len(hits)} apps from ES")

    def _looks_imported(hit):
        # imported library apps have 'FFX' in the name, exactly 9 modules,
        # and either no family or a known family id
        return (
            'FFX' in hit['name']
            and len(hit['modules']) == 9
            and (not hit['family_id'] or hit['family_id'] in KNOWN_FAMILY_IDS)
        )

    hits = [hit for hit in hits if _looks_imported(hit)]
    logger.info(
        f"Filtered to {len(hits)} apps likely imported from app library")
    for hit in hits:
        app = wrap_app(Application.get_db().get(hit['_id']))
        app.created_from_template = _get_app_id(hit['date_created'])
        if commit:
            app.save(increment_version=False)
    logger.info(
        f"Done with backfill_created_from_template, commit={commit}")
def find_broken_suite_files(start, end):
    """Yield progress/report lines for builds between ``start`` and
    ``end`` whose suite.xml attachment is missing or fails validation."""
    yield 'Starting...\n'
    db = Application.get_db()
    build_ids = db.view('app_manager/builds_by_date', startkey=start, endkey=end,
                        reduce=False, wrapper=lambda row: row['id']).all()
    for build_id in build_ids:
        error = None
        try:
            suite = db.fetch_attachment(build_id, 'files/suite.xml')
        except ResourceNotFound:
            error = 'build has no attachment files/suite.xml'
        else:
            try:
                suite_xml.validate_suite(suite)
            except SuiteValidationError as e:
                # BUG FIX: ``except ... as error: pass`` unbinds ``error``
                # after the except block on Python 3, so the ``if error``
                # check below raised NameError; capture it explicitly.
                error = e
        if error:
            build = db.get(build_id)
            yield '%s\t%s\t%s\t%s\t%s\n' % (
                build.get('built_on'),
                build.get('domain'),
                build_id,
                build.get('copy_of'),
                error,
            )
    yield 'Done.\n'
def _get_app_ids(self, startkey):
    """Return the set of app ids whose view key starts with ``startkey``."""
    rows = Application.get_db().view(
        'app_manager/applications',
        startkey=startkey,
        endkey=startkey + [{}],
        reduce=False,
    ).all()
    return {row['id'] for row in rows}
def paginate_releases(request, domain, app_id):
    """Return up to ``limit`` saved builds of the app as JSON, starting
    below the ``start_build`` version number when one is given."""
    try:
        limit = int(request.GET.get("limit"))
    except (TypeError, ValueError):
        limit = 10
    start_build = {}
    start_build_param = request.GET.get("start_build")
    if start_build_param and json.loads(start_build_param):
        start_build = json.loads(start_build_param)
        assert isinstance(start_build, int)
    timezone = get_timezone_for_user(request.couch_user, domain)
    saved_apps = Application.get_db().view(
        "app_manager/saved_app",
        startkey=[domain, app_id, start_build],
        endkey=[domain, app_id],
        descending=True,
        limit=limit,
        wrapper=lambda x: SavedAppBuild.wrap(x["value"]).to_saved_build_json(timezone),
    ).all()
    for build_json in saved_apps:
        # remote apps carry no multimedia to include
        build_json["include_media"] = build_json["doc_type"] != "RemoteApp"
    return json_response(saved_apps)
def handle(self, **options):
    """Run the app migration across all relevant app ids via iter_update."""
    self.options = options
    app_ids = self.get_app_ids()
    logger.info('migrating {} apps'.format(len(app_ids)))
    migration_results = iter_update(
        Application.get_db(), self._migrate_app, app_ids, verbose=True)
    self.results_callback(migration_results)
    logger.info('done')
def get_pre_migration_copy(app):
    """Return the most recent saved build of ``app`` built before
    ORIGINAL_MIGRATION_DATE with a version lower than ``app``'s, or None."""
    from corehq.apps.app_manager.util import get_correct_app_class

    fallback_date = json_format_datetime(datetime(1980, 1, 1))
    cutoff = json_format_datetime(ORIGINAL_MIGRATION_DATE)

    def built_on(doc):
        # missing built_on sorts as ancient
        return doc.get("built_on") or fallback_date

    offset = 0
    page = None
    # pull pages of 5 builds, newest first, until the page's oldest build
    # predates the cutoff
    while page is None or built_on(page[-1]) > cutoff:
        fetched = [
            row['doc'] for row in Application.get_db().view(
                'app_manager/saved_app',
                startkey=[app.domain, app._id, {}],
                endkey=[app.domain, app._id],
                descending=True,
                skip=offset,
                limit=5,
                include_docs=True,
            )
        ]
        if not fetched:
            break
        offset += len(fetched)
        page = sorted(fetched, key=built_on, reverse=True)
        for doc in page:
            if built_on(doc) < cutoff:
                candidate = get_correct_app_class(doc).wrap(doc)
                if candidate.version < app.version:
                    return candidate
    return None
def paginate_releases(request, domain, app_id):
    """Return up to ``limit`` saved builds as JSON, annotated with media
    inclusion, J2ME support, and (optionally) per-version error counts."""
    try:
        limit = int(request.GET.get('limit'))
    except (TypeError, ValueError):
        limit = 10
    start_build_param = request.GET.get('start_build')
    if start_build_param and json.loads(start_build_param):
        start_build = json.loads(start_build_param)
        assert isinstance(start_build, int)
    else:
        start_build = {}
    timezone = get_timezone_for_user(request.couch_user, domain)
    saved_apps = Application.get_db().view(
        'app_manager/saved_app',
        startkey=[domain, app_id, start_build],
        endkey=[domain, app_id],
        descending=True,
        limit=limit,
        wrapper=lambda x: SavedAppBuild.wrap(x['value']).to_saved_build_json(
            timezone),
    ).all()
    j2me_enabled_configs = CommCareBuildConfig.j2me_enabled_config_labels()
    for build_json in saved_apps:
        build_json['include_media'] = build_json['doc_type'] != 'RemoteApp'
        build_json['j2me_enabled'] = build_json['menu_item_label'] in j2me_enabled_configs
    if toggles.APPLICATION_ERROR_REPORT.enabled(request.couch_user.username):
        versions = [b['version'] for b in saved_apps]
        error_counts = _get_error_counts(domain, app_id, versions)
        for build_json in saved_apps:
            build_json['num_errors'] = error_counts.get(build_json['version'], 0)
    return json_response(saved_apps)
def assertNoMissingXmlnss(self, delete_apps=True):
    """Assert no submissions or app forms have an undefined xmlns;
    optionally delete the saved apps afterwards."""
    submissions = XFormInstance.get_db().view(
        'couchforms/by_xmlns',
        key="undefined",
        include_docs=False,
        reduce=False,
    ).all()
    self.assertEqual(submissions, [])
    rows = Application.get_db().view(
        'app_manager/saved_app',
        include_docs=True,
    )
    apps = [get_correct_app_class(row['doc']).wrap(row['doc']) for row in rows]
    try:
        for app in apps:
            for form in app.get_forms():
                # neither the form source nor its xmlns may be "undefined"
                self.assertEqual(form.source.count('xmlns="undefined"'), 0)
                self.assertNotEqual(form.xmlns, 'undefined')
    finally:
        if delete_apps:
            for app in apps:
                app.delete()
def find_broken_suite_files(start, end):
    """Yield report lines for builds between ``start`` and ``end`` whose
    suite.xml is missing or does not validate."""
    yield 'Starting...\n'
    db = Application.get_db()
    build_ids = db.view(
        'app_manager/builds_by_date',
        startkey=start,
        endkey=end,
        reduce=False,
        wrapper=lambda row: row['id']
    ).all()
    for build_id in build_ids:
        error = None
        try:
            suite = db.fetch_attachment(build_id, 'files/suite.xml')
        except ResourceNotFound:
            error = 'build has no attachment files/suite.xml'
        else:
            try:
                suite_xml.validate_suite(suite)
            except SuiteValidationError as e:
                # BUG FIX: binding the exception as ``error`` and passing
                # left ``error`` unbound after the except block on
                # Python 3 (NameError at ``if error``); assign explicitly.
                error = e
        if error:
            build = db.get(build_id)
            yield '%s\t%s\t%s\t%s\t%s\n' % (
                build.get('built_on'),
                build.get('domain'),
                build_id,
                build.get('copy_of'),
                error,
            )
    yield 'Done.\n'
def _migrate_app_ids(app_ids):
    # Migrate app docs in batches of ~25, collecting per-app failures
    # instead of aborting the whole run.
    # NOTE(review): this references ``self``, ``errors`` and ``logger``
    # without defining them, so it was presumably extracted from (or
    # nested inside) a migration method/closure — confirm before reusing
    # standalone.
    to_save = []
    count = len(app_ids)
    logger.info('migrating {} apps'.format(count))
    for i, app_doc in enumerate(
            iter_docs(Application.get_db(), app_ids)):
        try:
            if app_doc["doc_type"] in [
                "Application", "Application-Deleted"
            ]:
                application = Application.wrap(app_doc)
                should_save = self.migrate_app(application)
                if should_save:
                    to_save.append(application)
                    # flush periodically to bound memory use
                    if len(to_save) > 25:
                        self.bulk_save(to_save)
                        logger.info('completed {}/{} apps'.format(
                            i, count))
                        to_save = []
        except Exception:
            # record and continue; sys.exc_info()[0] is the exception class
            errors.append(
                "App {id} not properly migrated because {error}".
                format(id=app_doc['_id'], error=sys.exc_info()[0]))
    if to_save:
        self.bulk_save(to_save)
def test_prune_autogenerated_builds(self):
    """Auto-generated builds are pruned; manual builds survive."""
    kafka_seq = get_topic_offset(topics.APP)
    couch_seq = get_current_seq(Application.get_db())
    app = self._create_app('test-prune-app')

    # manual build #1
    build1 = app.make_build()
    build1.save()
    self.assertFalse(build1.is_auto_generated)
    # auto-generated build #2
    app.save()
    autogenerate_build(app, 'username')
    # manual build #3
    app.save()
    build3 = app.make_build()
    build3.save()

    # all three builds should be indexed
    self.refresh_elasticsearch(kafka_seq, couch_seq)
    indexed_ids = AppES().domain(self.domain).is_build().values_list(
        '_id', flat=True)
    self.assertEqual(len(indexed_ids), 3)

    # pruning removes only the auto-generated build
    prune_auto_generated_builds(self.domain, app.id)
    self.refresh_elasticsearch(kafka_seq, couch_seq)
    indexed_ids = AppES().domain(self.domain).is_build().values_list(
        '_id', flat=True)
    self.assertItemsEqual(indexed_ids, [build1._id, build3._id])
def handle(self, *args, **options):
    """Entry point: migrate every app id and report the results."""
    self.options = options
    ids_to_migrate = self.get_app_ids()
    logger.info('migrating {} apps'.format(len(ids_to_migrate)))
    outcome = iter_update(
        Application.get_db(), self._migrate_app, ids_to_migrate, verbose=True)
    self.results_callback(outcome)
    logger.info('done')
def _check_response(self, response):
    """Assert the response redirects into the first form of the newly
    created app, and register the app doc for cleanup."""
    self.assertEqual(response.status_code, 302)
    location = response['Location']
    # exactly one 32-hex-char app id in the redirect URL
    [app_id] = re.findall(r'[a-fA-F0-9]{32}', location)
    self.assertTrue(location.endswith('{}/modules-0/forms-0/'.format(app_id)))
    self.addCleanup(lambda: Application.get_db().delete_doc(app_id))
def response_cloudcare(self):
    """
    CloudCare enabled apps will have cloudcare_enabled set to false on downgrade.
    """
    key = [self.domain.name]
    db = Application.get_db()
    domain_apps = db.view(
        'app_manager/applications_brief',
        reduce=False,
        startkey=key,
        endkey=key + [{}],
    ).all()
    # collect (id, name) for every app in the domain with CloudCare on
    cloudcare_enabled_apps = []
    for app_doc in iter_docs(db, [a['id'] for a in domain_apps]):
        if app_doc.get('cloudcare_enabled', False):
            cloudcare_enabled_apps.append((app_doc['_id'], app_doc['name']))
    if not cloudcare_enabled_apps:
        # nothing to warn about for this plan change
        return None
    num_apps = len(cloudcare_enabled_apps)
    return self._fmt_alert(
        ungettext(
            "You have %(num_apps)d application that will lose CloudCare access if you select this plan.",
            "You have %(num_apps)d applications that will lose CloudCare access if you select this plan.",
            num_apps
        ) % {
            'num_apps': num_apps,
        },
        # NOTE(review): %-formatting the *result* of mark_safe() may yield a
        # plain (non-safe, unescaped) string depending on Django version —
        # confirm the links render as intended and that names are escaped.
        [mark_safe('<a href="%(url)s">%(title)s</a>') % {
            'title': a[1],
            'url': reverse('view_app', args=[self.domain.name, a[0]])
        } for a in cloudcare_enabled_apps],
    )
def get_exports_by_form(domain):
    """Return export view rows for ``domain`` augmented with submission
    counts, plus synthesized rows for forms with counts but no app row,
    sorted by view key."""
    from corehq.apps.app_manager.models import Application
    rows = Application.get_db().view(
        'exports_forms_by_app/view',
        startkey=[domain],
        endkey=[domain, {}],
        group=True,
        stale=stale_ok(),
    ).all()
    form_count_breakdown = get_form_count_breakdown_for_domain(domain)
    # attach submission counts to known rows, consuming the breakdown
    for row in rows:
        count = form_count_breakdown.pop(tuple(row['key']), None)
        if count is not None:
            row['value']['submissions'] = count
    # leftover counts have no app row; synthesize rows for them
    for key, value in form_count_breakdown.items():
        rows.append({
            'key': list(key),
            'value': {'xmlns': key[2], 'submissions': value},
        })
    rows.sort(key=lambda row: row['key'])
    return rows
def _check_response(self, response):
    """Verify the redirect points at the new form's source page and
    schedule the created app for cleanup."""
    self.assertEqual(response.status_code, 302)
    redirect_location = response['Location']
    matches = re.compile(r'[a-fA-F0-9]{32}').findall(redirect_location)
    [app_id] = matches  # exactly one app id expected
    expected_suffix = '{}/modules-0/forms-0/source/'.format(app_id)
    self.assertTrue(redirect_location.endswith(expected_suffix))
    self.addCleanup(lambda: Application.get_db().delete_doc(app_id))
def generate_schema_from_builds(domain, case_type):
    """Builds a schema from Application builds for a given identifier

    :param domain: The domain that the export belongs to
    :param unique_form_id: The unique identifier of the item being exported
    :returns: Returns a ExportDataSchema instance
    """
    schema = CaseExportDataSchema()
    for app_doc in iter_docs(Application.get_db(), get_all_app_ids(domain)):
        app = Application.wrap(app_doc)
        mapping = get_case_properties(
            app,
            [case_type],
            include_parent_properties=False
        )
        case_schema = CaseExportDataSchema._generate_schema_from_case_property_mapping(
            mapping,
            app.version,
        )
        history_schema = CaseExportDataSchema._generate_schema_for_case_history(
            mapping,
            app.version,
        )
        # fold this build's schemas into the accumulated schema
        schema = CaseExportDataSchema._merge_schemas(
            schema,
            case_schema,
            history_schema
        )
    return schema
def paginate_releases(request, domain, app_id):
    """One page of saved builds (newest first) rendered to JSON."""
    raw_limit = request.GET.get('limit')
    try:
        limit = int(raw_limit)
    except (TypeError, ValueError):
        limit = 10
    start_build = {}
    raw_start = request.GET.get('start_build')
    if raw_start and json.loads(raw_start):
        start_build = json.loads(raw_start)
        assert isinstance(start_build, int)
    timezone = get_timezone_for_user(request.couch_user, domain)
    saved_apps = Application.get_db().view(
        'app_manager/saved_app',
        startkey=[domain, app_id, start_build],
        endkey=[domain, app_id],
        descending=True,
        limit=limit,
        wrapper=lambda x: SavedAppBuild.wrap(x['value']).to_saved_build_json(timezone),
    ).all()
    for build_json in saved_apps:
        # remote apps carry no multimedia
        build_json['include_media'] = build_json['doc_type'] != 'RemoteApp'
    if toggles.APPLICATION_ERROR_REPORT.enabled(request.couch_user.username):
        error_counts = _get_error_counts(
            domain, app_id, [b['version'] for b in saved_apps])
        for build_json in saved_apps:
            build_json['num_errors'] = error_counts.get(build_json['version'], 0)
    return json_response(saved_apps)
def get_all_apps(self, reset=False, batchsize=10):
    """
    Iterate over all apps, including previous versions, wrapped, with a
    progress bar. Linked and remote apps are not included. ``reset``
    discards any saved resumable-iteration state.
    """
    db = Application.get_db()
    doc_type = Application.__name__
    keys = [[doc_type], ['{}-Deleted'.format(doc_type)]]
    view_name = 'all_docs/by_doc_type'
    raw_iter = resumable_view_iterator(
        db, self.iteration_key, view_name,
        view_keys=keys, chunk_size=batchsize, full_row=True)
    if reset:
        raw_iter.discard_state()
    modified_start_key, keys = get_keys_to_search(keys, raw_iter)
    remaining = get_remaining_app_count(db, view_name, keys, modified_start_key)
    # drop rows that fail to wrap (wrap_app returns a falsy value)
    wrapped = (self.wrap_app(row) for row in raw_iter)
    apps = (app for app in wrapped if app)
    return with_progress_bar(apps, remaining)
def get_saved_apps(app):
    """Return all saved builds of ``app``, each wrapped as its correct
    app class."""
    rows = Application.get_db().view(
        'app_manager/saved_app',
        startkey=[app.domain, app._id],
        endkey=[app.domain, app._id, {}],
        include_docs=True,
    )
    wrapped = []
    for row in rows:
        doc = row['doc']
        wrapped.append(get_correct_app_class(doc).wrap(doc))
    return wrapped
def setUpClass(cls):
    """Create the domain, a mobile worker, and an app whose form carries
    the success message under test."""
    create_domain(cls.domain)
    couch_user = CommCareUser.create(cls.domain, cls.username, cls.password)
    cls.userID = couch_user.user_id
    couch_user.first_name = cls.first_name
    couch_user.last_name = cls.last_name
    couch_user.save()
    cls.sm = SuccessMessage(cls.message, cls.userID, tz=cls.tz)
    app = Application.new_app(cls.domain, "Test App", application_version=APP_V1)
    app.add_module(Module.new_module("Test Module", "en"))
    test_form = app.new_form(0, "Test Form", "en")
    test_form.xmlns = cls.xmlns
    app.success_message = {"en": cls.message}
    app.save()
    # hack: prime the view once so the success message takes even though
    # we use stale queries in submissions
    Application.get_db().view('exports_forms/by_xmlns', limit=1).one()
def get_current_apps_iter(query, batchsize):
    """Yield wrapped Application docs for each app id produced by
    ``query``, fetching couch docs in batches of ``batchsize``."""
    db = Application.get_db()
    paginator = Paginator(query, 100)
    for page_num in paginator.page_range:
        app_ids = list(paginator.page(page_num).object_list)
        for doc in iter_docs(db, app_ids, chunksize=batchsize):
            yield Application.wrap(doc)
def get_form_details_for_xmlns(domain, xmlns):
    """Return form detail records for every form in ``domain`` that uses
    ``xmlns``."""
    startkey = ["xmlns", domain, xmlns]
    rows = Application.get_db().view(
        "forms_by_app_info/view",
        startkey=startkey,
        endkey=startkey + [{}],
        reduce=False,
        stale=stale_ok(),
    ).all()
    return [_row_to_form_details(row) for row in rows]
def applications(self):
    """All (brief) application rows for the request's domain."""
    domain_key = [self.request.domain]
    return Application.get_db().view(
        'app_manager/applications_brief',
        reduce=False,
        startkey=domain_key,
        endkey=domain_key + [{}],
    ).all()
def _raw_data(self, startkey, endkey=None, reduce=False, group=False):
    """Query reports_forms/by_app_info between ``startkey`` and
    ``endkey`` (defaulting the end to the startkey prefix)."""
    if endkey is None:
        endkey = startkey
    # couch takes either ``group`` or ``reduce``, not both
    view_kwargs = {'group': group} if group else {'reduce': reduce}
    return Application.get_db().view(
        'reports_forms/by_app_info',
        startkey=startkey,
        endkey=endkey + [{}],
        **view_kwargs
    ).all()
def _get_domain_attachments_size(self):
    """Map (app_id, xmlns) -> human-readable attachment size for this
    domain."""
    startkey = [self.domain]
    rows = Application.get_db().view(
        "attachments/attachments",
        startkey=startkey,
        endkey=startkey + [{}],
        group_level=3,
        reduce=True,
        group=True,
    )
    return {
        (row["key"][1], row["key"][2]): sizeof_fmt(row["value"])
        for row in rows
    }
def set_app_id_choices(self):
    """Populate the app_id field with (id, name) choices for every built
    app in the domain, sorted by name."""
    built_app_ids = get_built_app_ids(self.domain)
    # This will return both Application and RemoteApp docs, but
    # they both have a name attribute
    choices = [
        (app_doc['_id'], app_doc['name'])
        for app_doc in iter_docs(Application.get_db(), built_app_ids)
    ]
    choices.sort(key=lambda choice: choice[1])
    self.fields['app_id'].choices = choices
def setUpClass(cls):
    # Wipe and recreate the couch fixtures (domain, users, forms, syncs)
    # that this integration test reads.
    super(AppStatusIntegrationTest, cls).setUpClass()
    # start from a clean slate: remove leftover docs from earlier runs
    delete_all_docs_by_doc_type(Domain.get_db(), ['Domain', 'Domain-Deleted'])
    delete_all_docs_by_doc_type(CommCareUser.get_db(), ['CommCareUser', 'WebUser'])
    delete_all_docs_by_doc_type(Application.get_db(), ['Application', 'Application-Deleted'])
    cls.domain_records = [
        Domain(name=cls.domain, hr_name='One', creating_user_id='abc', is_active=True),
    ]
    for domain in cls.domain_records:
        domain.save()
    cls.user_records = [
        # TODO: Handle WebUsers who have multiple domains
        # WebUser.create(
        #     cls.domain,
        #     'web-user',
        #     '***',
        #     date_joined=datetime.utcnow(),
        #     first_name='A',
        #     last_name='B',
        #     email='*****@*****.**',
        #     is_active=True,
        #     is_staff=False,
        #     is_superuser=True,
        # ),
        CommCareUser.create(
            cls.domain,
            'commcare-user',
            '***',
            date_joined=datetime.utcnow(),
            email='*****@*****.**',
            is_active=True,
            is_staff=True,
            is_superuser=False,
        ),
    ]
    # three test forms submitted by the mobile worker above
    cls.form_records = [
        create_form_for_test(cls.domain, user_id=cls.user_records[0]._id),
        create_form_for_test(cls.domain, user_id=cls.user_records[0]._id),
        create_form_for_test(cls.domain, user_id=cls.user_records[0]._id),
    ]
    # one sync (restore) per user via a mock device
    cls.sync_records = []
    for user in cls.user_records:
        restore_user = OTARestoreCommCareUser(user.domain, user)
        device = MockDevice(cls.domain_records[0], restore_user)
        cls.sync_records.append(device.sync())
    cls.batch = create_batch(cls.slug)
def _get_domain_attachments_size(self):
    # hash of app_id, xmlns to size of attachments
    db = Application.get_db()
    domain_key = [self.domain]
    result = {}
    for entry in db.view('attachments/attachments',
                         startkey=domain_key,
                         endkey=domain_key + [{}],
                         group_level=3,
                         reduce=True,
                         group=True):
        app_id, xmlns = entry['key'][1], entry['key'][2]
        result[(app_id, xmlns)] = sizeof_fmt(entry['value'])
    return result
def get_form_details_for_app_and_xmlns(domain, app_id, xmlns, deleted=False):
    """Form detail records for forms with ``xmlns`` in a specific app,
    filtered by active/deleted status."""
    status = "deleted" if deleted else "active"
    startkey = ["status xmlns app", domain, status, xmlns, app_id]
    view_rows = Application.get_db().view(
        "forms_by_app_info/view",
        startkey=startkey,
        endkey=startkey + [{}],
        reduce=False,
        stale=stale_ok(),
    ).all()
    return [_row_to_form_details(row) for row in view_rows]
def handle(self, **options):
    """Migrate all apps (optionally limited to one domain) in chunks."""
    self.options = options
    app_ids = self.get_app_ids()
    domain = self.options.get('domain')
    suffix = f" in {domain}" if domain else ""
    logger.info('migrating {} apps{}'.format(len(app_ids), suffix))
    results = iter_update(
        Application.get_db(), self._migrate_app, app_ids,
        verbose=True, chunksize=self.chunk_size)
    self.results_callback(results)
    logger.info('done')
def forwards(self, orm):
    # if the view doesn't exist manually create it.
    # typically for initial load or tests.
    try:
        Application.get_db().view(
            'app_manager/applications',
            limit=1,
        ).all()
    except ResourceNotFound:
        sync_docs.sync(app_models, verbosity=2)

    # per-app failure messages, reported at the end instead of aborting
    errors = []

    def _migrate_app_ids(app_ids):
        # Migrate app docs in batches of ~25 bulk saves.
        to_save = []
        count = len(app_ids)
        logger.info('migrating {} apps'.format(count))
        for i, app_doc in enumerate(
                iter_docs(Application.get_db(), app_ids)):
            try:
                if app_doc["doc_type"] in [
                    "Application", "Application-Deleted"
                ]:
                    application = Application.wrap(app_doc)
                    should_save = self.migrate_app(application)
                    if should_save:
                        to_save.append(application)
                        # flush periodically to bound memory use
                        if len(to_save) > 25:
                            self.bulk_save(to_save)
                            logger.info('completed {}/{} apps'.format(
                                i, count))
                            to_save = []
            except Exception:
                # sys.exc_info()[0] is the exception class
                errors.append(
                    "App {id} not properly migrated because {error}".
                    format(id=app_doc['_id'], error=sys.exc_info()[0]))
        if to_save:
            self.bulk_save(to_save)

    logger.info('migrating applications')
    _migrate_app_ids(self.get_app_ids())
    if errors:
        logger.info('\n'.join(errors))
def generate_schema_from_builds(domain, case_type, force_rebuild=False):
    """Builds a schema from Application builds for a given identifier

    :param domain: The domain that the export belongs to
    :param unique_form_id: The unique identifier of the item being exported
    :returns: Returns a CaseExportDataSchema instance
    """
    original_id = original_rev = None
    schema = get_latest_case_export_schema(domain, case_type)
    if schema and not force_rebuild:
        # remember the identity so the rebuilt doc replaces the old one
        original_id, original_rev = schema._id, schema._rev
    else:
        schema = CaseExportDataSchema()

    build_ids = CaseExportDataSchema._get_app_build_ids_to_process(
        domain,
        schema.last_app_versions,
    )
    for app_doc in iter_docs(Application.get_db(), build_ids):
        app = Application.wrap(app_doc)
        mapping = get_case_properties(
            app,
            [case_type],
            include_parent_properties=False
        )
        case_schema = CaseExportDataSchema._generate_schema_from_case_property_mapping(
            mapping,
            app.copy_of,
            app.version,
        )
        history_schema = CaseExportDataSchema._generate_schema_for_case_history(
            mapping,
            app.copy_of,
            app.version,
        )
        schema = CaseExportDataSchema._merge_schemas(
            schema,
            case_schema,
            history_schema
        )
        schema.record_update(app.copy_of, app.version)

    if original_id and original_rev:
        schema._id = original_id
        schema._rev = original_rev
    schema.domain = domain
    schema.case_type = case_type
    schema.save()
    return schema
def paginate_releases(request, domain, app_id):
    """Return one page of saved builds for ``app_id`` from Elasticsearch,
    newest first, with pagination metadata."""
    only_show_released = json.loads(
        request.GET.get('only_show_released', 'false'))
    build_comment = request.GET.get('build_comment')
    page = max(int(request.GET.get('page', 1)), 1)
    try:
        limit = int(request.GET.get('limit'))
    except (TypeError, ValueError):
        limit = 10
    timezone = get_timezone_for_user(request.couch_user, domain)

    app_es = (
        AppES()
        .start((page - 1) * limit)
        .size(limit)
        .sort('version', desc=True)
        .domain(domain)
        .is_build()
        .app_id(app_id)
    )
    if only_show_released:
        app_es = app_es.is_released()
    if build_comment:
        app_es = app_es.build_comment(build_comment)
    results = app_es.exclude_source().run()

    apps = get_docs(Application.get_db(), results.doc_ids)
    for app in apps:
        # translations are heavy and unused by the releases page
        app.pop('translations')
    saved_apps = [
        SavedAppBuild.wrap(
            app, scrap_old_conventions=False).to_saved_build_json(timezone)
        for app in apps
    ]

    j2me_enabled_configs = CommCareBuildConfig.j2me_enabled_config_labels()
    for app in saved_apps:
        app['include_media'] = app['doc_type'] != 'RemoteApp'
        app['j2me_enabled'] = app['menu_item_label'] in j2me_enabled_configs
        app['target_commcare_flavor'] = (
            SavedAppBuild.get(app['_id']).target_commcare_flavor
            if toggles.TARGET_COMMCARE_FLAVOR.enabled(domain)
            else 'none')

    if toggles.APPLICATION_ERROR_REPORT.enabled(request.couch_user.username):
        versions = [app['version'] for app in saved_apps]
        num_errors_dict = _get_error_counts(domain, app_id, versions)
        for app in saved_apps:
            app['num_errors'] = num_errors_dict.get(app['version'], 0)

    total_apps = results.total
    return json_response({
        'apps': saved_apps,
        'pagination': {
            'total': total_apps,
            'num_pages': int(ceil(total_apps / limit)),
            'current_page': page,
        },
    })
def _check_module_in_app(self, app_id, module_id):
    """Record a form error unless ``module_id`` exists in the app.
    A falsy module_id is accepted without checking."""
    if not module_id:
        return
    app_json = Application.get_db().get(app_id)
    known_ids = (module['unique_id'] for module in app_json['modules'])
    if module_id not in known_ids:
        self.add_error(
            'module_id',
            "Module {} not found in app {}".format(
                module_id, app_json['name']))
def testBuildApp(self):
    # do it from a NOT-SAVED app;
    # regression test against case where contents gets lazy-put w/o saving
    unsaved_app = Application.wrap(self._yesno_source)
    self.assertEqual(unsaved_app['_id'], None)  # i.e. hasn't been saved
    unsaved_app._id = Application.get_db().server.next_uuid()
    build = unsaved_app.make_build()
    build.save()
    self._check_has_build_files(build)
    self._check_legacy_odk_files(build)
def get_form_details_for_xmlns(domain, xmlns):
    """Look up form detail records matching ``xmlns`` within ``domain``."""
    key_prefix = ["xmlns", domain, xmlns]
    matching_rows = Application.get_db().view(
        'forms_by_app_info/view',
        startkey=key_prefix,
        endkey=key_prefix + [{}],
        reduce=False,
        stale=stale_ok(),
    ).all()
    details = []
    for row in matching_rows:
        details.append(_row_to_form_details(row))
    return details
def _get_app_ids(self, startkey):
    """Set of application ids under the given view ``startkey`` prefix."""
    view_result = Application.get_db().view(
        'app_manager/applications',
        startkey=startkey,
        endkey=startkey + [{}],
        reduce=False,
    )
    return {row['id'] for row in view_result.all()}
def options(self):
    """(id, label) choices for every app in the domain, labelled with the
    latest build version."""
    domain_apps = Application.get_db().view(
        "app_manager/applications_brief",
        startkey=[self.domain],
        endkey=[self.domain, {}],
        include_docs=True
    ).all()
    return [
        (row['value']['_id'],
         _("%(name)s [up to build %(version)s]") % {
             'name': row['value']['name'],
             'version': row['value']['version']})
        for row in domain_apps
    ]
def testBuildApp(self, mock):
    # Regression test: building from a not-yet-saved app must still
    # produce all build files (contents were once lazy-put w/o saving).
    app = Application.wrap(self._yesno_source)
    self.assertEqual(app['_id'], None)  # not saved yet
    app._id = Application.get_db().server.next_uuid()
    built_copy = app.make_build()
    built_copy.save()
    self._check_has_build_files(built_copy, self.min_paths)
    self._check_legacy_odk_files(built_copy)