def rows(self):
    """Alert rows for the selected location.

    Emits one red-highlighted HTML row per supply point that (a) is not in
    the configured reporting supply points (did not report last month),
    (b) has no reporters registered, or (c) has no in-charge registered;
    emits a single green "No current alerts" row when nothing is wrong.
    """
    rows = []
    if self.location_id:
        supply_points = self.get_supply_points()
        with_reporters, with_in_charge = self.supply_points_users()
        for sp in supply_points:
            # Each alert links to the StockLevelsReport for this supply
            # point over the configured date range.
            url = make_url(
                StockLevelsReport,
                self.config['domain'],
                '?location_id=%s&startdate=%s&enddate=%s',
                (sp.location_id, json_format_date(self.config['startdate']),
                 json_format_date(self.config['enddate']))
            )
            if sp.supply_point_id not in self.config['reporting_supply_points']:
                rows.append(['<div style="background-color: rgba(255, 0, 0, 0.2)">%s has not reported last '
                             'month. <a href="%s" target="_blank">[details]</a></div>' % (sp.name, url)])
            if sp.location_id not in with_reporters:
                rows.append(['<div style="background-color: rgba(255, 0, 0, 0.2)">%s has no reporters'
                             ' registered. <a href="%s" target="_blank">[details]</a></div>' % (sp.name, url)])
            if sp.location_id not in with_in_charge:
                rows.append(['<div style="background-color: rgba(255, 0, 0, 0.2)">%s has no in-charge '
                             'registered. <a href="%s" target="_blank">[details]</a></div>' % (sp.name, url)])
        # NOTE(review): placement assumed — the no-alerts fallback is taken
        # to apply only when a location is selected; confirm original intent.
        if not rows:
            rows.append(['<div style="background-color: rgba(0, 255, 0, 0.2)">No current alerts</div>'])
    return rows
def test_calculator_get_result(self):
    """get_result() should pick the right reduced value for each emitter of
    ``value_week`` (null, null_value, date, date_value) from the stubbed
    fluff/generic view, for both indicator document classes."""
    key = ['a', 'b']
    now = datetime.utcnow().date()
    start = json_format_date(now - WEEK)
    end = json_format_date(now)
    for cls in [MockIndicators, MockIndicatorsWithGetters]:
        classname = cls.__name__
        # Stub couch view rows: null emitters are keyed exactly; date
        # emitters are ranged over [start, end].
        self.fakedb.add_view('fluff/generic', [
            (
                {'reduce': True,
                 'key': [classname, 'a', 'b', 'value_week', 'null', None],
                 'wrap_doc': True},
                [{"key": None, "value": {"sum": 3}}]
            ),
            (
                {'reduce': True,
                 'key': [classname, 'a', 'b', 'value_week', 'null_value', None],
                 'wrap_doc': True},
                [{"key": None, "value": {"max": 8}}]
            ),
            (
                {'startkey': [classname, 'a', 'b', 'value_week', 'date', start],
                 'endkey': [classname, 'a', 'b', 'value_week', 'date', end],
                 'reduce': True, 'wrap_doc': True},
                [{"key": None, "value": {"count": 7}}]
            ),
            (
                {'startkey': [classname, 'a', 'b', 'value_week', 'date_value', start],
                 'endkey': [classname, 'a', 'b', 'value_week', 'date_value', end],
                 'reduce': True, 'wrap_doc': True},
                [{"key": None, "value": {"sum": 11}}]
            )
        ])
        value = cls.get_result('value_week', key, reduce=True)
        # Each emitter resolves to its own reduce value (sum/max/count).
        self.assertEqual(value['null'], 3)
        self.assertEqual(value['date'], 7)
        self.assertEqual(value['date_value'], 11)
        self.assertEqual(value['null_value'], 8)
def _facility_to_fixture(facility, startdate, enddate):
    # Build a <facility> XML element holding one child per M4Change report
    # for the given date range, using cached FixtureReportResult docs where
    # available and falling back to freshly computed initial row data.
    facility_id = facility.get_id
    facility_element = ElementTree.Element("facility", attrib={"id": facility_id, "name": _(facility.name)})
    report_data = {}
    m4change_data_source = M4ChangeReportDataSource()
    report_slugs = m4change_data_source.get_report_slugs()
    reports = dict((report.slug, report) for report in m4change_data_source.get_reports())
    for report_slug in report_slugs:
        # NOTE(review): `domain` is not a parameter of this function —
        # presumably a closure or module-level Domain object; confirm it
        # is in scope where this is defined.
        report_data[report_slug] = FixtureReportResult.by_composite_key(
            domain.name, facility_id, json_format_date(startdate), json_format_date(enddate), report_slug
        )
        if report_data[report_slug] is None:
            # Cache miss: build an (unsaved) result from the report's
            # initial row data so the fixture is still complete.
            name = reports[report_slug].name
            rows = reports[report_slug].get_initial_row_data()
            fixture_result = FixtureReportResult(
                domain=domain.name,
                location_id=facility_id,
                start_date=startdate,
                end_date=enddate,
                report_slug=report_slug,
                rows=rows,
                name=name,
            )
            report_data[report_slug] = fixture_result
    facility_element = _reports_to_fixture(report_data, facility_element)
    return facility_element
def generate_fixtures_for_domain(domain, db, data_source):
    """Recompute and persist cached report fixtures for every location in
    *domain*, for each of the last NUMBER_OF_MONTHS_FOR_FIXTURES months.

    Previously cached FixtureReportResult docs for the same (location,
    month, report) key are deleted before the fresh result is saved.
    """
    all_location_ids = [loc.get_id for loc in Location.by_domain(domain)]
    for startdate, enddate in get_last_n_months(NUMBER_OF_MONTHS_FOR_FIXTURES):
        start_str = json_format_date(startdate)
        end_str = json_format_date(enddate)
        for location_id in all_location_ids:
            data_source.configure(config={
                "startdate": startdate,
                "enddate": enddate,
                "location_id": location_id,
                "domain": domain,
            })
            report_data = data_source.get_data()
            for report_slug in report_data:
                report = report_data[report_slug]
                # Remove any stale cached fixture docs before saving.
                stale_docs = FixtureReportResult.all_by_composite_key(
                    domain, location_id, start_str, end_str, report_slug)
                db.delete_docs(stale_docs)
                FixtureReportResult.save_result(
                    domain, location_id, startdate.date(), enddate.date(),
                    report_slug, dict(report.get("data", [])), report.get("name"))
def get_due_list_by_task_name(target_date, owner_id=None, case_es=None, size=0, case_type='task'):
    """Return an iterator of (task name, count) pairs for open task cases
    due on *target_date* — eligible on or before it and not yet expired —
    optionally restricted to a single owner.

    :param target_date: date used against date_eligible/date_expires
    :param owner_id: restrict to this owner, or None for all owners
    :param case_es: an ES wrapper; defaults to ReportCaseES(BIHAR_DOMAIN)
    :param size: number of case hits to return alongside the facets
    :param case_type: case type to filter on
    """
    case_es = case_es or ReportCaseES(BIHAR_DOMAIN)
    es_type = None
    facet_name = 'vaccination_names'

    # The type of vaccination is stored in the `name` field in ElasticSearch
    # so we can get the sums directly as facets on `name.exact` where the `.exact`
    # is to avoid tokenization so that "OPV 1" does not create two facets.
    # NOTE(review): the facet below actually uses task_id.#value — this
    # comment may be stale; confirm against the index mapping.
    base_query = case_es.base_query(start=0, size=size)

    owner_filter = {"match_all": {}} if owner_id is None else {"term": {"owner_id": owner_id}}

    # Renamed from `filter`, which shadowed the builtin.
    due_filter = {
        "and": [
            owner_filter,
            {"term": {"closed": False}},
            {"term": {"type": case_type}},
            {"range": {"date_eligible.#value": {"to": json_format_date(target_date)}}},
            {"range": {"date_expires.#value": {"from": json_format_date(target_date)}}},
        ]
    }
    base_query['filter']['and'] += due_filter['and']
    base_query['facets'] = {
        facet_name: {
            "terms": {"field": "task_id.#value", "size": 1000},
            "facet_filter": due_filter  # This controls the records processed for the summation
        }
    }
    es_result = case_es.run_query(base_query, es_type=es_type)
    return ((facet['term'], facet['count'])
            for facet in es_result['facets'][facet_name]['terms'])
def _row(self, group, rank):
    # Render one table row for *group*: its name plus an AWCC link into
    # the worker-rank ADM subreport filtered to that group over the last
    # 30 days.  (`rank` is accepted but not used in this body.)
    def _get_url():
        # HACK: hard(ish) code get_url until we fix the render_as bug
        url = SupervisorReportsADMSection.get_url(domain=self.domain, subreport=self.WORKER_RANK_SLUG)
        # /a/[domain]/reports/adm/[section]/[subreport]/
        # needs to become
        # /a/[domain]/reports/adm/[render_as]/[section]/[subreport]/
        if self.render_next:
            section_chunk = "/{section}/".format(section=SupervisorReportsADMSection.slug)
            section_with_rendering = "/{render_as}{section_chunk}".format(
                render_as=self.render_next,
                section_chunk=section_chunk
            )
            url = url.replace(section_chunk, section_with_rendering)
        return url
    url = _get_url()
    # Last 30 days ending today.
    end = datetime.today().date()
    start = end - timedelta(days=30)
    params = {
        "ufilter": 0,
        "startdate": json_format_date(start),
        "enddate": json_format_date(end)
    }
    def _awcc_link(g):
        params["group"] = g.get_id
        return format_html(u'<a href="{details}">{awcc}</a>',
                           awcc=get_awcc(g),
                           details=url_and_params(url, params))
    return [group.name, _awcc_link(group)]
def get_result(cls, calc_name, key, reduce=True):
    """Query the fluff/generic view for each emitter of *calc_name*.

    :param calc_name: name of a calculator registered in cls._calculators
    :param key: list forming the middle part of the view key
    :param reduce: when True return the reduced value per emitter (0 when
        the view returns nothing); when False return the raw view rows
        (with docs included).
    :raises ValueError: for an unrecognized emitter type (previously this
        fell through and hit a NameError on the unbound ``q``).
    """
    calculator = cls._calculators[calc_name]
    result = {}
    for emitter_name in calculator._fluff_emitters:
        shared_key = [cls._doc_type] + key + [calc_name, emitter_name]
        emitter_type = getattr(calculator, emitter_name)._fluff_emitter
        q_args = {
            'reduce': reduce,
            'include_docs': not reduce,
        }
        if emitter_type == 'date':
            # Date emitters are windowed over [now - window, now].
            now = datetime.datetime.utcnow().date()
            start = now - calculator.window
            end = now
            q = cls.view(
                'fluff/generic',
                startkey=shared_key + [json_format_date(start)],
                endkey=shared_key + [json_format_date(end)],
                **q_args
            ).all()
        elif emitter_type == 'null':
            q = cls.view(
                'fluff/generic',
                key=shared_key + [None],
                **q_args
            ).all()
        else:
            # Fail loudly instead of falling through with `q` unbound.
            raise ValueError('emitter type %s not recognized' % emitter_type)
        if reduce:
            try:
                result[emitter_name] = q[0]['value']
            except IndexError:
                result[emitter_name] = 0
        else:
            result[emitter_name] = q
    return result
def report_config(self):
    """Build the filter/config dict for this report's data source.

    Contains constant value codes used by the report SQL, the request's
    start/end date filters (both raw and str keys), a ladder of relative
    dates (today +/- N days), and any selected location hierarchy filters.
    """
    config = dict(
        domain=self.domain,
        empty='',
        yes='yes',
        no='no',
        death='death',
        pregnant_mother_type='pregnant',
        health_center='health_center',
        hospital='hospital',
        home='home',
        on_route='on_route',
        other='other',
        male='male',
        female='female',
        health_center_worker='health_center_worker',
        trained_traditional_birth_attendant='trained_traditional_birth_attendant',
        normal_delivery='normal',
        cesarean_delivery='cesarean',
        unknown_delivery='unknown',
        abortion='abortion',
        weight_birth_25='2.5',
        newborn_death='newborn_death',
        infant_death='infant_death',
        child_death='child_death',
        date_of_death='date_of_death'
    )
    if 'startdate' in self.request.GET and self.request.GET['startdate']:
        config['startdate'] = self.request.GET['startdate']
        config['strsd'] = self.request.GET['startdate']
    if 'enddate' in self.request.GET and self.request.GET['enddate']:
        config['enddate'] = self.request.GET['enddate']
        config['stred'] = self.request.GET['enddate']
    today = datetime.date.today()
    config['today'] = json_format_date(today)
    for d in [35, 56, 84, 85, 112, 196]:
        config['today_plus_%d' % d] = json_format_date(today + datetime.timedelta(days=d))
    for d in [2, 4, 21, 25, 40, 42, 75, 106, 182, 183, 273, 365, 547, 548, 700, 730]:
        config['today_minus_%d' % d] = json_format_date(today - datetime.timedelta(days=d))
    for d in [1, 3, 5, 6]:
        config['%d' % d] = '%d' % d
    config['last_month'] = json_format_date(today - datetime.timedelta(days=30))
    # .items() instead of the Python-2-only .iteritems(): identical result
    # here and keeps the method working under Python 3.  The getlist()
    # call is also done once instead of twice.
    for k, v in sorted(LOCATION_HIERARCHY.items(), reverse=True):
        req_prop = 'location_%s' % v['prop']
        location_list = self.request.GET.getlist(req_prop, [])
        if location_list and location_list[0] != '0':
            config.update({k: tuple(location_list)})
    return config
def report_config(self): config = dict( domain=self.domain, empty="", yes="yes", no="no", death="death", pregnant_mother_type="pregnant", health_center="health_center", hospital="hospital", home="home", on_route="on_route", other="other", male="male", female="female", health_center_worker="health_center_worker", trained_traditional_birth_attendant="trained_traditional_birth_attendant", normal_delivery="normal", cesarean_delivery="cesarean", unknown_delivery="unknown", abortion="abortion", weight_birth_25="2.5", newborn_death="newborn_death", infant_death="infant_death", child_death="child_death", date_of_death="date_of_death", ) if "startdate" in self.request.GET and self.request.GET["startdate"]: config["startdate"] = self.request.GET["startdate"] config["strsd"] = self.request.GET["startdate"] if "enddate" in self.request.GET and self.request.GET["enddate"]: config["enddate"] = self.request.GET["enddate"] config["stred"] = self.request.GET["enddate"] today = datetime.date.today() config["today"] = json_format_date(today) for d in [35, 56, 84, 85, 112, 196]: config["today_plus_%d" % d] = json_format_date(today + datetime.timedelta(days=d)) for d in [2, 4, 21, 25, 40, 42, 75, 106, 182, 183, 273, 365, 547, 548, 700, 730]: config["today_minus_%d" % d] = json_format_date(today - datetime.timedelta(days=d)) for d in [1, 3, 5, 6]: config["%d" % d] = "%d" % d config["last_month"] = json_format_date(today - datetime.timedelta(days=30)) for k, v in sorted(LOCATION_HIERARCHY.iteritems(), reverse=True): req_prop = "location_%s" % v["prop"] if self.request.GET.getlist(req_prop, []): location_list = self.request.GET.getlist(req_prop, []) if location_list and location_list[0] != "0": config.update({k: tuple(location_list)}) return config
def report_config(self):
    """Assemble the data source config for this report: the datespan
    widened to whole days (00:00:00 through 23:59:59) plus the string
    forms of the start/end dates, then passed through config_update()."""
    span = self.datespan
    config = {
        'domain': self.domain,
        'startdate': span.startdate.replace(hour=0, minute=0, second=0),
        'enddate': span.enddate.replace(hour=23, minute=59, second=59),
        'visit': "''",
        'strsd': json_format_date(span.startdate),
        'stred': json_format_date(span.enddate),
    }
    self.config_update(config)
    return config
def report_config(self):
    """Assemble the data source config for this report from the raw
    datespan bounds plus their string forms, then pass it through
    config_update() for report-specific additions."""
    span = self.datespan
    config = {
        'domain': self.domain,
        'startdate': span.startdate,
        'enddate': span.enddate,
        'visit': "''",
        'strsd': json_format_date(span.startdate),
        'stred': json_format_date(span.enddate),
    }
    self.config_update(config)
    return config
def _month_to_fixture(date_tuple, locations):
    """Build a <monthly-report> element for the (startdate, enddate) pair
    in *date_tuple*, containing one fixture element per location.

    Returns None as soon as any facility fails to produce an element, so
    an incomplete month is never emitted.
    """
    startdate, enddate = date_tuple
    month_element = ElementTree.Element('monthly-report', attrib={
        'startdate': json_format_date(startdate),
        'enddate': json_format_date(enddate),
        'month_year_label': startdate.strftime('%b %Y')
    })
    for location in locations:
        facility_element = _facility_to_fixture(location, startdate, enddate)
        if not facility_element:
            return None
        month_element.append(facility_element)
    return month_element
def handle(self, *args, **options):
    """Entry point for the copy_domain management command (Python 2).

    Copies docs for one domain from a source couch database to a target
    database, optionally filtered by doc type, an id file, or a `since`
    date.  Exits the process for --list-types and for bad id files.
    """
    if len(args) not in [2, 3]:
        raise CommandError('Usage is copy_domain %s' % self.args)
    sourcedb = Database(args[0])
    domain = args[1].strip()
    simulate = options['simulate']
    exclude_attachments = options['exclude_attachments']
    self.run_multi_process = options['run_multi_process']
    # Normalize --since into a json date string (or None).
    since = json_format_date(iso_string_to_date(options['since'])) if options['since'] else None
    if options['list_types']:
        self.list_types(sourcedb, domain, since)
        sys.exit(0)
    if simulate:
        print "\nSimulated run, no data will be copied.\n"
    if options['postgres_db'] and options['postgres_password']:
        settings.DATABASES[options['postgres_db']]['PASSWORD'] = options['postgres_password']
    # Third positional arg overrides the default target database.
    self.targetdb = Database(args[2]) if len(args) == 3 else get_db()
    try:
        domain_doc = Domain.get_by_name(domain)
    except ResourceNotFound:
        domain_doc = None
    if domain_doc is None:
        self.copy_domain(sourcedb, domain)
    if options['doc_types']:
        # Copy only the requested doc types, one view range per type.
        doc_types = options['doc_types'].split(',')
        for type in doc_types:
            startkey = [x for x in [domain, type, since] if x is not None]
            endkey = [x for x in [domain, type, {}] if x is not None]
            self.copy_docs(sourcedb, domain, simulate, startkey, endkey, doc_type=type, since=since,
                           postgres_db=options['postgres_db'], exclude_attachments=exclude_attachments)
    elif options['id_file']:
        # Copy exactly the doc ids listed (one per line) in the file.
        path = options['id_file']
        if not os.path.isfile(path):
            print "Path '%s' does not exist or is not a file" % path
            sys.exit(1)
        with open(path) as input:
            doc_ids = [line.rstrip('\n') for line in input]
            if not doc_ids:
                print "Path '%s' does not contain any document ID's" % path
                sys.exit(1)
        self.copy_docs(sourcedb, domain, simulate, doc_ids=doc_ids, postgres_db=options['postgres_db'],
                       exclude_attachments=exclude_attachments)
    else:
        # Default: copy the whole domain minus excluded types.
        startkey = [domain]
        endkey = [domain, {}]
        exclude_types = DEFAULT_EXCLUDE_TYPES + options['doc_types_exclude'].split(',')
        self.copy_docs(sourcedb, domain, simulate, startkey, endkey, exclude_types=exclude_types,
                       postgres_db=options['postgres_db'],
                       exclude_attachments=exclude_attachments)
def submit_case_update_form(casedoc, update_dict, couch_user, submit_date=None, xmlns=XMLNS_PATIENT_UPDATE, sync_token=None):
    """
    Main entry point for submitting an update for a pact patient

    Args:
    casedoc: the patient case
    update_dict: the kv of the fields being changed
    couch_user: user committing the change
    submit_date: now if None

    Returns whatever submit_xform returns for the assembled submission.
    """
    if submit_date is None:
        submit_date = datetime.utcnow()
    # Assemble the xform document: meta block, then the case update block,
    # then the encounter date element, in that order.
    form = etree.Element("data", nsmap={None: xmlns, 'jrm': "http://dev.commcarehq.org/jr/xforms"})
    meta_block = generate_meta_block(couch_user, timestart=submit_date, timeend=submit_date)
    form.append(meta_block)
    update_block = prepare_case_update_xml_block(casedoc, couch_user, update_dict, submit_date)
    form.append(update_block)
    # todo: this date is based off midnight UTC not local time...
    encounter_date = etree.XML('<encounter_date>%s</encounter_date>' % json_format_date(datetime.utcnow()))
    form.append(encounter_date)
    submission_xml_string = etree.tostring(form)
    # Forward the sync token (if any) so the submission is attributed to
    # the right restore state.
    if sync_token:
        extra_meta = {LAST_SYNCTOKEN_HEADER: sync_token}
    else:
        extra_meta = None
    return submit_xform('/a/pact/receiver', PACT_DOMAIN, submission_xml_string, extra_meta=extra_meta)
def get(self, request, *args, **kwargs):
    # just copying what was in the old django view here. don't want to mess too much with exports just yet.
    # Builds (or rebuilds) a custom export schema for the export_tag in
    # the query string; redirects back to the report with a warning when
    # no data exists for that form.
    try:
        export_tag = [self.domain, json.loads(request.GET.get("export_tag", "null") or "null")]
    except ValueError:
        return HttpResponseBadRequest()
    schema = build_latest_schema(export_tag)
    if not schema and self.export_helper.export_type == "form":
        # No saved schema yet: bootstrap a minimal form checkpoint.
        schema = create_basic_form_checkpoint(export_tag)
    if schema:
        app_id = request.GET.get('app_id')
        self.export_helper.custom_export = self.export_helper.ExportSchemaClass.default(
            schema=schema,
            # Name the export "<form name>: <utc date>" for form exports,
            # "<tag>: <utc date>" otherwise.
            name="%s: %s" % (
                xmlns_to_name(self.domain, export_tag[1], app_id=app_id)
                if self.export_helper.export_type == "form" else export_tag[1],
                json_format_date(datetime.utcnow())
            ),
            type=self.export_helper.export_type
        )
        if self.export_helper.export_type in ['form', 'case']:
            self.export_helper.custom_export.app_id = app_id
        if self.export_helper.export_type == 'form':
            self.export_helper.custom_export.update_question_schema()
        return super(BaseCreateCustomExportView, self).get(request, *args, **kwargs)
    messages.warning(request, _("<strong>No data found for that form "
                                "(%s).</strong> Submit some data before creating an export!")
                     % xmlns_to_name(self.domain, export_tag[1], app_id=None), extra_tags="html")
    return HttpResponseRedirect(ExcelExportReport.get_url(domain=self.domain))
def _convert_constant_to_expression_spec(value): # this is a hack to reconvert these to json-strings in case they were already # converted to dates (e.g. because this was a sub-part of a filter or expression) if isinstance(value, datetime.datetime): value = json_format_datetime(value) elif isinstance(value, datetime.date): value = json_format_date(value) return {"type": "constant", "constant": value}
def date_to_xml_string(date):
    """Format *date* for XML output; empty/None input yields ''.

    String input is parsed into a datetime first so that formatting is
    uniform across string and date/datetime arguments.
    """
    if date:
        parsed = parse_datetime(date) if isinstance(date, six.string_types) else date
        return json_format_date(parsed)
    return ''
def date_to_xml_string(date):
    """Format *date* for XML output; empty/None input yields ''.

    A string argument (Python 2 ``basestring``) is parsed into a datetime
    before formatting.
    """
    if date:
        value = date
        if isinstance(value, basestring):
            value = parse_datetime(value)
        return json_format_date(value)
    return ''
def get_single_balance_block(case_id, product_id, quantity, date_string=None, section_id='stock'):
    """Return a commcare ledger <balance> XML block for one product entry.

    Defaults the balance date to today (UTC) when *date_string* is omitted.
    NOTE(review): internal whitespace of the template was reconstructed
    from a collapsed source; confirm against the original layout.
    """
    effective_date = date_string or json_format_date(datetime.utcnow())
    template = """
<balance xmlns="http://commcarehq.org/ledger/v1" entity-id="{case_id}" date="{date}" section-id="{section_id}">
    <entry id="{product_id}" quantity="{quantity}" />
</balance>"""
    return template.format(
        case_id=case_id,
        product_id=product_id,
        quantity=quantity,
        date=effective_date,
        section_id=section_id,
    ).strip()
def _month_to_fixture(date_tuple, locations): monthly_report_element = ElementTree.Element( "monthly-report", attrib={ "startdate": json_format_date(date_tuple[0]), "enddate": json_format_date(date_tuple[1]), "month_year_label": date_tuple[0].strftime("%b %Y"), }, ) for location in locations: facility_element = _facility_to_fixture(location, date_tuple[0], date_tuple[1]) if facility_element: monthly_report_element.append(facility_element) else: return None return monthly_report_element
def _receipt_report(domain, case_id, product_id, amount, days_ago):
    """Submit a stock transfer (receipt) of *amount* units of *product_id*
    into *case_id*, dated *days_ago* days in the past."""
    from corehq.apps.commtrack.tests.util import get_single_transfer_block
    from dimagi.utils.parsing import json_format_date
    from corehq.apps.hqcase.utils import submit_case_blocks

    transfer_block = get_single_transfer_block(
        src_id=None,
        dest_id=case_id,
        product_id=product_id,
        quantity=amount,
        date_string=json_format_date(ago(days_ago)),
    )
    submit_case_blocks(transfer_block, domain=domain)
def get_single_transfer_block(src_id, dest_id, product_id, quantity, date_string=None, section_id='stock'):
    """Return a commcare ledger <transfer> XML block for one product entry.

    Defaults the transfer date to today (UTC) when *date_string* is
    omitted.  The stray space in ``</transfer >`` is preserved from the
    original template.  NOTE(review): internal whitespace reconstructed
    from a collapsed source; confirm against the original layout.
    """
    effective_date = date_string or json_format_date(datetime.utcnow())
    template = """
<transfer xmlns="http://commcarehq.org/ledger/v1" src="{src_id}" dest="{dest_id}" date="{date}" section-id="{section_id}">
    <entry id="{product_id}" quantity="{quantity}" />
</transfer >"""
    return template.format(
        src_id=src_id,
        dest_id=dest_id,
        product_id=product_id,
        quantity=quantity,
        date=effective_date,
        section_id=section_id,
    ).strip()
def get_result(self, key, reduce=True):
    """Query fluff/generic for each emitter on this calculator.

    Returns {emitter_name: reduced value} (0 when the view is empty) when
    reduce=True, else {emitter_name: [doc ids with the fluff class prefix
    stripped]}.  Raises EmitterTypeError for unknown emitter types.
    """
    result = {}
    for emitter_name in self._fluff_emitters:
        shared_key = [self.fluff._doc_type] + key + [self.slug, emitter_name]
        emitter = getattr(self, emitter_name)
        emitter_type = emitter._fluff_emitter
        q_args = {
            'reduce': reduce,
        }
        if emitter_type == 'date':
            # Date emitters scan the window [now - window, now].
            now = self.fluff.get_now()
            start = now - self.window
            end = now
            if start > end:
                # A negative window looks into the future; flip the couch
                # scan direction so startkey/endkey stay valid.
                q_args['descending'] = True
            q = self.fluff.view(
                'fluff/generic',
                startkey=shared_key + [json_format_date(start)],
                endkey=shared_key + [json_format_date(end)],
                **q_args
            ).all()
        elif emitter_type == 'null':
            q = self.fluff.view(
                'fluff/generic',
                key=shared_key + [None],
                **q_args
            ).all()
        else:
            raise exceptions.EmitterTypeError(
                'emitter type %s not recognized' % emitter_type
            )
        if reduce:
            # Pick the emitter's reduce statistic (sum/count/max/...).
            try:
                result[emitter_name] = q[0]['value'][emitter._reduce_type]
            except IndexError:
                result[emitter_name] = 0
        else:
            # View ids look like '<FluffClass>-<doc_id>'; return bare ids.
            def strip(id_string):
                prefix = '%s-' % self.fluff.__name__
                assert id_string.startswith(prefix)
                return id_string[len(prefix):]
            result[emitter_name] = [strip(row['id']) for row in q]
    return result
def date_to_xml_string(date):
    """Format *date* for XML output; empty/None input yields ''.

    Bytes are decoded as utf-8 and text is parsed into a datetime before
    formatting, so bytes/str/date/datetime inputs all work.
    """
    if not date:
        return ''
    value = date
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    if isinstance(value, six.text_type):
        value = parse_datetime(value)
    return json_format_date(value)
def get_payload(self, repeat_record, form):
    """Build the DHIS2 event payload (JSON string) for *form*.

    Raises IgnoreDocument for forms this forwarder does not handle (and
    for Register Child forms, which only enroll and create no event), and
    returns None (retry later) when the case has no external_id yet.
    """
    from casexml.apps.case.xform import cases_referenced_by_xform
    logger.debug('DHIS2: Form domain "%s" XMLNS "%s"', form['domain'], form['xmlns'])
    if form['xmlns'] not in (REGISTER_CHILD_XMLNS, GROWTH_MONITORING_XMLNS, RISK_ASSESSMENT_XMLNS):
        # This is not a form we care about
        raise IgnoreDocument
    settings = Dhis2Settings.for_domain(form['domain'])
    dhis2_api = Dhis2Api(settings.dhis2['host'], settings.dhis2['username'],
                         settings.dhis2['password'], settings.dhis2['top_org_unit_name'])
    cases = cases_referenced_by_xform(form)
    # First referenced case of the configured type drives the payload.
    case = next(c for c in cases if c.type == CASE_TYPE)
    event = None
    if form['xmlns'] == REGISTER_CHILD_XMLNS:
        # Create a DHIS2 tracked entity instance from the form's case and
        # enroll in the nutrition assessment programme.
        logger.debug('DHIS2: Processing Register Child form')
        push_case(case, dhis2_api)
        # We just need to enroll. No event to create
        raise IgnoreDocument
    elif form['xmlns'] == GROWTH_MONITORING_XMLNS:
        logger.debug('DHIS2: Processing Growth Monitoring form')
        if not getattr(case, 'external_id', None):
            logger.info('Register Child form must be processed before Growth Monitoring form')
            return  # Try again later
        self._update_instance(dhis2_api, case)
        # Create a paediatric nutrition assessment event.
        program_id = dhis2_api.get_program_id('Paediatric Nutrition Assessment')
        program_stage_id = dhis2_api.get_program_stage_id('Nutrition Assessment')
        event = dhis2_api.form_to_event(program_id, form, NUTRITION_ASSESSMENT_EVENT_FIELDS,
                                        program_stage_id, case['external_id'])
    elif form['xmlns'] == RISK_ASSESSMENT_XMLNS:
        logger.debug('DHIS2: Processing Risk Assessment form')
        if not getattr(case, 'external_id', None):
            logger.info('Register Child form must be processed before Risk Assessment form')
            return  # Try again later
        self._update_instance(dhis2_api, case)
        # Check whether the case needs to be enrolled in the Risk Assessment Program
        program_id = dhis2_api.get_program_id('Underlying Risk Assessment')
        if not dhis2_api.enrolled_in(case['external_id'], 'Underlying Risk Assessment'):
            today = json_format_date(date.today())
            program_data = {dhis2_attr: case[cchq_attr]
                            for cchq_attr, dhis2_attr in RISK_ASSESSMENT_PROGRAM_FIELDS.iteritems()}
            dhis2_api.enroll_in_id(case['external_id'], program_id, today, program_data)
        # Create a risk assessment event.
        program_stage_id = dhis2_api.get_program_stage_id('Underlying Risk Assessment')
        event = dhis2_api.form_to_event(program_id, form, RISK_ASSESSMENT_EVENT_FIELDS,
                                        program_stage_id, case['external_id'])
    return json.dumps(event, default=json_serializer)
def _facility_to_fixture(facility, startdate, enddate):
    # Build a <facility> element with one child per report slug, using
    # cached FixtureReportResult docs where available.
    # NOTE(review): this references `self` but takes no self parameter —
    # presumably a nested function closing over an enclosing method's
    # `self`; confirm its definition context.
    facility_id = facility.get_id
    facility_element = ElementTree.Element('facility', attrib={
        'id': facility_id,
        'name': _(facility.name)
    })
    report_data = {}
    for report_slug in self.report_slugs:
        report_data[report_slug] = FixtureReportResult.by_composite_key(
            self.domain.name, facility_id, json_format_date(startdate),
            json_format_date(enddate), report_slug)
        if report_data[report_slug] is None:
            # Cache miss: fall back to the report's initial row data.
            name = self.reports[report_slug].name
            rows = self.reports[report_slug].get_initial_row_data()
            fixture_result = FixtureReportResult(domain=self.domain.name, location_id=facility_id,
                                                 start_date=startdate, end_date=enddate,
                                                 report_slug=report_slug, rows=rows, name=name)
            report_data[report_slug] = fixture_result
    facility_element = (_reports_to_fixture(report_data, facility_element))
    return facility_element
def _fmt_result(item_config, value):
    """Render *value* as an HTML link into the VaccinationClientList,
    carrying the current request params plus this item's category slug
    and the report date."""
    params = copy(self.request_params)
    params.update({
        'category': item_config['slug'],
        'date': json_format_date(self.get_date()),
    })
    link = url_and_params(
        VaccinationClientList.get_url(self.domain, render_as=self.render_next),
        params,
    )
    return format_html(u'<a href="{next}">{val}</a>', val=value, next=link)
def generate_fixtures_for_locations():
    """Regenerate last month's cached report fixtures for the location ids
    queued in redis, one M4Change domain at a time."""
    client = get_redis_client()
    start_date, end_date = get_last_n_months(1)[0]
    db = FixtureReportResult.get_db()
    data_source = M4ChangeReportDataSource()
    for domain in M4CHANGE_DOMAINS:
        redis_key = REDIS_FIXTURE_KEYS[domain]
        redis_lock_key = REDIS_FIXTURE_LOCK_KEYS[domain]
        lock = client.lock(redis_lock_key, timeout=5)
        location_ids = []
        if lock.acquire(blocking=True):
            try:
                # Drain the queued location ids for this domain atomically
                # (read, then reset to an empty list) while holding the lock.
                location_ids_str = client.get(redis_key)
                location_ids = json.loads(location_ids_str if location_ids_str else "[]")
                client.set(redis_key, '[]')
            finally:
                release_lock(lock, True)
        for location_id in location_ids:
            data_source.configure(config={
                "startdate": start_date,
                "enddate": end_date,
                "location_id": location_id,
                "domain": domain
            })
            report_data = data_source.get_data()
            for report_slug in report_data:
                # Remove cached fixture docs
                db.delete_docs(
                    FixtureReportResult.all_by_composite_key(
                        domain, location_id, json_format_date(start_date),
                        json_format_date(end_date), report_slug)
                )
                rows = dict(report_data[report_slug].get("data", []))
                name = report_data[report_slug].get("name")
                FixtureReportResult.save_result(domain, location_id, start_date.date(),
                                                end_date.date(), report_slug, rows, name)
def chw_calendar_submit_report(request, username, interval=7):
    """Calendar view of submissions by CHW, overlaid with their scheduled
    visits, and whether they made them or not."""
    return_context = {}
    return_context['username'] = username
    total_interval = interval
    if 'interval' in request.GET:
        try:
            total_interval = int(request.GET['interval'])
        except ValueError:
            # Malformed interval: keep the default.
            pass

    #secret date ranges
    if 'enddate' in request.GET:
        end_date_str = request.GET.get('enddate', json_format_date(datetime.utcnow()))
        end_date = iso_string_to_date(end_date_str)
    else:
        end_date = datetime.utcnow().date()

    if 'startdate' in request.GET:
        #if there's a startdate, trump interval
        start_date_str = request.GET.get('startdate', json_format_date(datetime.utcnow()))
        start_date = iso_string_to_date(start_date_str)
        total_interval = (end_date - start_date).days

    ret, patients, total_scheduled, total_visited = get_schedule_tally(
        username, total_interval, override_date=end_date)
    if len(ret) > 0:
        return_context['date_arr'] = ret
        return_context['total_scheduled'] = total_scheduled
        return_context['total_visited'] = total_visited
        # First/last tally rows bound the displayed range.
        return_context['start_date'] = ret[0][0]
        return_context['end_date'] = ret[-1][0]
    else:
        return_context['total_scheduled'] = 0
        return_context['total_visited'] = 0
    return return_context
def chw_calendar_submit_report(request, username, interval=7):
    """Calendar view of submissions by CHW, overlaid with their scheduled
    visits, and whether they made them or not."""
    return_context = {'username': username}

    total_interval = interval
    if 'interval' in request.GET:
        try:
            total_interval = int(request.GET['interval'])
        except ValueError:
            pass

    # Secret date ranges: explicit enddate/startdate query params.
    if 'enddate' in request.GET:
        end_date = iso_string_to_date(
            request.GET.get('enddate', json_format_date(datetime.utcnow())))
    else:
        end_date = datetime.utcnow().date()

    if 'startdate' in request.GET:
        # An explicit startdate trumps the interval.
        start_date = iso_string_to_date(
            request.GET.get('startdate', json_format_date(datetime.utcnow())))
        total_interval = (end_date - start_date).days

    ret, patients, total_scheduled, total_visited = get_schedule_tally(username,
                                                                       total_interval,
                                                                       override_date=end_date)
    if ret:
        return_context['date_arr'] = ret
        return_context['total_scheduled'] = total_scheduled
        return_context['total_visited'] = total_visited
        return_context['start_date'] = ret[0][0]
        return_context['end_date'] = ret[-1][0]
    else:
        return_context['total_scheduled'] = 0
        return_context['total_visited'] = 0
    return return_context
def test_disp_des_products_report(self):
    # Verifies ordered ("Commandes") and received ("Raux") quantities plus
    # the fulfillment rate ("Taux") for February 2016 in the test region.
    # (Python 2: the expected values use long literals.)
    disp_des = DispDesProducts(config=dict(
        domain=TEST_DOMAIN,
        startdate=datetime(2016, 2, 1),
        enddate=datetime(2016, 2, 29),
        visit="''",
        strsd=json_format_date(datetime(2016, 2, 1)),
        stred=json_format_date(datetime(2016, 2, 29)),
        empty_prd_code='__none__',
        region_id=self.region.get_id
    ))
    rows = disp_des.rows
    self.assertEqual(len(rows), 3)
    self.assertListEqual(
        rows,
        [
            ['Commandes', {'sort_key': 25L, 'html': 25L}, {'sort_key': 26L, 'html': 26L}],
            ['Raux', {'sort_key': 25L, 'html': 25L}, {'sort_key': 23L, 'html': 23L}],
            ['Taux', '100%', '88%']
        ]
    )
def test_disp_des_products_report(self):
    """DispDesProducts reports ordered ("Commandes") and received ("Raux")
    quantities plus the fulfillment rate ("Taux") for February 2016."""
    config = dict(
        domain=TEST_DOMAIN,
        startdate=datetime(2016, 2, 1),
        enddate=datetime(2016, 2, 29),
        visit="''",
        strsd=json_format_date(datetime(2016, 2, 1)),
        stred=json_format_date(datetime(2016, 2, 29)),
        empty_prd_code='__none__',
        region_id=self.region.get_id,
    )
    rows = DispDesProducts(config=config).rows
    expected = [
        ['Commandes', {'sort_key': 25, 'html': 25}, {'sort_key': 26, 'html': 26}],
        ['Raux', {'sort_key': 25, 'html': 25}, {'sort_key': 23, 'html': 23}],
        ['Taux', '100%', '88%'],
    ]
    self.assertEqual(len(rows), 3)
    self.assertListEqual(rows, expected)
def _facility_to_fixture(facility, startdate, enddate):
    # Build a <facility> element with one child per M4Change report,
    # using cached FixtureReportResult docs where available and falling
    # back to freshly computed initial row data.
    # NOTE(review): `restore_user` is not a parameter here — presumably a
    # closure over an enclosing fixture generator's restore user; confirm.
    facility_id = facility.location_id
    facility_element = ElementTree.Element('facility', attrib={
        'id': facility_id,
        'name': _(facility.name)
    })
    report_data = {}
    m4change_data_source = M4ChangeReportDataSource()
    report_slugs = m4change_data_source.get_report_slugs()
    reports = dict((report.slug, report) for report in m4change_data_source.get_reports())
    for report_slug in report_slugs:
        report_data[report_slug] = FixtureReportResult.by_composite_key(
            restore_user.domain, facility_id, json_format_date(startdate),
            json_format_date(enddate), report_slug)
        if report_data[report_slug] is None:
            # Cache miss: build an (unsaved) result from the report's
            # initial row data so the fixture is still complete.
            name = reports[report_slug].name
            rows = reports[report_slug].get_initial_row_data()
            fixture_result = FixtureReportResult(domain=restore_user.domain, location_id=facility_id,
                                                 start_date=startdate, end_date=enddate,
                                                 report_slug=report_slug, rows=rows, name=name)
            report_data[report_slug] = fixture_result
    facility_element = (_reports_to_fixture(report_data, facility_element))
    return facility_element
def csv_data_block(self, username, user_context):
    """
    generator of rows of scheduled visits for a given chw
    """
    def finish_row_blanks(r):
        # Pad every row out to 11 columns with '---' placeholders.
        if len(r) < 11:
            for x in range(11 - len(r)):
                r.append('---')
        return r

    # {% for visit_date, patient_visits in date_arr %}
    # {% if patient_visits %}
    # {% for cpatient, visit in patient_visits %}
    # {% if visit %}
    #this is ugly, but we're repeating the work of the template for rendering the row data
    for visit_date, patient_visits in user_context['date_arr']:
        rowdata = []
        if len(patient_visits) > 0:
            for patient_case, visit in patient_visits:
                rowdata = [json_format_date(visit_date), username, patient_case['pactid']]
                if visit is not None:
                    ####is scheduled
                    if visit.get('scheduled', '---') == 'yes':
                        rowdata.append('scheduled')
                    else:
                        rowdata.append('unscheduled')
                    ####visit kept
                    visit_kept = visit.get('visit_kept', '---')
                    if visit_kept == 'notice':
                        rowdata.append("no - notice given")
                    elif visit_kept == 'yes':
                        rowdata.append("yes")
                    else:
                        rowdata.append(visit_kept)
                    #visit type
                    rowdata.append(visit.get('visit_type', '---'))
                    #contact type
                    rowdata.append(visit.get('contact_type', '---'))
                    rowdata.append(visit.get('username', '---'))
                    rowdata.append(visit.get('observed_art', '---'))
                    rowdata.append(visit.get('has_pillbox_check', '---'))
                    rowdata.append(visit.get('doc_id', '---'))
                else:
                    rowdata.append('novisit')
                # NOTE(review): yield placement assumed to be per patient
                # visit (matching the template comments above); confirm.
                yield finish_row_blanks(rowdata)
def rows(self):
    """Alert rows for the selected location: one red row per supply point
    that missed last month's report, lacks registered reporters, or lacks
    an in-charge; a single green row when there are no alerts."""
    rows = []
    if self.location_id:
        supply_points = self.get_supply_points()
        with_reporters, with_in_charge = self.supply_points_users()
        last_month_reporting_sp_ids = self.last_month_reporting_sp_ids
        for sp in supply_points:
            # Each alert links to the ReportingRatesReport for this supply
            # point over the configured date range.
            url = make_url(ReportingRatesReport, self.config['domain'],
                           '?location_id=%s&startdate=%s&enddate=%s',
                           (sp.location_id, json_format_date(self.config['startdate']),
                            json_format_date(self.config['enddate'])))
            if sp.supply_point_id not in last_month_reporting_sp_ids:
                rows.append([
                    '<div style="background-color: rgba(255, 0, 0, 0.2)">%s has not reported last '
                    'month. <a href="%s" target="_blank">[details]</a></div>' % (sp.name, url)
                ])
            if sp.location_id not in with_reporters:
                rows.append([
                    '<div style="background-color: rgba(255, 0, 0, 0.2)">%s has no reporters'
                    ' registered. <a href="%s" target="_blank">[details]</a></div>' % (sp.name, url)
                ])
            if sp.location_id not in with_in_charge:
                rows.append([
                    '<div style="background-color: rgba(255, 0, 0, 0.2)">%s has no in-charge '
                    'registered. <a href="%s" target="_blank">[details]</a></div>' % (sp.name, url)
                ])
        # NOTE(review): placement assumed — the no-alerts fallback is taken
        # to apply only when a location is selected; confirm original intent.
        if not rows:
            rows.append([
                '<div style="background-color: rgba(0, 255, 0, 0.2)">No current alerts</div>'
            ])
    return rows
def rows(self):
    """
    Yield one tabular row per observation in the requested window.

    Query parameters:
        dot_patient -- case id to filter by; empty string means all patients
        startdate / enddate -- ISO date strings, defaulting to the last 7 days
    """
    params = self.request.GET
    case_id = params.get('dot_patient', '')
    # An empty case id switches the query into "all patients" mode.
    mode = 'all' if case_id == '' else ''

    week_ago = json_format_date(datetime.utcnow() - timedelta(days=7))
    today = json_format_date(datetime.utcnow())
    start_datetime = datetime.combine(
        iso_string_to_date(params.get('startdate', week_ago)), time())
    end_datetime = datetime.combine(
        iso_string_to_date(params.get('enddate', today)), time())

    def coerce(value):
        # Text values are encoded to UTF-8 bytes; everything else passes through.
        if isinstance(value, six.text_type):
            return value.encode('utf-8')
        return value

    for obs in self.tabular_data(mode, case_id, start_datetime, end_datetime):
        as_dict = obs.to_json()
        yield [coerce(as_dict[header.prop_name]) for header in self.headers]
def get_result(self, key, date_range=None, reduce=True):
    """Sum the per-case maximum emitted value over the given date range.

    :param key: list of key components identifying the group to query
    :param date_range: required (start, end) tuple of dates -- asserted below
    :param reduce: unused -- the view is always queried with reduce=False so
        rows can be de-duplicated by case.  NOTE(review): presumably kept for
        interface compatibility with the base class; confirm.
    :return: dict of the form {'total': <sum of per-case maxima>}
    """
    # This block is pretty much a stripped copy-paste from fluff
    # except I needed to make sure the results were unique by case
    assert isinstance(date_range, tuple)
    start, end = date_range
    # Key layout matches the fluff/generic view: [doc_type] + key + [slug, emitter]
    shared_key = [self.fluff._doc_type] + key + [self.slug, 'total']
    q = self.fluff.view(
        'fluff/generic',
        startkey=shared_key + [json_format_date(start)],
        endkey=shared_key + [json_format_date(end)],
        reduce=False,
    ).all()

    def strip(id_string):
        # Emitted row ids carry a '<FluffClassName>-' prefix; drop it to
        # recover the underlying form id.
        prefix = '%s-' % self.fluff.__name__
        assert id_string.startswith(prefix)
        return id_string[len(prefix):]

    cases = {}
    for form in q:
        form_id = strip(form['id'])
        # Resolve each form to its case and keep only the largest value seen
        # per case, so a case with multiple forms is not double counted.
        case_id = XFormInstance.get(form_id).form['case']['@case_id']
        cases[case_id] = max(cases.get(case_id, 0), form['value'])
    return {'total': sum(cases.values())}
def test_deactivate_all_schedules(self):
    """Deactivating domain 1's schedules must dispatch a refresh for every
    broadcast and a rule run for every rule there, and must leave domain 2
    untouched."""
    # Preconditions: everything starts active in both domains.
    self.assertSchedulesActive(self.domain_1_sms_schedules)
    self.assertSchedulesActive(self.domain_1_survey_schedules)
    self.assertSchedulesActive(self.domain_2_sms_schedules)
    self.assertSchedulesActive(self.domain_2_survey_schedules)

    # Patch the async tasks so we can assert on what was dispatched without
    # actually running them.
    with patch('corehq.apps.accounting.subscription_changes.refresh_timed_schedule_instances.delay') as p1,\
            patch('corehq.apps.accounting.subscription_changes.refresh_alert_schedule_instances.delay') as p2,\
            patch('corehq.messaging.tasks.initiate_messaging_rule_run') as p3:
        _deactivate_schedules(self.domain_obj_1)

    # NOTE(review): fixture index convention appears to be
    # [0] = timed broadcast, [1] = alert broadcast, [2] = rule -- inferred
    # from which mock each index is asserted against.
    # Timed-schedule refreshes: one per timed broadcast (SMS + survey).
    self.assertEqual(p1.call_count, 2)
    p1.assert_has_calls(
        [
            call(
                broadcast.schedule_id.hex,
                broadcast.recipients,
                start_date_iso_string=json_format_date(broadcast.start_date)
            )
            for broadcast in (self.domain_1_sms_schedules[0], self.domain_1_survey_schedules[0])
        ],
        any_order=True
    )
    # Alert-schedule refreshes: one per alert broadcast.
    self.assertEqual(p2.call_count, 2)
    p2.assert_has_calls(
        [
            call(broadcast.schedule_id.hex, broadcast.recipients)
            for broadcast in (self.domain_1_sms_schedules[1], self.domain_1_survey_schedules[1])
        ],
        any_order=True
    )
    # Messaging rule runs: one per rule.
    self.assertEqual(p3.call_count, 2)
    p3.assert_has_calls(
        [
            call(rule)
            for rule in (self.domain_1_sms_schedules[2], self.domain_1_survey_schedules[2])
        ],
        any_order=True
    )
    # Postconditions: domain 1 deactivated, domain 2 still active.
    self.assertSchedulesInactive(self.domain_1_sms_schedules)
    self.assertSchedulesInactive(self.domain_1_survey_schedules)
    self.assertSchedulesActive(self.domain_2_sms_schedules)
    self.assertSchedulesActive(self.domain_2_survey_schedules)
def submit_case_update_form(casedoc, update_dict, couch_user, submit_date=None, xmlns=XMLNS_PATIENT_UPDATE, sync_token=None):
    """
    Main entry point for submitting an update for a pact patient.

    Args:
        casedoc: the patient case
        update_dict: the kv of the fields being changed
        couch_user: user committing the change
        submit_date: now if None
        xmlns: namespace for the generated form document
        sync_token: optional sync token forwarded as a submission header
    """
    when = submit_date if submit_date is not None else datetime.utcnow()

    # Assemble the form XML: meta block, case update block, encounter date.
    root = etree.Element("data", nsmap={
        None: xmlns,
        'jrm': "http://dev.commcarehq.org/jr/xforms"
    })
    root.append(generate_meta_block(couch_user, timestart=when, timeend=when))
    root.append(prepare_case_update_xml_block(casedoc, couch_user, update_dict, when))
    # todo: this date is based off midnight UTC not local time...
    root.append(etree.XML(
        '<encounter_date>%s</encounter_date>' % json_format_date(datetime.utcnow())))

    extra_meta = {LAST_SYNCTOKEN_HEADER: sync_token} if sync_token else None
    return submit_xform('/a/pact/receiver', PACT_DOMAIN, etree.tostring(root),
                        extra_meta=extra_meta)
def case_filter(self):
    """Build the ES filter for this report: open PNC cases within the
    42-day window, optionally narrowed by the PNC_status request param."""
    now = datetime.datetime.utcnow()
    fromdate = now - timedelta(days=42)  # 42-day (6-week) postnatal window
    _filters = BaseHNBCReport.base_filters(self)
    _filters.append(filters.term('pp_case_filter.#value', '1'))
    # NOTE(review): this range filter names no field.  The raw-dict sibling
    # of this method filters on 'date_birth.#value' at this point -- confirm
    # filters.range() is being invoked correctly here.
    _filters.append(filters.range(gte=json_format_date(fromdate)))
    status = self.request_params.get('PNC_status', '')
    if status:
        if status == 'On Time':
            # "On Time": all seven PNC visits must be marked done.
            for i in range(1, 8):
                _filters.append(filters.term('case_pp_%s_done.#value' % i, 'yes'))
        else:
            # Otherwise: match cases where at least one visit is not done.
            or_stmt = []
            for i in range(1, 8):
                or_stmt.append(filters.not_term('case_pp_%s_done.#value' % i, 'yes'))
            if or_stmt:
                _filters.append(filters.OR(*or_stmt))
    return filters.AND(*_filters)
def get(self, request, *args, **kwargs):
    """Render the 'create custom export' page for a form or case export.

    Parses the export tag from the querystring, builds (or bootstraps) the
    latest schema for it, seeds a default custom export from that schema,
    and falls back to a warning + redirect when no data exists yet.
    """
    # just copying what was in the old django view here. don't want to mess too much with exports just yet.
    try:
        export_tag = [self.domain, json.loads(request.GET.get("export_tag", "null") or "null")]
    except ValueError:
        # Malformed export_tag JSON.
        return HttpResponseBadRequest()

    if self.export_helper.export_type == "form" and not export_tag[1]:
        # Form exports require a concrete xmlns in the tag.
        return HttpResponseRedirect(ExcelExportReport.get_url(domain=self.domain))

    schema = build_latest_schema(export_tag)

    if not schema and self.export_helper.export_type == "form":
        # No saved schema yet -- bootstrap a minimal checkpoint from the form.
        schema = create_basic_form_checkpoint(export_tag)

    if request.GET.get('minimal', False):
        messages.warning(request, _("Warning you are using minimal mode, some things may not be functional"))

    if schema:
        app_id = request.GET.get('app_id')
        # Default export name: "<form name>: <today>" for forms,
        # "<case type>: <today>" otherwise.
        self.export_helper.custom_export = self.export_helper.ExportSchemaClass.default(
            schema=schema,
            name="%s: %s" % (
                xmlns_to_name(self.domain, export_tag[1], app_id=app_id)
                if self.export_helper.export_type == "form" else export_tag[1],
                json_format_date(datetime.utcnow())
            ),
            type=self.export_helper.export_type
        )

        if self.export_helper.export_type in ['form', 'case']:
            self.export_helper.custom_export.app_id = app_id
        if self.export_helper.export_type == 'form':
            self.export_helper.custom_export.update_question_schema()

        return super(BaseCreateCustomExportView, self).get(request, *args, **kwargs)

    messages.warning(
        request,
        _("<strong>No data found for that form "
          "(%s).</strong> Submit some data before creating an export!")
        % xmlns_to_name(self.domain, export_tag[1], app_id=None),
        extra_tags="html"
    )
    return HttpResponseRedirect(ExcelExportReport.get_url(domain=self.domain))
def case_filter(self):
    """ES filter (raw dict form) limiting results to PNC cases born within
    the last 42 days, optionally restricted by the PNC_status parameter."""
    cutoff = datetime.datetime.utcnow() - timedelta(days=42)

    # Renamed from `filters` to avoid shadowing the es filters helper module.
    clauses = BaseHNBCReport.base_filters(self)
    clauses.append({'term': {'pp_case_filter.#value': "1"}})
    clauses.append({'range': {'date_birth.#value': {"gte": json_format_date(cutoff)}}})

    status = self.request_params.get('PNC_status', '')
    if status:
        if status == 'On Time':
            # All seven PNC visits must be marked done.
            clauses.extend(
                {'term': {'case_pp_%s_done.#value' % i: 'yes'}}
                for i in range(1, 8))
        else:
            # At least one of the seven visits is not done.
            negated = [{"not": {'term': {'case_pp_%s_done.#value' % i: 'yes'}}}
                       for i in range(1, 8)]
            clauses.append({'or': negated})

    return {'and': clauses} if clauses else {}
def get_scheduled_broadcast_activate_ajax_response(self, active_flag, broadcast_id):
    """(De)activate a scheduled broadcast's timed schedule.

    :param active_flag: new active state for the schedule
    :param broadcast_id: id of the scheduled broadcast to toggle
    :return: JsonResponse with the re-rendered broadcast row, or
        HttpResponseBadRequest when the schedule uses an SMS survey but the
        subscription lacks inbound SMS access.
    """
    broadcast = self.get_scheduled_broadcast(broadcast_id)
    if not self.can_use_inbound_sms and broadcast.schedule.memoized_uses_sms_survey:
        return HttpResponseBadRequest(
            "Cannot create or edit survey reminders because subscription "
            "does not have access to inbound SMS")

    TimedSchedule.objects.filter(schedule_id=broadcast.schedule_id).update(
        active=active_flag)
    # Recompute schedule instances asynchronously so the new active state
    # takes effect for future sends.
    refresh_timed_schedule_instances.delay(
        broadcast.schedule_id.hex, broadcast.recipients,
        start_date_iso_string=json_format_date(broadcast.start_date))

    return JsonResponse({
        'success': True,
        'broadcast': self._fmt_scheduled_broadcast(broadcast),
    })
def make_rows(self, es_results, with_checkbox):
    """Yield one report row per ES form submission hit.

    NOTE(review): the ``es_results`` parameter is ignored -- rows are built
    from ``self.es_results``.  Confirm whether the argument can be removed
    or should be used instead.

    :param with_checkbox: when True, each row ends with a selection
        checkbox carrying form/case/service-type data attributes;
        otherwise it ends with ``self.display_status``.
    """
    submissions = [res['_source'] for res in self.es_results.get('hits', {}).get('hits', [])]
    for form in submissions:
        data = calculate_form_data(self, form)
        row = [
            # Form completion time, rendered as a date.
            json_format_date(iso_string_to_datetime(form["form"]["meta"]["timeEnd"])),
            self._get_case_name_html(data.get('case'), with_checkbox),
            self._get_service_type_html(form, data.get('service_type'), with_checkbox),
            data.get('location_name'),
            get_property(data.get('case'), "card_number", EMPTY_FIELD),
            data.get('location_parent_name'),
            get_property(data.get('case'), "phone_number", EMPTY_FIELD),
            data.get('amount_due'),
            get_property(data.get('case'), "visits", EMPTY_FIELD)
        ]
        if with_checkbox:
            checkbox = mark_safe('<input type="checkbox" class="selected-element" '
                                 'data-formid="%(form_id)s" '
                                 'data-caseid="%(case_id)s" data-servicetype="%(service_type)s"/>')
            row.append(checkbox % dict(form_id=data.get('form_id'),
                                       case_id=data.get('case_id'),
                                       service_type=data.get('service_type')))
        else:
            row.append(self.display_status)
        yield row
def rows(self):
    """Build the callback-status matrix: one row per open participant case,
    with name + three summary counts followed by one cell per day over the
    past two weeks."""
    # Site coordinators (commcare users) only see their own group's participants.
    group_id = None
    if self.request.couch_user.is_commcare_user():
        group_ids = self.request.couch_user.get_group_ids()
        if len(group_ids) > 0:
            group_id = group_ids[0]

    data = {}
    for case in get_cases_in_domain(self.domain, type='participant'):
        if case.closed:
            continue

        # If a site coordinator is viewing the report, only show participants from that site (group)
        if group_id is None or group_id == case.owner_id:
            timezone = pytz.timezone(case.get_case_property("time_zone"))
            data[case._id] = {
                "name": case.name,
                "time_zone": timezone,
                "dates": [None] * 14,  # one status slot per day in the window
            }

    dates = self.get_past_two_weeks()
    date_strings = [json_format_date(date) for date in dates]

    # Query window is padded by a day on each side -- presumably so timezone
    # conversion cannot push an event outside the range; confirm.
    start_date = dates[0] - timedelta(days=1)
    end_date = dates[-1] + timedelta(days=2)

    expected_callback_events = ExpectedCallback.by_domain(
        self.domain,
        start_date=datetime.combine(start_date, time(0, 0)),
        end_date=datetime.combine(end_date, time(0, 0))
    ).order_by('date')

    for event in expected_callback_events:
        if event.couch_recipient in data:
            # Bucket the event into the participant's local calendar day.
            timezone = data[event.couch_recipient]["time_zone"]
            event_date = (ServerTime(
                event.date).user_time(timezone).ui_string("%Y-%m-%d"))
            if event_date in date_strings:
                data[event.couch_recipient]["dates"][date_strings.index(
                    event_date)] = event.status

    result = []
    for case_id, data_dict in data.items():
        # Columns 1-3 are placeholders filled in after tallying:
        # [1] no-response count, [2] indicated count, [3] pending count.
        row = [
            self._fmt(data_dict["name"]),
            None,
            None,
            None,
        ]

        total_no_response = 0
        total_indicated = 0
        total_pending = 0

        for date_status in data_dict["dates"]:
            if date_status == CALLBACK_PENDING:
                total_indicated += 1
                total_pending += 1
                row.append(self._fmt(_("pending")))
            elif date_status == CALLBACK_RECEIVED:
                total_indicated += 1
                row.append(self._fmt(_("OK")))
            elif date_status == CALLBACK_MISSED:
                total_indicated += 1
                total_no_response += 1
                row.append(self._fmt_highlight(_("No Response")))
            else:
                row.append(self._fmt(_("not indicated")))

        # Highlight the no-response count only when it is non-zero.
        if total_no_response > 0:
            row[1] = self._fmt_highlight(total_no_response)
        else:
            row[1] = self._fmt(total_no_response)

        row[2] = self._fmt(total_indicated)
        row[3] = self._fmt(total_pending)
        result.append(row)

    return result
program_stage_id, case['external_id']) elif form['xmlns'] == RISK_ASSESSMENT_XMLNS: logger.debug('DHIS2: Processing Risk Assessment form') if not getattr(case, 'external_id', None): logger.info( 'Register Child form must be processed before Risk Assessment form' ) return # Try again later self._update_instance(dhis2_api, case) # Check whether the case needs to be enrolled in the Risk Assessment Program program_id = dhis2_api.get_program_id('Underlying Risk Assessment') if not dhis2_api.enrolled_in(case['external_id'], 'Underlying Risk Assessment'): today = json_format_date(date.today()) program_data = { dhis2_attr: case[cchq_attr] for cchq_attr, dhis2_attr in RISK_ASSESSMENT_PROGRAM_FIELDS.iteritems() } dhis2_api.enroll_in_id(case['external_id'], program_id, today, program_data) # Create a risk assessment event. program_stage_id = dhis2_api.get_program_stage_id( 'Underlying Risk Assessment') event = dhis2_api.form_to_event(program_id, form, RISK_ASSESSMENT_EVENT_FIELDS, program_stage_id, case['external_id'])
def slug(self):
    """Unique report slug keyed by the configured start date."""
    start = json_format_date(self.config['startdate'])
    return 'recap_passage_{0}'.format(start)
def start_param(self):
    """
    :return: the start of the window, formatted as a date string suitable
        for use in SQL
    """
    window_start = self.start
    return json_format_date(window_start)
def end_param(self):
    """
    :return: the end of the window, formatted as a date string suitable
        for use in SQL
    """
    window_end = self.end
    return json_format_date(window_end)
def __str__(self):
    """Human-readable summary: observation date, ART tag, dose x/y."""
    art_tag = "ART" if self.is_art else "NonART"
    return "Obs %s [%s] %d/%d" % (
        json_format_date(self.observed_date),
        art_tag,
        self.dose_number + 1,
        self.total_doses,
    )
def callcenter_test(request):
    """Admin debug view for inspecting call center indicators.

    Accepts either ``user_id`` (a CommCareUser id) or ``doc_id`` (a user or
    call-center case id); optional ``date`` overrides the reference date and
    ``cache`` enables real caching instead of the dummy cache.
    """
    user_id = request.GET.get("user_id")
    date_param = request.GET.get("date")
    enable_caching = request.GET.get('cache')
    doc_id = request.GET.get('doc_id')

    if not user_id and not doc_id:
        # No lookup requested yet: render the empty form.
        return render(request, "hqadmin/callcenter_test.html", {"enable_caching": enable_caching})

    error = None
    user = None
    user_case = None
    domain = None
    if user_id:
        try:
            user = CommCareUser.get(user_id)
            domain = user.project
        except ResourceNotFound:
            error = "User Not Found"
    elif doc_id:
        try:
            doc = CommCareUser.get_db().get(doc_id)
            domain = Domain.get_by_name(doc['domain'])
            doc_type = doc.get('doc_type', None)
            if doc_type == 'CommCareUser':
                # Resolve the user's call-center case via the hq user id mapping.
                case_type = domain.call_center_config.case_type
                user_case = CaseAccessors(doc['domain']).get_case_by_domain_hq_user_id(doc['_id'], case_type)
            elif doc_type == 'CommCareCase':
                if doc.get('hq_user_id'):
                    user_case = CommCareCase.wrap(doc)
                else:
                    error = 'Case ID does does not refer to a Call Center Case'
        except ResourceNotFound:
            error = "User Not Found"

    # NOTE(review): when ``date`` is absent, date_param is None and
    # dateutil.parser.parse raises TypeError, which this except clause does
    # not catch -- confirm callers always supply a date or broaden the except.
    try:
        query_date = dateutil.parser.parse(date_param)
    except ValueError:
        error = "Unable to parse date, using today"
        query_date = date.today()

    def view_data(case_id, indicators):
        # Sort indicator keys for a stable display ordering.
        new_dict = OrderedDict()
        key_list = sorted(indicators)
        for key in key_list:
            new_dict[key] = indicators[key]
        return {
            'indicators': new_dict,
            'case': CommCareCase.get(case_id),
        }

    if user or user_case:
        # Use the dummy cache unless caching was explicitly requested.
        custom_cache = None if enable_caching else cache.caches['dummy']
        override_case = CallCenterCase.from_case(user_case)
        cci = CallCenterIndicators(
            domain.name,
            domain.default_timezone,
            domain.call_center_config.case_type,
            user,
            custom_cache=custom_cache,
            override_date=query_date,
            override_cases=[override_case] if override_case else None
        )
        data = {case_id: view_data(case_id, values) for case_id, values in cci.get_data().items()}
    else:
        data = {}

    context = {
        "error": error,
        "mobile_user": user,
        "date": json_format_date(query_date),
        "enable_caching": enable_caching,
        "data": data,
        "doc_id": doc_id
    }
    return render(request, "hqadmin/callcenter_test.html", context)
def report_config(self):
    """Assemble the dict of parameters passed to this report's queries.

    Combines fixed vocabulary constants, the request's date range, dates
    offset from today (today_plus_N / today_minus_N), and any selected
    location-hierarchy filters from the querystring.
    """
    # Fixed vocabulary constants referenced by the report queries.
    config = dict(
        domain=self.domain,
        empty='',
        yes='yes',
        no='no',
        death='death',
        pregnant_mother_type='pregnant',
        health_center='health_center',
        hospital='hospital',
        home='home',
        on_route='on_route',
        other='other',
        male='male',
        female='female',
        health_center_worker='health_center_worker',
        trained_traditional_birth_attendant='trained_traditional_birth_attendant',
        normal_delivery='normal',
        cesarean_delivery='cesarean',
        unknown_delivery='unknown',
        abortion='abortion',
        weight_birth_25='2.5',
        newborn_death='newborn_death',
        infant_death='infant_death',
        child_death='child_death',
        date_of_death='date_of_death'
    )
    # Date range straight from the querystring; strsd/stred keep the raw strings.
    if 'startdate' in self.request.GET and self.request.GET['startdate']:
        config['startdate'] = self.request.GET['startdate']
        config['strsd'] = self.request.GET['startdate']
    if 'enddate' in self.request.GET and self.request.GET['enddate']:
        config['enddate'] = self.request.GET['enddate']
        config['stred'] = self.request.GET['enddate']
    today = datetime.date.today()
    config['today'] = json_format_date(today)
    # Dates offset from today, referenced by the queries by name.
    for d in [35, 56, 84, 85, 112, 196]:
        config['today_plus_%d' % d] = json_format_date(today + datetime.timedelta(days=d))
    for d in [2, 4, 21, 25, 40, 42, 75, 106, 182, 183, 273, 365, 547, 548, 700, 730]:
        config['today_minus_%d' % d] = json_format_date(today - datetime.timedelta(days=d))
    # Literal numeric strings keyed by themselves.
    for d in [1, 3, 5, 6]:
        config['%d' % d] = '%d' % d
    config['last_month'] = json_format_date(today - datetime.timedelta(days=30))
    # Location hierarchy filters: include each level with a non-'0' selection.
    for k, v in sorted(six.iteritems(LOCATION_HIERARCHY), reverse=True):
        req_prop = 'location_%s' % v['prop']
        if self.request.GET.getlist(req_prop, []):
            location_list = self.request.GET.getlist(req_prop, [])
            if location_list and location_list[0] != '0':
                config.update({k: tuple(location_list)})
    return config
def clean_dob(self):
    """Normalize the cleaned date-of-birth to an ISO date string, or None."""
    dob = self.cleaned_data['dob']
    if dob is None:
        return None
    return json_format_date(dob)
def get_result(self, key, date_range=None, reduce=True, verbose_results=False):
    """
    Query the fluff/generic view once per emitter on this Calculator.

    If your Calculator does not have a window set, you must pass a
    tuple of date or datetime objects to date_range.

    :param key: key components identifying the group to query
    :param date_range: optional (start, end); defaults to the calculator's
        window ending at get_now()
    :param reduce: when True, return the reduced value per emitter (0 when
        no rows); when False, return stripped row ids
    :param verbose_results: return the full rows instead of just ids
        (requires reduce=False)
    :return: dict mapping emitter name to its result
    :raises EmitterTypeError: for emitter types other than 'date' or 'null'
    """
    if verbose_results:
        assert not reduce, "can't have reduce set for verbose results"

    if date_range is not None:
        start, end = date_range
    elif self.window:
        now = self.fluff.get_now()
        start = now - self.window
        end = now

    result = {}
    for emitter_name in self._fluff_emitters:
        # Key layout matches the fluff/generic view:
        # [doc_type] + key + [slug, emitter].
        shared_key = [self.fluff._doc_type] + key + [self.slug, emitter_name]
        emitter = getattr(self, emitter_name)
        emitter_type = emitter._fluff_emitter
        q_args = {
            'reduce': reduce,
        }
        if emitter_type == 'date':
            assert isinstance(date_range, tuple) or self.window, (
                "You must either set a window on your Calculator "
                "or pass in a date range")
            if start > end:
                # Reversed range: query the view in descending order.
                q_args['descending'] = True
            q = self.fluff.view(
                'fluff/generic',
                startkey=shared_key + [json_format_date(start)],
                endkey=shared_key + [json_format_date(end)],
                **q_args
            ).all()
        elif emitter_type == 'null':
            # Null emitters are stored under a single None key component.
            q = self.fluff.view(
                'fluff/generic',
                key=shared_key + [None],
                **q_args
            ).all()
        else:
            raise EmitterTypeError(
                'emitter type %s not recognized' % emitter_type
            )

        if reduce:
            try:
                result[emitter_name] = q[0]['value'][emitter._reduce_type]
            except IndexError:
                # No rows for this emitter -> reduced value is 0.
                result[emitter_name] = 0
        else:
            # clean ids
            def strip(id_string):
                # Row ids carry a '<FluffClassName>-' prefix; drop it.
                prefix = '%s-' % self.fluff.__name__
                assert id_string.startswith(prefix)
                return id_string[len(prefix):]
            for row in q:
                row['id'] = strip(row['id'])
            if not verbose_results:
                # strip down to ids
                result[emitter_name] = [row['id'] for row in q]
            else:
                result[emitter_name] = q
    return result
def daterange(self, start_date, end_date):
    """Yield ISO date strings for each day from start_date through
    end_date, inclusive."""
    total_days = int((end_date - start_date).days)
    offset = 0
    while offset <= total_days:
        yield json_format_date(start_date + timedelta(offset))
        offset += 1
def title(self):
    """Report title including the configured start date."""
    start = json_format_date(self.config['startdate'])
    return 'Recap Passage %s' % start
def handle(self, *args, **options):
    """Copy a domain's docs between couch instances.

    args: source couch URI, domain name, optional target couch URI
    (defaults to the local config).  Supports filtering by doc type,
    a file of doc ids, or a --since date, plus a --simulate dry run.
    """
    if len(args) not in [2, 3]:
        raise CommandError('Usage is copy_domain %s' % self.args)

    # Databases whose contents should never be copied along with a domain.
    self.exclude_dbs = (
        # these have data we don't want to copy
        'receiverwrapper', 'couchlog', 'auditcare', 'fluff-bihar',
        'fluff-opm', 'fluff-mc', 'fluff-cvsu', 'mvp-indicators',
        'm4change',
        # todo: missing domain/docs, but probably want to add back
        'meta',
    )

    self.source_couch = source_couch = CouchConfig(args[0])
    domain = args[1].strip()
    simulate = options['simulate']
    exclude_attachments = options['exclude_attachments']
    self.run_multi_process = options['run_multi_process']

    since = json_format_date(iso_string_to_date(options['since'])) if options['since'] else None

    if options['list_types']:
        # Listing mode: show doc types per source db, then exit.
        for sourcedb_name, sourcedb in self.iter_source_dbs():
            self.list_types(sourcedb, domain, since)
        sys.exit(0)

    if simulate:
        print "\nSimulated run, no data will be copied.\n"

    if options['postgres_db'] and options['postgres_password']:
        settings.DATABASES[options['postgres_db']]['PASSWORD'] = options['postgres_password']

    # Target defaults to the local couch config when no third arg is given.
    self.targetdb = CouchConfig(args[2]) if len(args) == 3 else CouchConfig()

    try:
        domain_doc = Domain.get_by_name(domain)
    except ResourceNotFound:
        domain_doc = None

    if domain_doc is None:
        # Domain doc missing on the target: copy it first.
        self.copy_domain(source_couch, domain)

    if options['doc_types']:
        # Copy only the listed doc types, optionally bounded by --since.
        doc_types = options['doc_types'].split(',')
        for doc_type in doc_types:
            sourcedb = source_couch.get_db_for_doc_type(doc_type)
            startkey = [x for x in [domain, doc_type, since] if x is not None]
            endkey = [x for x in [domain, doc_type, {}] if x is not None]
            self.copy_docs(sourcedb, domain, simulate, startkey, endkey, doc_type=doc_type, since=since,
                           postgres_db=options['postgres_db'], exclude_attachments=exclude_attachments)
    elif options['id_file']:
        # Copy only the doc ids listed in a file (one id per line).
        path = options['id_file']
        if not os.path.isfile(path):
            print "Path '%s' does not exist or is not a file" % path
            sys.exit(1)
        with open(path) as input:
            doc_ids = [line.rstrip('\n') for line in input]
        if not doc_ids:
            print "Path '%s' does not contain any document ID's" % path
            sys.exit(1)
        for sourcedb_name, sourcedb in self.iter_source_dbs():
            self.copy_docs(sourcedb, domain, simulate, doc_ids=doc_ids, postgres_db=options['postgres_db'],
                           exclude_attachments=exclude_attachments)
    else:
        # Default: copy everything for the domain except excluded doc types.
        startkey = [domain]
        endkey = [domain, {}]
        exclude_types = DEFAULT_EXCLUDE_TYPES + options['doc_types_exclude'].split(',')
        for sourcedb_name, sourcedb in self.iter_source_dbs():
            self.copy_docs(sourcedb, domain, simulate, startkey, endkey, exclude_types=exclude_types,
                           postgres_db=options['postgres_db'], exclude_attachments=exclude_attachments)
def get_due_list_by_task_name(target_date, owner_id=None, case_es=None, size=0, case_type='task'):
    """Count open task cases due on ``target_date``, grouped via an ES facet.

    A case is "due" when date_eligible <= target_date <= date_expires.

    :param owner_id: restrict to a single owner; None matches all owners
    :param case_es: ReportCaseES to query; defaults to the Bihar domain
    :param size: number of raw case hits to return (0 = facet counts only)
    :return: generator of (term, count) pairs from the facet
    """
    case_es = case_es or ReportCaseES(BIHAR_DOMAIN)
    es_type = None
    facet_name = 'vaccination_names'

    # The type of vaccination is stored in the `name` field in ElasticSearch
    # so we can get the sums directly as facets on `name.exact` where the `.exact`
    # is to avoid tokenization so that "OPV 1" does not create two facets.
    # NOTE(review): the facet below actually aggregates on 'task_id.#value',
    # not 'name.exact' -- confirm whether this comment or the field is stale.
    base_query = case_es.base_query(start=0, size=size)

    owner_filter = {"match_all": {}} if owner_id is None else {"term": {"owner_id": owner_id}}

    # Open cases of the requested type that are due on the target date.
    filter = {
        "and": [
            owner_filter,
            {"term": {"closed": False}},
            {"term": {"type": case_type}},
            {"range": {"date_eligible.#value": {"to": json_format_date(target_date)}}},
            {"range": {"date_expires.#value": {"from": json_format_date(target_date)}}},
        ]
    }

    base_query['filter']['and'] += filter['and']
    base_query['facets'] = {
        facet_name: {
            "terms": {"field": "task_id.#value", "size": 1000},
            "facet_filter": filter  # This controls the records processed for the summation
        }
    }
    es_result = case_es.run_query(base_query, es_type=es_type)
    return ((facet['term'], facet['count'])
            for facet in es_result['facets'][facet_name]['terms'])
def safe_format_date(date):
    """Format a date as an ISO string, passing falsy values through untouched."""
    if not date:
        return date
    return json_format_date(date)
def clean_mass_health_expiration(self):
    """Normalize the Mass Health expiration date to an ISO string, or None."""
    expiration = self.cleaned_data['mass_health_expiration']
    return json_format_date(expiration) if expiration is not None else None