def handle(self, *args, **options):
    """Run a warehouse update over an optional [start, end] date range.

    Positional args (both optional): start date string, end date string.
    """
    start_date = string_to_datetime(args[0]) if len(args) >= 1 else None
    end_date = string_to_datetime(args[1]) if len(args) >= 2 else None
    return runner.update_warehouse(start_date, end_date, options["cleanup"])
def couch_doc_counts(request, domain):
    """Return a JSON map of couch doc class name -> count in this domain,
    optionally restricted by 'start'/'end' GET date parameters."""
    from casexml.apps.case.models import CommCareCase
    from couchforms.models import XFormInstance

    def _parse_param(name):
        raw = request.GET.get(name)
        return string_to_datetime(raw) if raw else None

    start = _parse_param('start')
    end = _parse_param('end')
    counts = {}
    for doc_class in (CommCareCase, XFormInstance):
        counts[doc_class.__name__] = get_doc_count_in_domain_by_class(
            domain, doc_class, start, end)
    return json_response(counts)
def write_log_events(writer, user, domain=None, override_user=None,
                     start_date=None, end_date=None):
    """Write the user's navigation events to the writer, optionally
    filtered to a single domain and/or a date window."""
    if start_date:
        # Strip tzinfo so the dates compare against naive datetimes.
        start_date = string_to_datetime(start_date).replace(tzinfo=None)
    else:
        start_date = None
    if end_date:
        end_date = string_to_datetime(end_date).replace(tzinfo=None)
    else:
        end_date = None
    for event in navigation_events_by_user(user, start_date, end_date):
        if domain and domain != event.domain:
            continue
        write_log_event(writer, event, override_user)
def retrieve_patient_group(user_ids, domain, year, month):
    """
    Given a set of user IDs, retrieve all patients matching that set.
    Update them with followup and referral form info.
    """
    caseid_set = set()
    patients = {}
    year = int(year)
    month = int(month)
    # Last second of the reporting month.
    form_now = datetime(year, month, 1) + relativedelta(months=1) - relativedelta(seconds=1)
    for result in user_ids:
        rform = result['doc']['form']  # shortcut
        # BUG FIX: previously a bare `except: pass` could leave `regdate`
        # unbound (NameError below) or stale from a previous iteration.
        regdate = None
        try:
            # strptime() can fail if the CHW enters weird data.
            regdate = string_to_datetime(
                rform['patient']['date_of_registration'])
            if regdate > form_now:
                continue
        except Exception:
            # Unparseable registration date: keep the patient but treat the
            # registration date as unknown.
            pass
        p = dict()
        age_tmp = string_to_datetime(rform['patient']['date_of_birth'])
        p['age'] = int((form_now - age_tmp).days / 365.25)
        p.update(rform['case'])
        p.update(rform['patient'])
        caseid_set.add(p['case_id'])
        p['ward'] = p['village']
        p['registered_this_month'] = bool(
            regdate is not None
            and regdate.year == form_now.year
            and regdate.month == form_now.month)
        p['followup_this_month'] = 0
        p['referrals_made'] = 0
        # Guarantee the report fields exist even when the form omitted them.
        for field in ('provider', 'registration_and_followup_hiv',
                      'hiv_status_during_registration', 'hiv_status_after_test',
                      'referrals_hiv', 'ctc', 'medication_given', 'services',
                      'referrals'):
            if field not in p:
                p[field] = ''
        patients[p['case_id']] = p
    update_patients_with_followups(domain, patients, caseid_set, year, month)
    update_patients_with_referrals(patients, caseid_set, year, month)
    # Patients without followups this month are intentionally kept --
    # sometimes there's a referral in a month with no followup.
    gp = PathfinderPatientGroup()
    gp += patients.values()
    return gp
def retrieve_patient_group(user_ids, domain, year, month):
    """
    Given a set of user IDs, retrieve all patients matching that set.
    Update them with followup and referral form info.
    """
    caseid_set = set()
    patients = {}
    year = int(year)
    month = int(month)
    # Last second of the reporting month.
    form_now = datetime(year, month, 1) + relativedelta(months=1) - relativedelta(seconds=1)
    for result in user_ids:
        rform = result['doc']['form']  # shortcut
        # BUG FIX: previously a bare `except: pass` could leave `regdate`
        # unbound (NameError below) or stale from a previous iteration.
        regdate = None
        try:
            # strptime() can fail if the CHW enters weird data.
            regdate = string_to_datetime(rform['patient']['date_of_registration'])
            if regdate > form_now:
                continue
        except Exception:
            # Unparseable registration date: keep the patient but treat the
            # registration date as unknown.
            pass
        p = dict()
        age_tmp = string_to_datetime(rform['patient']['date_of_birth'])
        p['age'] = int((form_now - age_tmp).days / 365.25)
        p.update(rform['case'])
        p.update(rform['patient'])
        caseid_set.add(p['case_id'])
        p['ward'] = p['village']
        p['registered_this_month'] = bool(
            regdate is not None
            and regdate.year == form_now.year
            and regdate.month == form_now.month)
        p['followup_this_month'] = 0
        p['referrals_made'] = 0
        # Guarantee the report fields exist even when the form omitted them.
        for field in ('provider', 'registration_and_followup_hiv',
                      'hiv_status_during_registration', 'hiv_status_after_test',
                      'referrals_hiv', 'ctc', 'medication_given', 'services',
                      'referrals'):
            if field not in p:
                p[field] = ''
        patients[p['case_id']] = p
    update_patients_with_followups(domain, patients, caseid_set, year, month)
    update_patients_with_referrals(patients, caseid_set, year, month)
    # Patients without followups this month are intentionally kept --
    # sometimes there's a referral in a month with no followup.
    gp = PathfinderPatientGroup()
    gp += patients.values()
    return gp
def validate_date(self, date):
    """Raise CommandError unless ``date`` round-trips through the json
    datetime format unchanged."""
    try:
        round_tripped = json_format_datetime(string_to_datetime(date))
    except Exception:
        round_tripped = None
    if round_tripped is None or round_tripped != date:
        raise CommandError("Invalid datetime given for argument '%s'. Argument must be json-formatted." % date)
def from_doc(cls, case_block):
    """Create a case object from a case block."""
    if const.CASE_ACTION_CREATE not in case_block:
        raise ValueError("No create tag found in case block!")
    # Pull the required fields out of the case/create block.
    create_block = case_block[const.CASE_ACTION_CREATE]
    case_id = case_block[const.CASE_TAG_ID]
    opened_on = parsing.string_to_datetime(case_block[const.CASE_TAG_MODIFIED])
    case_type = create_block[const.CASE_TAG_TYPE_ID]
    case_name = create_block[const.CASE_TAG_NAME]
    ext_id = create_block[const.CASE_TAG_EXTERNAL_ID]
    if const.CASE_TAG_USER_ID in create_block:
        user_id = create_block[const.CASE_TAG_USER_ID]
    else:
        user_id = ""
    create_action = CommCareCaseAction.from_action_block(
        const.CASE_ACTION_CREATE, opened_on, opened_on.date(), create_block)
    case = CommCareCase(
        case_id=case_id,
        opened_on=opened_on,
        modified_on=opened_on,
        type=case_type,
        name=case_name,
        user_id=user_id,
        external_id=ext_id,
        closed=False,
        actions=[create_action],
    )
    # Apply initial updates, referrals and such, if present.
    case.update_from_block(case_block)
    return case
def handle(self, *args, **options):
    """Reprocess problem forms in a couch-backed domain, optionally
    only those received since a given datetime.

    args: domain [since]
    """
    if len(args) == 1:
        domain = args[0]
        since = None
    elif len(args) == 2:
        domain = args[0]
        since = string_to_datetime(args[1])
    else:
        raise CommandError('Usage: %s\n%s' % (self.args, self.help))
    if should_use_sql_backend(domain):
        raise CommandError('This command only works for couch-based domains.')
    succeeded = []
    failed = []
    # error message text -> number of occurrences
    error_messages = defaultdict(lambda: 0)
    for form in iter_problem_forms(domain, since):
        # Tab-separated audit line for each problem form encountered.
        print "%s\t%s\t%s\t%s\t%s" % (form._id, form.received_on, form.xmlns,
            form.get_data('form/meta/username'), form.problem.strip())
        if not options["dryrun"]:
            try:
                reprocess_form_cases(form)
            except Exception, e:
                failed.append(form._id)
                error_messages[str(e)] += 1
            else:
                succeeded.append(form._id)
def fetch_key_records(request, domain):
    """Return the key-records payload for the requesting user."""
    last_issued = request.GET.get('last_issued')
    if last_issued:
        # Strip tz so it compares against naive datetimes downstream.
        last_issued = string_to_datetime(last_issued).replace(tzinfo=None)
    payload = FetchKeyRecords(
        domain, request.couch_user.user_id, last_issued).get_payload()
    return HttpResponse(payload)
def deid_date(val, doc, key_path='form/case/@case_id|form/case/case_id|_id'):
    """De-identify a date by shifting it by a per-document pseudo-random
    day offset; returns None when no identifying key is found."""
    key = JSONPath(key_path).search(doc)
    if not key:
        return None
    # Deterministic offset in [-31, 31] derived from the document key.
    day_shift = DeidGenerator(key, 'date').random_number(-31, 32)
    return (string_to_datetime(val) + timedelta(days=day_shift)).date()
def _cacheable_domain_activity_report(request):
    """Count, per domain, which known CommCareUsers submitted a form within
    the last N days for each landmark N (default [7, 30, 90]).

    Returns JSON: {'domains': [...], 'landmarks': [...]}.
    """
    landmarks = json.loads(request.GET.get('landmarks') or "[7, 30, 90]")
    landmarks.sort()
    now = datetime.utcnow()
    # Cutoff datetime for each landmark; dates[-1] is the earliest cutoff.
    dates = []
    for landmark in landmarks:
        dates.append(now - timedelta(days=landmark))
    domains = [{'name': domain.name, 'display_name': domain.display_name()}
               for domain in Domain.get_all()]
    for domain in domains:
        domain['users'] = dict([(user.user_id, {'raw_username': user.raw_username})
                                for user in CommCareUser.by_domain(domain['name'])])
        if not domain['users']:
            continue
        key = make_form_couch_key(domain['name'])
        # All form submissions between the earliest cutoff and now.
        forms = [r['value'] for r in get_db().view('reports_forms/all_forms',
            reduce=False,
            startkey=key+[json_format_datetime(dates[-1])],
            endkey=key+[json_format_datetime(now)],
        ).all()]
        # user_sets[i]: user_id -> info, for users active within landmarks[i] days.
        domain['user_sets'] = [dict() for landmark in landmarks]
        for form in forms:
            user_id = form.get('user_id')
            try:
                time = string_to_datetime(form['submission_time']).replace(tzinfo = None)
            except ValueError:
                # Skip forms whose submission_time cannot be parsed.
                continue
            if user_id in domain['users']:
                for i, date in enumerate(dates):
                    if time > date:
                        domain['user_sets'][i][user_id] = domain['users'][user_id]
    return HttpResponse(json.dumps({'domains': domains, 'landmarks': landmarks}))
def force_to_datetime(val):
    """Forces a date, string, or datetime to a datetime."""
    if not val:
        return val
    if isinstance(val, datetime):
        return val
    if isinstance(val, date):
        # Promote a bare date to midnight of that day.
        return datetime.combine(val, time())
    if isinstance(val, basestring):
        return string_to_datetime(val)
    raise ValueError("object must be date or datetime!")
def from_block(cls, date, block):
    """Create referrals from a block of processed data (a dictionary)."""
    if const.REFERRAL_ACTION_OPEN not in block:
        raise ValueError("No open tag found in referral block!")
    referral_id = block[const.REFERRAL_TAG_ID]
    follow_date = parsing.string_to_datetime(block[const.REFERRAL_TAG_FOLLOWUP_DATE])
    open_block = block[const.REFERRAL_ACTION_OPEN]
    # One referral per space-separated type in the open block.
    ref_list = [
        Referral(referral_id=referral_id, followup_on=follow_date,
                 type=ref_type, opened_on=date, modified_on=date,
                 closed=False)
        for ref_type in open_block[const.REFERRAL_TAG_TYPES].split(" ")
    ]
    # There could be a single update block that closes a referral that we
    # just opened. Not sure why this would happen, but we'll support it.
    if const.REFERRAL_ACTION_UPDATE in block:
        update_block = block[const.REFERRAL_ACTION_UPDATE]
        for ref in ref_list:
            if ref.type == update_block[const.REFERRAL_TAG_TYPE]:
                ref.apply_updates(date, block)
    return ref_list
def from_block(cls, date, block):
    """
    Create referrals from a block of processed data (a dictionary)

    One Referral is created per space-separated type in the open block;
    an optional update action in the same block is applied immediately.
    """
    if not const.REFERRAL_ACTION_OPEN in block:
        raise ValueError("No open tag found in referral block!")
    id = block[const.REFERRAL_TAG_ID]
    follow_date = parsing.string_to_datetime(
        block[const.REFERRAL_TAG_FOLLOWUP_DATE])
    open_block = block[const.REFERRAL_ACTION_OPEN]
    types = open_block[const.REFERRAL_TAG_TYPES].split(" ")
    ref_list = []
    for type in types:
        ref = Referral(referral_id=id,
                       followup_on=follow_date,
                       type=type,
                       opened_on=date,
                       modified_on=date,
                       closed=False)
        ref_list.append(ref)
    # there could be a single update block that closes a referral
    # that we just opened. not sure why this would happen, but
    # we'll support it.
    if const.REFERRAL_ACTION_UPDATE in block:
        update_block = block[const.REFERRAL_ACTION_UPDATE]
        for ref in ref_list:
            if ref.type == update_block[const.REFERRAL_TAG_TYPE]:
                # apply_updates looks up the update action itself, so the
                # whole block (not just update_block) is passed here.
                ref.apply_updates(date, block)
    return ref_list
def handle(self, *args, **options):
    """Reprocess problem forms for a domain, optionally only those
    received since a given datetime.

    args: domain [since]
    """
    domain = since = None
    if len(args) == 1:
        domain = args[0]
    elif len(args) == 2:
        domain = args[0]
        since = string_to_datetime(args[1])
    else:
        raise CommandError('Usage: %s\n%s' % (self.args, self.help))
    succeeded = []
    failed = []
    # error message text -> number of occurrences
    error_messages = defaultdict(lambda: 0)
    for form in iter_problem_forms(domain, since):
        # Tab-separated audit line for each problem form encountered.
        print "%s\t%s\t%s\t%s\t%s" % (
            form._id,
            form.received_on,
            form.xmlns,
            form.xpath('form/meta/username'),
            form.problem.strip())
        if not options["dryrun"]:
            try:
                reprocess_form_cases(form)
            except Exception, e:
                failed.append(form._id)
                error_messages[str(e)] += 1
            else:
                succeeded.append(form._id)
def get_monthly_case_list(chw, startdate, enddate):
    """
    Like get_monthly_case_breakdown but return lists of the actual
    CommCareCase objects. Also the dates are different.

    A case is included in the date range based on the first date the case
    is synced to the phone, regardless of due date.

    The return value is:
    { date: [(case_id, casedoc), (case_id, casedoc), ...],
      date: [(case_id, casedoc), (case_id, casedoc), ...],
      ...

    casedocs can be null, which would typically indicate that the case or
    patient has since been deleted.
    """
    # One reduced row per (chw, case); the value is the first-synced timestamp.
    data = get_db().view("phone/cases_sent_to_chws",
                         group=True,
                         group_level=2,
                         reduce=True,
                         startkey=[chw.get_id],
                         endkey=[chw.get_id, {}])
    monthly_breakdown = defaultdict(lambda: [])
    for row in data:
        case_id = row["key"][1]
        first_synced = string_to_datetime(row["value"])
        # Truncate to midnight so range comparisons ignore time of day.
        first_synced = datetime(first_synced.year, first_synced.month, first_synced.day)
        if startdate <= first_synced and first_synced < enddate:
            try:
                # Bucket by the first day of the month the case was synced.
                monthly_breakdown[datetime(first_synced.year, first_synced.month, 1)]\
                    .append((case_id, CommCareCase.get_by_id(case_id)))
            except MultipleResultsFound:
                logging.error("Multiple results found for case id %s in chw pi report. this is really weird." % case_id)
    return monthly_breakdown
def get_visit_date(cls, form):
    """Extract a visit date from the form, trying known date properties
    in priority order; falls back to today (UTC) when none are present."""
    for prop in ("encounter_date", "date", "meta/TimeEnd"):
        value = form.xpath(prop)
        if value:
            return string_to_datetime(value).date()
    return datetime.utcnow().date()
def days_until(self):
    """Days from now until self.variable; "(?)" when it cannot be parsed."""
    try:
        target = string_to_datetime(self.variable)
    except Exception:
        return "(?)"
    # Add 12 hours and then floor == round to the nearest day.
    return (target - datetime.utcnow() + timedelta(hours=12)).days
def row(site):
    # Build a stockout-days row for one outlet site: one cell per product
    # plus a combined-stockout column.
    data = outlet_metadata(site, self.ancestry)
    stockout_days = []
    inactive_site = True
    for p in products:
        startkey = [str(self.domain), site._id, p['_id']]
        endkey = startkey + [{}]
        # Newest state first: keys are swapped because descending=True
        # reverses the key range.
        latest_state = get_db().view('commtrack/stock_product_state',
                                     startkey=endkey,
                                     endkey=startkey,
                                     descending=True).first()
        if latest_state:
            doc = latest_state['value']
            so_date = doc['updated_unknown_properties']['stocked_out_since']
            if so_date:
                # +1 so a stockout starting today counts as one day.
                so_days = (date.today() - dateparse.string_to_datetime(so_date).date()).days + 1
            else:
                so_days = 0
            inactive_site = False
        else:
            # No stock state at all for this product at this site.
            so_days = None
        if so_days is not None:
            stockout_days.append(so_days)
            data.append(so_days)
        else:
            # em dash placeholder for "no data"
            data.append(u'\u2014')
    # Minimum across products -- presumably the days for which *every*
    # product was stocked out simultaneously (assumes contiguous stockouts).
    combined_stockout_days = min(stockout_days) if stockout_days else None
    data.append(combined_stockout_days if combined_stockout_days is not None else u'\u2014')
    if self.HIDE_NODATA_LOCS and inactive_site:
        return None
    return data
def force_to_date(val):
    """Forces a date, string, or datetime to a date."""
    if not val:
        return val
    if isinstance(val, datetime):
        return val.date()
    if isinstance(val, date):
        return val
    if isinstance(val, basestring):
        return string_to_datetime(val).date()
    raise ValueError("object must be date or datetime!")
def clean(self, value):
    """Validate raw reminder-event dicts and convert them to
    CaseReminderEvent objects.

    Raises ValidationError on the first invalid field encountered.
    """
    events = []
    for e in value:
        try:
            day = int(e["day"])
        except Exception:
            raise ValidationError("Day must be specified and must be a number.")
        # NOTE(review): the pattern is not a raw string and is unanchored at
        # the end, so e.g. "1:23xyz" passes the regex and relies on
        # string_to_datetime below to reject it.
        pattern = re.compile("\d{1,2}:\d\d")
        if pattern.match(e["time"]):
            try:
                time = string_to_datetime(e["time"]).time()
            except Exception:
                raise ValidationError("Please enter a valid time from 00:00 to 23:59.")
        else:
            raise ValidationError("Time must be in the format HH:MM.")
        # Collect one message per language; duplicates are an error.
        message = {}
        if self.widget.method == "sms" or self.widget.method == "callback":
            for key in e["messages"]:
                language = e["messages"][key]["language"]
                text = e["messages"][key]["text"]
                if len(language.strip()) == 0:
                    raise ValidationError("Please enter a language code.")
                if len(text.strip()) == 0:
                    raise ValidationError("Please enter a message.")
                if language in message:
                    raise ValidationError("You have entered the same language twice for the same reminder event.");
                message[language] = text
        # Timeout intervals only apply to the "callback" method.
        if len(e["timeouts"].strip()) == 0 or self.widget.method != "callback":
            timeouts_int = []
        else:
            timeouts_str = e["timeouts"].split(",")
            timeouts_int = []
            for t in timeouts_str:
                try:
                    timeouts_int.append(int(t))
                except Exception:
                    raise ValidationError("Callback timeout intervals must be a list of comma-separated numbers.")
        form_unique_id = None
        if self.widget.method == "survey":
            form_unique_id = e.get("survey", None)
            if form_unique_id is None:
                raise ValidationError("Please create a form for the survey first, and then create the reminder definition.")
        events.append(CaseReminderEvent(
            day_num = day
            ,fire_time = time
            ,message = message
            ,callback_timeout_intervals = timeouts_int
            ,form_unique_id = form_unique_id
        ))
    if len(events) == 0:
        raise ValidationError("You must have at least one reminder event.")
    return events
def __init__(self, meta_block):
    """Populate metadata fields from a form meta block.

    For the start/end timestamps both the constant tag names and the
    legacy snake_case keys ("time_start"/"time_end") are accepted.
    """
    if const.TAG_META_CLINIC_ID in meta_block:
        self.clinic_id = str(meta_block[const.TAG_META_CLINIC_ID])
    for attr, tag, legacy in (
            ("time_start", const.TAG_META_TIMESTART, "time_start"),
            ("time_end", const.TAG_META_TIMEEND, "time_end")):
        if tag in meta_block:
            setattr(self, attr, string_to_datetime(meta_block[tag]))
        elif legacy in meta_block:
            setattr(self, attr, string_to_datetime(meta_block[legacy]))
    for attr, tag in (("username", const.TAG_META_USERNAME),
                      ("user_id", const.TAG_META_USER_ID),
                      ("uid", const.TAG_META_UID)):
        if tag in meta_block:
            setattr(self, attr, meta_block[tag])
def get_first_submission_date(user_id):
    """Return the user's earliest submission date, or None if they have
    no submissions."""
    row = get_db().view(
        "reports/user_submission_dates",
        group=True,
        group_level=2,
        startkey=[user_id],
        endkey=[user_id, {}],
        limit=1,
    ).one()
    return string_to_datetime(row["key"][1]) if row else None
def ajax_POST_send(self, get, post):
    """Feed an incoming ajax-posted message into the backend's receive
    hook; always reports success."""
    timestamp = None
    if "timestamp" in post:
        timestamp = string_to_datetime(post.get("timestamp"))
    self.backend.receive(
        post.get("identity", None), post.get("text", ""), timestamp)
    return True
def extract_birthdate(doc, field=None):
    """Pull a birthdate out of the doc.

    When ``field`` is given, read exactly that key; otherwise try
    'birthdate' and fall back to 'dob'. Returns a date, or None when
    nothing usable is found.
    """
    if field:
        birthdate = doc.get(field)
    else:
        birthdate = doc.get('birthdate')
        if not birthdate:
            birthdate = doc.get('dob')
    return string_to_datetime(birthdate).date() if birthdate else None
def case_plot_js(chw_id):
    """Render the case-plot JS snippet for a CHW from the dates their
    cases were first synced."""
    # there has to be a better way to do this
    rows = get_db().view("phone/cases_sent_to_chws",
                         group=True,
                         group_level=2,
                         reduce=True,
                         startkey=[chw_id],
                         endkey=[chw_id, {}])
    sent_dates = [string_to_datetime(row["value"]).date() for row in rows]
    daily_case_data, total_case_data = get_cumulative_counts(sent_dates)
    return render_to_string("reports/partials/case_plot.js",
                            {"daily_case_data": daily_case_data,
                             "total_case_data": total_case_data})
def from_ts(dt):
    """Normalize a timestamp (datetime or string) to a naive datetime."""
    # damn this is ugly
    if isinstance(dt, datetime):
        return dt.replace(tzinfo=None)
    if len(dt) > 20 and dt.endswith('Z'):
        # deal with invalid timestamps (where are these coming from?)
        dt = dt[:-1]
    return dateparse.string_to_datetime(dt).replace(tzinfo=None)
def extract_date(x):
    """Best-effort extraction of a datetime from a view row's key;
    logs and returns None on any failure."""
    try:
        # Clip any timezone suffix before parsing.
        datestring = x['key'][2][:len('yyyy-mm-ddThh:mm:ss')]
        return string_to_datetime(datestring)
    except Exception:
        logging.error("Tried to get a date from this, but it didn't work: %r" % x)
        return None
def get_sparkline_extras(data):
    """Get sparkline plot json extras.

    flot expects [[timestamp1, value1], [timestamp2, value2], ...]; here
    each data dict is keyed by its flot timestamp (ms since epoch, int).
    """
    # A plain dict suffices: values were always assigned directly, so the
    # previous nested defaultdict default was never used.
    ret = {}
    for date, data_dict in data.items():
        ret[int(date_to_flot_time(string_to_datetime(date)))] = data_dict
    return json.dumps(ret)
def load_messages(message_file):
    """Replay SMS messages from a CSV export through the message router.

    Inbound rows (dir == "I") are re-sent as test messages; outbound rows
    (dir == "O") are mined for supply-point statuses which are saved
    directly to the db.
    """
    print "loading messages from %s" % message_file
    print "started"
    with open(message_file, 'r') as f:
        reader = unicode_csv_reader(f, delimiter=',', quotechar='"')
        inbound_count = outbound_count = 0
        # effectively "no limit"
        inbound_max = outbound_max = 9999999999
        for row in reader:
            pk1, pk2, pk3, dir, timestamp, text, phone = row
            parsed_date = string_to_datetime(timestamp)
            if dir == "I":
                #print "%s: %s (%s)" % (phone, text, timestamp)
                inbound_count = inbound_count + 1
                try:
                    utils.send_test_message(identity=phone, text=text, timestamp=timestamp)
                except RouterError, e:
                    # Dump the router's response before aborting the load.
                    print e.code
                    print e.content_type
                    print e.response
                    raise
                if inbound_count % 100 == 0:
                    print "processed %s inbound and %s outbound messages." % (
                        inbound_count, outbound_count)
            elif dir == "O":
                status_type, status_value = guess_status_from_outbound_message(
                    text)
                if status_type and status_value:
                    outbound_count = outbound_count + 1
                    # this is super janky, but we'll live with it
                    # hack it so that outbound reminders generate the
                    # appropriate supply point statuses in the db
                    notset = False
                    try:
                        connection = Connection.objects.get(
                            identity=phone, backend__name="migration")
                        if connection.contact and connection.contact.supply_point:
                            SupplyPointStatus.objects.create(
                                status_type=status_type,
                                status_value=status_value,
                                status_date=parsed_date,
                                supply_point=connection.contact.
                                supply_point)
                        else:
                            notset = True
                    except Connection.DoesNotExist:
                        notset = True
                    if notset:
                        print "No connection, contact, or supply point found for %s, so no status saved" % phone
            if inbound_count >= inbound_max:
                break
            if outbound_count >= outbound_max:
                break
def get_last_submission_date(user_id):
    """Return the user's most recent submission date, or None if they have
    no submissions."""
    # have to swap the start and end keys when you specify descending=true
    row = get_db().view(
        "reports/user_submission_dates",
        group=True,
        group_level=2,
        startkey=[user_id, {}],
        endkey=[user_id],
        limit=1,
        descending=True,
    ).one()
    return string_to_datetime(row["key"][1]) if row else None
def admin_fetch_key_records(request, domain):
    """Fetch key records on behalf of another user (admin view); the target
    user is named by the 'as' GET parameter."""
    last_issued = request.GET.get('last_issued')
    if last_issued:
        last_issued = string_to_datetime(last_issued).replace(tzinfo=None)
    username = request.GET.get('as', '')
    key_user = CommCareUser.get_by_username(username)
    if not key_user:
        return HttpResponseNotFound('User %s not found.' % username)
    return HttpResponse(
        FetchKeyRecords(domain, key_user._id, last_issued).get_payload())
def get_sparkline_extras(data):
    """Get sparkline plot json extras.

    This method is no longer supported and always raises
    NotImplementedError. (The old implementation, which keyed each data
    dict by its flot timestamp, was unreachable dead code and has been
    removed.)
    """
    raise NotImplementedError("This method is no longer supported")
def apply_updates(self, date, referral_block):
    """Apply the update action in ``referral_block`` to this referral.

    No-op (with a warning) when the block carries no update action or its
    type does not match this referral's type.
    """
    if const.REFERRAL_ACTION_UPDATE not in referral_block:
        logging.warn("No update action found in referral block, nothing to be applied")
        return
    update_block = referral_block[const.REFERRAL_ACTION_UPDATE]
    if self.type != update_block[const.REFERRAL_TAG_TYPE]:
        logging.warn("Tried to update from a block with a mismatched type!")
        return
    # Only ever move modified_on forward.
    if date > self.modified_on:
        self.modified_on = date
    if const.REFERRAL_TAG_FOLLOWUP_DATE in referral_block:
        self.followup_on = parsing.string_to_datetime(
            referral_block[const.REFERRAL_TAG_FOLLOWUP_DATE])
    if const.REFERRAL_TAG_DATE_CLOSED in update_block:
        self.closed = True
        self.closed_on = parsing.string_to_datetime(
            update_block[const.REFERRAL_TAG_DATE_CLOSED])
def get_edd(encounter):
    """
    Get an edd from the form. First checks the lmp field, then the edd
    field, then the gestational age. If none are filled in returns
    nothing. Otherwise calculates the edd from what it finds.
    """
    formdoc = encounter.get_xform()
    lmp = formdoc.xpath("first_visit/lmp")
    if lmp:
        # edd = lmp + 40 weeks = 280 days
        return edd_from_lmp(string_to_datetime(lmp).date())
    edd = formdoc.xpath("first_visit/edd")
    if edd:
        return string_to_datetime(edd).date()
    gest_age = formdoc.xpath("gestational_age")
    if gest_age:
        return edd_from_gestational_age(encounter.visit_date, int(gest_age))
    vitals_gest_age = formdoc.xpath("vitals/gest_age")
    if vitals_gest_age:
        return edd_from_gestational_age(encounter.visit_date, int(vitals_gest_age))
    # fall back
    return None
def summary_row(site, reports):
    """Build a summary row for one outlet site from its stock reports.

    Aggregates transactions by product and action, reads the latest stock
    level per product, and computes the number of days on which all
    products were simultaneously stocked out.
    """
    all_transactions = list(itertools.chain(*(get_transactions(r) for r in reports)))
    tx_by_product = map_reduce(lambda tx: [(tx['product'],)],
                               data=all_transactions, include_docs=True)
    data = outlet_metadata(site, self.ancestry)
    # product id -> set of dates on which that product was stocked out
    stockouts = {}
    inactive_site = True
    for p in products:
        tx_by_action = map_reduce(lambda tx: [(tx['action'], int(tx['value']))],
                                  data=tx_by_product.get(p['_id'], []))
        product_states = product_state_buckets.get((site._id, p['_id']), [])
        # Only states that actually carry a stock-level update.
        stock_update_states = filter(lambda st: 'current_stock' in st['updated_unknown_properties'], product_states)
        latest_state = stock_update_states[-1] if stock_update_states else None
        if latest_state:
            stock = latest_state['updated_unknown_properties']['current_stock']
            as_of = dateparse.string_to_datetime(latest_state['server_date']).strftime('%Y-%m-%d')
            inactive_site = False
        stockout_dates = set()
        for state in product_states:
            stocked_out_since = state['updated_unknown_properties'].get('stocked_out_since')
            if stocked_out_since:
                # Clip the stockout window to the report period start.
                so_start = max(dateparse.string_to_datetime(stocked_out_since).date(),
                               self.datespan.startdate.date())
                so_end = dateparse.string_to_datetime(state['server_date']).date()
                # TODO deal with time zone issues
                dt = so_start
                while dt < so_end:
                    stockout_dates.add(dt)
                    dt += timedelta(days=1)
        stockouts[p['_id']] = stockout_dates
        data.append('%s (%s)' % (stock, as_of) if latest_state else u'\u2014')
        data.append(sum(tx_by_action.get('sales', [])))
        data.append(sum(tx_by_action.get('receipts', [])))
        data.append(sum(tx_by_action.get('consumption', [])))
    # Intersection across products: days on which every product was stocked out.
    combined_stockout_days = len(reduce(lambda a, b: a.intersection(b), stockouts.values()))
    data.append(combined_stockout_days)
    if self.HIDE_NODATA_LOCS and inactive_site:
        return None
    return data
def get_messages(self, language_code=None):
    """Return the rendered reminder message, or an empty list when the AWW
    has a VHND form submission within the last 37 days (no message then)."""
    today = self.now.date()
    for forms in self.forms.values():
        vhnd_date = forms[0]['form'].get('vhsnd_date_past_month')
        if vhnd_date is None:
            continue
        if (today - string_to_datetime(vhnd_date).date()).days < 37:
            # AWW has VHND form submission in last 37 days -> no message
            return []
    return [self.render_template({}, language_code=language_code)]
def force_to_date(val):
    """Forces a date, string, or datetime to a date."""
    if not val:
        return val
    if isinstance(val, datetime.datetime):
        return val.date()
    if isinstance(val, datetime.date):
        return val
    if isinstance(val, str):
        return string_to_datetime(val).date()
    raise ValueError("object must be date or datetime!")
def fetch_key_records(request, domain):
    """Return the key-records payload for the requesting user, recording
    the requesting device id for non-demo mobile workers."""
    last_issued = request.GET.get('last_issued')
    if last_issued:
        last_issued = string_to_datetime(last_issued).replace(tzinfo=None)
    payload = FetchKeyRecords(
        domain, request.couch_user.user_id, last_issued).get_payload()
    device_id = request.GET.get('device_id')
    if (device_id
            and isinstance(request.couch_user, CommCareUser)
            and not request.couch_user.is_demo_user):
        request.couch_user.update_device_id_last_used(device_id)
        request.couch_user.save()
    return HttpResponse(payload)
def from_case_update(cls, case_update, xformdoc):
    """Create a case object from a case update object."""
    case = cls()
    case._id = case_update.id
    if case_update.modified_on_str:
        case.modified_on = parsing.string_to_datetime(case_update.modified_on_str)
    else:
        # No modification time in the update; use "now".
        case.modified_on = datetime.utcnow()
    # apply initial updates, referrals and such, if present
    case.update_from_case_update(case_update, xformdoc)
    return case
def docdownload(request, facility_id):
    """
    Download google docs document

    Finds the most recently updated Google Doc whose title contains the
    facility's supply point code and streams it back as a PDF.
    """
    if 'token' in request.session:
        #should be able to make this global
        client = gdata.docs.client.DocsClient()
        client.ssl = True  # Force all API requests through HTTPS
        client.http_client.debug = False  # Set to True for debugging HTTP requests
        client.auth_token = gdata.gauth.AuthSubToken(request.session['token'])
        supply_point = get_object_or_404(SupplyPoint, pk=facility_id)
        # Title search (non-exact) for docs matching the supply point code.
        query_string = '/feeds/default/private/full?title=%s&title-exact=false&max-results=100' % supply_point.code
        feed = client.GetDocList(uri=query_string)
        most_recent_doc = None
        if not feed.entry:
            messages.error(request, 'Sorry, there is no recent R&R for this facility.')
            return HttpResponseRedirect(
                reverse("tz_facility_details", args=[supply_point.pk]))
        else:
            # Pick the entry with the latest 'updated' timestamp.
            for entry in feed.entry:
                if not most_recent_doc:
                    most_recent_doc = entry
                else:
                    new_date = string_to_datetime(entry.updated.text)
                    old_date = string_to_datetime(most_recent_doc.updated.text)
                    if new_date > old_date:
                        most_recent_doc = entry
        exportFormat = '&exportFormat=pdf'
        content = client.GetFileContent(uri=most_recent_doc.content.src + exportFormat)
        response = HttpResponse(content)
        response['content-Type'] = 'application/pdf'
        response[
            'Content-Disposition'] = 'inline; filename=%s' % most_recent_doc.title.text
        return response
    # NOTE(review): implicitly returns None when no auth token is in the
    # session -- presumably upstream auth guarantees the token; confirm.
def _calculate_both(self, case):
    """Scan a pregnancy case's complication forms, tracking the (last)
    complication date and whether a complication occurred shortly after
    the delivery ("add") date.

    NOTE(review): the computed locals are never returned or stored here --
    presumably a wrapper consumes them; confirm against the full class.
    """
    complication_date = None
    complication_shortly_after_birth_date = None
    if case.type == 'cc_bihar_pregnancy':
        for form, date in self.get_forms_with_complications(case):
            # Later forms overwrite earlier ones, so this ends up holding
            # the date from the last complication form.
            complication_date = date
            if form.xmlns == DELIVERY:
                add = form.get_data('form/case/update/add')
            else:
                add = get_add(case)
            # 'add' is a date string here; parse it to a date.
            add = string_to_datetime(add).date()
            # assumes self.days is a timedelta -- TODO confirm
            if form.metadata.timeStart.date() - add <= self.days:
                complication_shortly_after_birth_date = date
def row(site):
    # Build a stockout-days row for one outlet site: one cell per product
    # plus a combined-stockout column.
    data = outlet_metadata(site, self.ancestry)
    stockout_days = []
    inactive_site = True
    for p in products:
        startkey = [str(self.domain), site._id, p['_id']]
        endkey = startkey + [{}]

        def product_states():
            # Newest-first states for this (domain, site, product) that
            # carry a stocked_out_since property; keys are swapped because
            # the view is queried descending.
            for st in get_db().view('commtrack/stock_product_state',
                                    startkey=endkey,
                                    endkey=startkey,
                                    descending=True):
                doc = st['value']
                if 'stocked_out_since' in doc[
                        'updated_unknown_properties']:
                    yield doc

        try:
            # Python 2 generator protocol: take only the newest match.
            latest_state = product_states().next()
        except StopIteration:
            latest_state = None
        if latest_state:
            so_date = latest_state['updated_unknown_properties'][
                'stocked_out_since']
            if so_date:
                # +1 so a stockout starting today counts as one day.
                so_days = (date.today() - dateparse.string_to_datetime(
                    so_date).date()).days + 1
            else:
                so_days = 0
            inactive_site = False
        else:
            so_days = None
        if so_days is not None:
            stockout_days.append(so_days)
            data.append(so_days)
        else:
            # em dash placeholder for "no data"
            data.append(u'\u2014')
    # Minimum across products -- presumably days on which *every* product
    # was stocked out (assumes contiguous stockouts).
    combined_stockout_days = min(
        stockout_days) if stockout_days else None
    data.append(combined_stockout_days
                if combined_stockout_days is not None else u'\u2014')
    if self.HIDE_NODATA_LOCS and inactive_site:
        return None
    return data
def force_to_datetime(val):
    """Forces a date, string, or datetime to a datetime."""
    if not val:
        return val
    if isinstance(val, datetime.datetime):
        return val
    if isinstance(val, datetime.date):
        # Promote a plain date to midnight of that day.
        return datetime.datetime.combine(val, datetime.time())
    if isinstance(val, six.string_types):
        from corehq.util.python_compatibility import soft_assert_type_text
        soft_assert_type_text(val)
        return string_to_datetime(val)
    raise ValueError("object must be date or datetime!")
def apply_updates(self, date, referral_block):
    """Apply the update action in ``referral_block`` to this referral.

    No-op (with a warning) when the block carries no update action or its
    type does not match this referral's type.
    """
    if not const.REFERRAL_ACTION_UPDATE in referral_block:
        logging.warn(
            "No update action found in referral block, nothing to be applied"
        )
        return
    update_block = referral_block[const.REFERRAL_ACTION_UPDATE]
    if not self.type == update_block[const.REFERRAL_TAG_TYPE]:
        logging.warn(
            "Tried to update from a block with a mismatched type!")
        return
    # Only ever move modified_on forward.
    if date > self.modified_on:
        self.modified_on = date
    # NOTE(review): the followup date is read from the outer referral_block,
    # not update_block -- matches from_block's layout, but confirm intended.
    if const.REFERRAL_TAG_FOLLOWUP_DATE in referral_block:
        self.followup_on = parsing.string_to_datetime(
            referral_block[const.REFERRAL_TAG_FOLLOWUP_DATE])
    if const.REFERRAL_TAG_DATE_CLOSED in update_block:
        self.closed = True
        self.closed_on = parsing.string_to_datetime(
            update_block[const.REFERRAL_TAG_DATE_CLOSED])
def row(doc):
    # Build one report row from a stock report doc: outlet metadata,
    # received date, submitting user, then a cell per (action, product).
    transactions = dict(((tx['action'], tx['product']), tx['value'])
                        for tx in get_transactions(doc, False))
    location = locs[leaf_loc(doc)]
    data = outlet_metadata(location, ancestry)
    data.append(dateparse.string_to_datetime(doc['received_on']).strftime('%Y-%m-%d'))
    data.append(CommCareUser.get(doc['form']['meta']['userID']).username_in_report)
    for product in products:
        for action in self.ordered_actions(ACTION_ORDERING):
            # em dash placeholder when no transaction exists for the cell
            data.append(transactions.get((action, product['_id']), u'\u2014'))
    return data
def _clean(meta_block):
    """Drop timeStart/timeEnd entries that are empty or unparseable --
    couchdbkit chokes on dates that aren't actually dates."""
    ret = copy(dict(meta_block))
    if meta_block:
        for key in ("timeStart", "timeEnd"):
            if key not in meta_block:
                continue
            if not meta_block[key]:
                # it was empty, also a failure
                del ret[key]
                continue
            try:
                string_to_datetime(meta_block[key])
            except ValueError:
                # we couldn't parse it
                del ret[key]
    return ret
def _new_update_from_case_update(self, case_update, xformdoc):
    """Apply a parsed case update (its actions and metadata) to this case,
    then rebuild the case state."""
    assert not case_update.has_referrals()
    # Fall back to "now" when the update carries no modification time.
    mod_date = parsing.string_to_datetime(case_update.modified_on_str) \
        if case_update.modified_on_str else datetime.utcnow()
    # get actions and apply them
    for action in case_update.actions:
        if action.action_type_slug == const.CASE_ACTION_CREATE:
            self.apply_create_block(action, xformdoc, mod_date, case_update.user_id)
        else:
            case_action = CommCareCaseAction.from_parsed_action(
                mod_date,
                case_update.user_id,
                xformdoc,
                action,
            )
            self.actions.append(case_action)
    self.rebuild(strict=False)
    # override any explicit properties from the update
    if self.modified_on is None or mod_date > self.modified_on:
        self.modified_on = mod_date
    if case_update.creates_case():
        # case_update.get_create_action() seems to sometimes return an action with all properties set to none,
        # so set opened_by and opened_on here
        if not self.opened_on:
            self.opened_on = mod_date
        if not self.opened_by:
            self.opened_by = case_update.user_id
    if case_update.closes_case():
        self.closed_by = case_update.user_id
    if case_update.user_id:
        self.user_id = case_update.user_id
    if case_update.version:
        self.version = case_update.version
def _clean(meta_block):
    """Return a sanitized copy of a form's meta block.

    Strips XML attributes ('@...' keys), coerces appVersion/location to
    text, validates/normalizes timeStart and timeEnd, and flattens any
    dict values into "k:v, k:v" strings.
    """
    ret = copy(dict(meta_block))

    # remove attributes from the meta block.
    # BUG FIX: iterate over a snapshot of the keys -- deleting entries
    # while iterating the live view raises RuntimeError on Python 3
    # (on Python 2, .keys() was already a list, so behavior is unchanged)
    for key in list(ret.keys()):
        if key.startswith('@'):
            del ret[key]

    # couchdbkit erroneously converts appVersion to a Decimal just because
    # it is possible (due to it being within a "dynamic" property)
    # (see https://github.com/benoitc/couchdbkit/blob/a23343e539370cffcf8b0ce483c712911bb022c1/couchdbkit/schema/properties.py#L1038)
    ret['appVersion'] = get_text(meta_block.get('appVersion'))
    ret['location'] = get_text(meta_block.get('location'))

    # couchdbkit chokes on dates that aren't actually dates
    # so check their validity before passing them up
    if meta_block:
        for key in ("timeStart", "timeEnd"):
            if key in meta_block:
                if meta_block[key]:
                    try:
                        # try to parse to ensure correctness
                        parsed = string_to_datetime(meta_block[key])
                        # and set back in the right format in case it was
                        # a date, not a datetime
                        ret[key] = json_format_datetime(parsed)
                    except ValueError:
                        # we couldn't parse it
                        del ret[key]
                else:
                    # it was empty, also a failure
                    del ret[key]

    # also clean dicts on the return value, since those are not allowed
    # (values are only reassigned here, never deleted, so iterating the
    # live dict is safe)
    for key in ret:
        if isinstance(ret[key], dict):
            ret[key] = ", ".join(
                "%s:%s" % (k, v)
                for k, v in ret[key].items()
            )
    return ret
def _attach_shared_props(doc):
    # attaches shared properties of the request to the document.
    # used on forms and errors
    doc['submit_ip'] = get_ip(request)
    doc['path'] = request.path

    # if you have OpenRosaMiddleware running the headers appear here
    if hasattr(request, 'openrosa_headers'):
        doc['openrosa_headers'] = request.openrosa_headers

    # if you have SyncTokenMiddleware running the headers appear here
    if hasattr(request, 'last_sync_token'):
        doc['last_sync_token'] = request.last_sync_token

    # a hack allowing you to specify the submit time to use
    # instead of the actual time receiver
    # useful for migrating data
    received_on = request.META.get('HTTP_X_SUBMIT_TIME')
    date_header = request.META.get('HTTP_DATE')
    if received_on:
        # NOTE(review): attribute-style assignment, unlike the dict-style
        # writes above -- presumably doc is a couch Document where both
        # are equivalent; confirm before changing
        doc.received_on = string_to_datetime(received_on)
    if date_header:
        # comes in as:
        # Mon, 11 Apr 2011 18:24:43 GMT
        # goes out as:
        # 2011-04-11T18:24:43Z
        try:
            date = datetime.strptime(date_header, "%a, %d %b %Y %H:%M:%S GMT")
            date = datetime.strftime(date, "%Y-%m-%dT%H:%M:%SZ")
        except ValueError:
            # narrowed from a bare "except:" -- strptime signals an
            # unparseable header with ValueError; fall back to the raw value
            logging.error("Receiver app: incoming submission has a date header that we can't parse: '%s'" % date_header)
            date = date_header
        doc['date_header'] = date

    return doc
def get_messages(self, language_code=None):
    """Return reminder messages for AWC locations where no user has
    reported a VHND date within the last 37 days.
    """
    now_date = self.now.date()

    # Users whose form reports a recent-enough VHND date.
    user_ids_with_forms_in_time_frame = set()
    for user_id, forms in self.forms.items():
        # assumes forms[0] is the relevant form for this user -- TODO confirm
        vhnd_date = forms[0]['form'].get('vhsnd_date_past_month')
        if vhnd_date is None:
            continue
        if (now_date - string_to_datetime(vhnd_date).date()).days < 37:
            user_ids_with_forms_in_time_frame.add(user_id)

    # AWCs where none of the assigned users have a qualifying form.
    awc_ids = {
        loc for loc, user_ids in self.user_ids_by_location_id.items()
        if user_ids.isdisjoint(user_ids_with_forms_in_time_frame)
    }

    messages = []
    if awc_ids:
        awc_names = {self.awc_locations[awc] for awc in awc_ids}
        # BUG FIX: sort the names -- joining a raw set produced an
        # arbitrary, run-dependent ordering in the rendered message
        context = {'location_names': ', '.join(sorted(awc_names))}
        messages.append(
            self.render_template(context, language_code=language_code))
    return messages
def from_dict(cls, data):
    """Build an instance from a dict carrying 'startdate' and 'enddate'
    date strings.

    BUG FIX: previously both values were passed to a single
    string_to_datetime() call -- cls(string_to_datetime(data['startdate'],
    data['enddate'])) -- so the end date was fed to the parser as a bogus
    second argument and cls received only one value. Parse each date
    separately and pass both to the constructor.
    """
    return cls(
        string_to_datetime(data['startdate']),
        string_to_datetime(data['enddate']),
    )
return tag_of(node), attrs NS = "{%s}" % V2_NAMESPACE updates = {} fields = {"update": updates} for node in case.find(NS + "create") or []: tag = tag_of(node) if tag in cls._built_ins: fields[tag] = node.text # can create node have date_opened child node? for node in case.find(NS + "update") or []: tag = tag_of(node) if tag in cls._built_ins or tag == "external_id": fields[tag] = node.text elif tag == "date_opened": fields[tag] = string_to_datetime( node.text).replace(tzinfo=None) else: # can this be a hierarchical structure? if yes, how to decode? updates[tag] = node.text return cls(case_id=case.get("case_id"), date_modified=string_to_datetime( case.get("date_modified")).replace(tzinfo=None), user_id=case.get("user_id"), index=dict( index_tuple(x) for x in case.find(NS + "index") or []), **fields) def as_text(self): return self.as_bytes().decode('utf-8') def as_bytes(self):