def split_outgoings(apps, schema_editor):
    """
    Data migration: converts each multi-recipient Outgoing into one
    single-recipient Outgoing per contact. The original row is re-used for
    the first contact; every additional contact gets a cloned row.
    """
    Outgoing = apps.get_model('msgs', 'Outgoing')

    all_ids = Outgoing.objects.all().values_list('pk', flat=True)

    # process in batches to keep memory bounded
    for batch_ids in chunks(all_ids, 1000):
        batch = (
            Outgoing.objects.filter(pk__in=batch_ids)
            .select_related('org', 'partner')
            .prefetch_related('contacts')
        )

        for msg in batch:
            recipients = list(msg.contacts.all())

            if recipients:
                # first recipient keeps the original row
                msg.contact = recipients[0]
                msg.save(update_fields=('contact',))

            if len(recipients) > 1:
                # clone the row for each remaining recipient
                for extra in recipients[1:]:
                    Outgoing.objects.create(
                        org=msg.org,
                        partner=msg.partner,
                        text=msg.text,
                        backend_broadcast_id=msg.backend_broadcast_id,
                        contact=extra,
                        urns=msg.urns,
                        created_by=msg.created_by,
                        created_on=msg.created_on,
                        case=msg.case,
                    )

                print("Split outgoing message #%d into %d single-recipient messages" % (msg.pk, len(recipients)))
def populate_reply_to(apps, schema_editor):
    """
    Data migration: sets Outgoing.reply_to to the most recent incoming
    message that preceded it on the same contact's timeline.
    """
    Contact = apps.get_model('contacts', 'Contact')
    Message = apps.get_model('msgs', 'Message')
    Outgoing = apps.get_model('msgs', 'Outgoing')

    # ids of all contacts with outgoing messages
    contact_ids = list(Contact.objects.exclude(outgoing_messages=None).values_list('pk', flat=True))

    num_processed = 0  # number of contacts whose timelines we've processed
    num_updated = 0  # number of outgoing messages we've updated

    for batch_ids in chunks(contact_ids, 1000):
        batch = Contact.objects.filter(pk__in=batch_ids).prefetch_related('incoming_messages', 'outgoing_messages')

        for contact in batch:
            # merge both directions of traffic and order chronologically
            events = list(contact.incoming_messages.all()) + list(contact.outgoing_messages.all())
            events.sort(key=lambda e: e.created_on)

            last_incoming = None
            for event in events:
                if isinstance(event, Message):
                    last_incoming = event
                elif isinstance(event, Outgoing):
                    if last_incoming:
                        event.reply_to = last_incoming
                        event.save(update_fields=('reply_to',))
                        num_updated += 1
                    else:
                        print("WARNING: didn't find previous incoming message for outgoing message #%d" % event.pk)

        num_processed += len(batch_ids)
        print("Processed %d of %d contacts with outgoing messages" % (num_processed, len(contact_ids)))

    if num_updated:
        print("Updated %d outgoing messages with reply_tos" % num_updated)
def render_book(self, book, search):
    """
    Renders the searched-for cases into the given XLS workbook, one row per
    case, starting a new sheet every 65535 rows (the XLS row limit).

    Returns the workbook.
    """
    from casepro.contacts.models import Field

    base_fields = [
        "Message On", "Opened On", "Closed On", "Assigned Partner", "Labels",
        "Summary", "Messages Sent", "Messages Received", "Contact"
    ]
    contact_fields = Field.get_all(self.org, visible=True)
    all_fields = base_fields + [f.label for f in contact_fields]

    # load all cases to be exported
    cases = Case.search(self.org, self.created_by, search)
    cases = cases.select_related('initial_message')  # need for "Message On"
    cases = cases.annotate(incoming_count=Count('incoming_messages', distinct=True),
                           outgoing_count=Count('outgoing_messages', distinct=True))

    def add_sheet(num):
        # new sheet with the header row pre-written
        sheet = book.add_sheet(unicode(_("Cases %d" % num)))
        for col in range(len(all_fields)):
            field = all_fields[col]
            sheet.write(0, col, unicode(field))
        return sheet

    # even if there are no cases - still add a sheet
    if not cases:
        add_sheet(1)
    else:
        sheet_number = 1
        for case_chunk in chunks(cases, 65535):
            current_sheet = add_sheet(sheet_number)

            row = 1
            for case in case_chunk:
                current_sheet.write(row, 0, self.excel_datetime(case.initial_message.created_on), self.DATE_STYLE)
                current_sheet.write(row, 1, self.excel_datetime(case.opened_on), self.DATE_STYLE)
                current_sheet.write(row, 2, self.excel_datetime(case.closed_on), self.DATE_STYLE)
                # fix: assignee name is plain text - it was mistakenly written
                # with DATE_STYLE (copy-paste from the date columns above)
                current_sheet.write(row, 3, case.assignee.name)
                current_sheet.write(row, 4, ', '.join([l.name for l in case.labels.all()]))
                current_sheet.write(row, 5, case.summary)
                current_sheet.write(row, 6, case.outgoing_count)
                current_sheet.write(row, 7, case.incoming_count - 1)  # subtract 1 for the initial message
                current_sheet.write(row, 8, case.contact.uuid)

                fields = case.contact.get_fields()
                for cf in range(len(contact_fields)):
                    contact_field = contact_fields[cf]
                    current_sheet.write(row, len(base_fields) + cf, fields.get(contact_field.key, None))
                row += 1
            sheet_number += 1

    return book
def populate_is_stopped(apps, schema_editor):
    """
    Data migration: initializes is_stopped to False for every contact,
    updating in batches of 5000.
    """
    Contact = apps.get_model('contacts', 'Contact')

    all_ids = list(Contact.objects.values_list('id', flat=True))
    total = len(all_ids)
    num_updated = 0

    for batch_ids in chunks(all_ids, 5000):
        Contact.objects.filter(pk__in=batch_ids).update(is_stopped=False)

        num_updated += len(batch_ids)
        print("Populated is_stopped for %d of %d contacts" % (num_updated, total))
def backfill_action_org(apps, schema_editor):
    """
    Data migration: copies each case's org onto all of that case's actions.
    """
    Case = apps.get_model("cases", "Case")

    total_cases = Case.objects.count()
    num_updated = 0

    print(f"Found {total_cases} to update actions for...")

    for batch in chunks(Case.objects.all(), 1000):
        # each batch of cases is updated in a single transaction
        with transaction.atomic():
            for c in batch:
                c.actions.update(org=c.org)
                num_updated += 1

        print(f" > Updated {num_updated} cases of {total_cases}")
def push_outgoing(self, org, outgoing, as_broadcast=False):
    """
    Pushes the given outgoing messages to the backend.

    Messages addressed to mailto: URNs are sent via the CasePro email system
    (RapidPro currently doesn't send emails). The remainder are created as
    one broadcast per message, or - when as_broadcast is True - grouped into
    multi-recipient broadcasts of up to BATCH_SIZE messages each.
    """
    client = self._get_client(org)

    # RapidPro currently doesn't send emails so we use the CasePro email system to send those instead
    for_backend = []
    for msg in outgoing:
        if msg.urn and msg.urn.startswith("mailto:"):
            to_address = msg.urn.split(":", 1)[1]
            send_raw_email([to_address], "New message", msg.text, None)
        else:
            for_backend.append(msg)

    if not for_backend:
        return

    if as_broadcast:
        # we might not be able to send all as a single broadcast, so we batch
        for batch in chunks(for_backend, self.BATCH_SIZE):
            contact_uuids = []
            urns = []
            for msg in batch:
                if msg.contact:
                    contact_uuids.append(msg.contact.uuid)
                if msg.urn:
                    urns.append(msg.urn)

            # fix: take the text from a message actually in this batch -
            # previously this read outgoing[0], which may have been a mailto:
            # message that was routed to email and never part of the broadcast
            text = batch[0].text

            broadcast = client.create_broadcast(text=text, contacts=contact_uuids, urns=urns)

            # keep the in-memory objects consistent with the bulk db update below
            for msg in batch:
                msg.backend_broadcast_id = broadcast.id

            Outgoing.objects.filter(pk__in=[o.id for o in batch]).update(backend_broadcast_id=broadcast.id)
    else:
        for msg in for_backend:
            contact_uuids = [msg.contact.uuid] if msg.contact else []
            urns = [msg.urn] if msg.urn else []

            broadcast = client.create_broadcast(text=msg.text, contacts=contact_uuids, urns=urns)

            msg.backend_broadcast_id = broadcast.id
            msg.save(update_fields=("backend_broadcast_id",))
def populate_reply_to(apps, schema_editor):
    """
    Data migration: points each Outgoing at the incoming Message that most
    recently preceded it on its contact's timeline.
    """
    Contact = apps.get_model("contacts", "Contact")
    Message = apps.get_model("msgs", "Message")
    Outgoing = apps.get_model("msgs", "Outgoing")

    # ids of all contacts with outgoing messages
    contact_ids = list(Contact.objects.exclude(outgoing_messages=None).values_list("pk", flat=True))

    num_processed = 0  # number of contact's whose timelines we've processed
    num_updated = 0  # number of outgoing messages we've updated

    for id_batch in chunks(contact_ids, 1000):
        contact_batch = Contact.objects.filter(pk__in=id_batch).prefetch_related("incoming_messages", "outgoing_messages")

        for contact in contact_batch:
            incoming = list(contact.incoming_messages.all())
            outgoing = list(contact.outgoing_messages.all())
            timeline = sorted(incoming + outgoing, key=lambda m: m.created_on)

            prev_incoming = None
            for item in timeline:
                if isinstance(item, Message):
                    # remember the latest incoming message seen so far
                    prev_incoming = item
                    continue

                if not isinstance(item, Outgoing):
                    continue

                if prev_incoming:
                    item.reply_to = prev_incoming
                    item.save(update_fields=("reply_to",))
                    num_updated += 1
                else:
                    print("WARNING: didn't find previous incoming message for outgoing message #%d" % item.pk)

        num_processed += len(id_batch)
        print("Processed %d of %d contacts with outgoing messages" % (num_processed, len(contact_ids)))

    if num_updated:
        print("Updated %d outgoing messages with reply_tos" % num_updated)
def migrate_language(Contact, FAQ):
    """
    Migrates contact and FAQ language codes from ISO-639-2 to ISO-639-3,
    skipping rows whose language is unset, empty or already "eng".
    """
    has_other_language = ~(Q(language=None) | Q(language="") | Q(language="eng"))

    contacts = Contact.objects.filter(has_other_language)
    total_contacts = contacts.count()

    if total_contacts:
        print("Found %d contacts to migrate languages for..." % total_contacts)

        num_updated = 0
        for batch in chunks(contacts.only("id"), 1000):
            batch_ids = [c.id for c in batch]

            with transaction.atomic():
                # re-fetch with org joined since the timezone is needed for conversion
                for contact in Contact.objects.filter(id__in=batch_ids).select_related("org"):
                    converted = iso6392_to_iso6393(contact.language, contact.org.timezone)
                    if converted != contact.language:
                        contact.language = converted
                        contact.save(update_fields=("language",))
                        num_updated += 1

            print(" > Updated %d of %d contacts" % (num_updated, total_contacts))

    faqs = list(FAQ.objects.filter(has_other_language).select_related("org"))
    if len(faqs):
        print("Found %d FAQs to migrate languages for..." % len(faqs))

        for faq in faqs:
            converted = iso6392_to_iso6393(faq.language, faq.org.timezone)
            if converted != faq.language:
                faq.language = converted
                faq.save(update_fields=("language",))
def do_export(self):
    """
    Does actual export. Called from a celery task as it may require a lot of API calls to grab all messages.

    Builds an XLS workbook of all messages matching this export's search,
    saves it to default storage, records the filename on this object, and
    emails the creator a download link.
    """
    from xlwt import Workbook, XFStyle

    book = Workbook()

    # shared cell style for all datetime columns
    date_style = XFStyle()
    date_style.num_format_str = 'DD-MM-YYYY HH:MM:SS'

    # fixed columns first, then one column per org contact field
    base_fields = ["Time", "Message ID", "Flagged", "Labels", "Text", "Contact"]
    contact_fields = self.org.get_contact_fields()
    all_fields = base_fields + contact_fields
    label_map = {l.name: l for l in Label.get_all(self.org)}

    client = self.org.get_temba_client()
    search = self.get_search()

    # fetch all messages to be exported
    messages = Message.search(self.org, search, None)

    # extract all unique contacts in those messages
    contact_uuids = set()
    for msg in messages:
        contact_uuids.add(msg.contact)

    # fetch all contacts in batches of 25 and organize by UUID
    contacts_by_uuid = {}
    for uuid_chunk in chunks(list(contact_uuids), 25):
        for contact in client.get_contacts(uuids=uuid_chunk):
            contacts_by_uuid[contact.uuid] = contact

    def add_sheet(num):
        # new sheet with the header row pre-written
        sheet = book.add_sheet(unicode(_("Messages %d" % num)))
        for col in range(len(all_fields)):
            field = all_fields[col]
            sheet.write(0, col, unicode(field))
        return sheet

    # even if there are no messages - still add a sheet
    if not messages:
        add_sheet(1)
    else:
        sheet_number = 1
        # 65535 data rows + 1 header row is the XLS per-sheet row limit
        for msg_chunk in chunks(messages, 65535):
            current_sheet = add_sheet(sheet_number)

            row = 1
            for msg in msg_chunk:
                # xlwt can't store tz-aware datetimes, so normalize to naive UTC
                created_on = msg.created_on.astimezone(pytz.utc).replace(tzinfo=None)
                flagged = SYSTEM_LABEL_FLAGGED in msg.labels
                labels = ', '.join([label_map[l_name].name for l_name in msg.labels if l_name in label_map])
                contact = contacts_by_uuid.get(msg.contact, None)  # contact may no longer exist in RapidPro

                current_sheet.write(row, 0, created_on, date_style)
                current_sheet.write(row, 1, msg.id)
                current_sheet.write(row, 2, 'Yes' if flagged else 'No')
                current_sheet.write(row, 3, labels)
                current_sheet.write(row, 4, msg.text)
                current_sheet.write(row, 5, msg.contact)

                # contact field columns follow the base columns
                for cf in range(len(contact_fields)):
                    if contact:
                        contact_field = contact_fields[cf]
                        current_sheet.write(row, 6 + cf, contact.fields.get(contact_field, None))
                    else:
                        current_sheet.write(row, 6 + cf, None)
                row += 1
            sheet_number += 1

    # write the workbook to a temp file and copy into default storage
    temp = NamedTemporaryFile(delete=True)
    book.save(temp)
    temp.flush()

    filename = 'orgs/%d/message_exports/%s.xls' % (self.org.id, random_string(20))
    default_storage.save(filename, File(temp))

    self.filename = filename
    self.save(update_fields=('filename',))

    # notify the requesting user with a download link
    subject = "Your messages export is ready"
    download_url = settings.SITE_HOST_PATTERN % self.org.subdomain + reverse('cases.messageexport_read', args=[self.pk])

    send_email(self.created_by.username, subject, 'cases/email/message_export', dict(link=download_url))

    # force a gc
    import gc
    gc.collect()
def do_export(self):
    """
    Does actual export. Called from a celery task as it may require a lot of API calls to grab all messages.

    Renders all matching messages into an XLS workbook, stores the file,
    saves the generated filename on this export, and emails the creator a
    link to download it.
    """
    from xlwt import Workbook, XFStyle

    book = Workbook()

    # one style object reused for every datetime cell
    date_style = XFStyle()
    date_style.num_format_str = 'DD-MM-YYYY HH:MM:SS'

    # header: fixed columns followed by the org's contact fields
    base_fields = ["Time", "Message ID", "Flagged", "Labels", "Text", "Contact"]
    contact_fields = self.org.get_contact_fields()
    all_fields = base_fields + contact_fields
    label_map = {l.name: l for l in Label.get_all(self.org)}

    client = self.org.get_temba_client()
    search = self.get_search()

    # fetch all messages to be exported
    messages = Message.search(self.org, search, None)

    # extract all unique contacts in those messages
    contact_uuids = set()
    for msg in messages:
        contact_uuids.add(msg.contact)

    # fetch all contacts in batches of 25 and organize by UUID
    contacts_by_uuid = {}
    for uuid_chunk in chunks(list(contact_uuids), 25):
        for contact in client.get_contacts(uuids=uuid_chunk):
            contacts_by_uuid[contact.uuid] = contact

    def add_sheet(num):
        # creates a sheet and writes the header row
        sheet = book.add_sheet(unicode(_("Messages %d" % num)))
        for col in range(len(all_fields)):
            field = all_fields[col]
            sheet.write(0, col, unicode(field))
        return sheet

    # even if there are no messages - still add a sheet
    if not messages:
        add_sheet(1)
    else:
        sheet_number = 1
        # chunk at the XLS per-sheet row limit (65535 data rows + header)
        for msg_chunk in chunks(messages, 65535):
            current_sheet = add_sheet(sheet_number)

            row = 1
            for msg in msg_chunk:
                # strip tzinfo after converting to UTC - xlwt needs naive datetimes
                created_on = msg.created_on.astimezone(pytz.utc).replace(tzinfo=None)
                flagged = SYSTEM_LABEL_FLAGGED in msg.labels
                labels = ', '.join([label_map[l_name].name for l_name in msg.labels if l_name in label_map])
                contact = contacts_by_uuid.get(msg.contact, None)  # contact may no longer exist in RapidPro

                current_sheet.write(row, 0, created_on, date_style)
                current_sheet.write(row, 1, msg.id)
                current_sheet.write(row, 2, 'Yes' if flagged else 'No')
                current_sheet.write(row, 3, labels)
                current_sheet.write(row, 4, msg.text)
                current_sheet.write(row, 5, msg.contact)

                # contact field values go in the columns after the base fields
                for cf in range(len(contact_fields)):
                    if contact:
                        contact_field = contact_fields[cf]
                        current_sheet.write(row, 6 + cf, contact.fields.get(contact_field, None))
                    else:
                        current_sheet.write(row, 6 + cf, None)
                row += 1
            sheet_number += 1

    # persist workbook: temp file first, then copy into default storage
    temp = NamedTemporaryFile(delete=True)
    book.save(temp)
    temp.flush()

    filename = 'orgs/%d/message_exports/%s.xls' % (self.org.id, random_string(20))
    default_storage.save(filename, File(temp))

    self.filename = filename
    self.save(update_fields=('filename',))

    # email the requesting user a download link
    subject = "Your messages export is ready"
    download_url = settings.SITE_HOST_PATTERN % self.org.subdomain + reverse('cases.messageexport_read', args=[self.pk])

    send_email(self.created_by.username, subject, 'cases/email/message_export', dict(link=download_url))

    # force a gc
    import gc
    gc.collect()
def unflag_messages(self, org, messages):
    """
    Un-flags the given messages on the backend by removing the system
    'Flagged' label, in batches of BATCH_SIZE.
    """
    client = self._get_client(org)

    for batch in chunks(messages, self.BATCH_SIZE):
        backend_ids = [m.backend_id for m in batch]
        client.bulk_unlabel_messages(messages=backend_ids, label_name=SYSTEM_LABEL_FLAGGED)
def restore_messages(self, org, messages):
    """
    Restores (un-archives) the given messages on the backend, in batches
    of BATCH_SIZE.
    """
    client = self._get_client(org)

    for batch in chunks(messages, self.BATCH_SIZE):
        backend_ids = [m.backend_id for m in batch]
        client.bulk_restore_messages(messages=backend_ids)
def unlabel_messages(self, org, messages, label):
    """
    Removes the given label (by UUID) from the given messages on the
    backend, in batches of BATCH_SIZE.
    """
    client = self._get_client(org)

    for batch in chunks(messages, self.BATCH_SIZE):
        backend_ids = [m.backend_id for m in batch]
        client.bulk_unlabel_messages(messages=backend_ids, label=label.uuid)