def form_entries_to_csv_writer(csv_writer, form):
    """
    Write form entries to csv_writer.

    Emits one header row (entry time, one column per visible field, plus
    Pricing/Price/Payment Method columns when the form takes custom
    payments), then one data row per FormEntry ordered by primary key.
    """
    columns = []
    # Maps field id -> column index in each row (offset by 1 for entry_time).
    field_indexes = {}
    # Ids of FileField-type fields; only referenced by the commented-out
    # download-URL logic below.
    file_field_ids = []
    site_url = get_setting('site', 'global', 'siteurl')
    entry_time_name = FormEntry._meta.get_field("entry_time").verbose_name
    columns.append(str(entry_time_name))
    for field in form.fields.all().order_by('position', 'id'):
        # Description/Header pseudo-fields carry no value, so skip them.
        if not field.field_type.split('.')[-1] in ['Description', 'Header']:
            columns.append(field.label)
            field_indexes[field.id] = 1 + len(field_indexes)
            if field.field_type == "FileField":
                file_field_ids.append(field.id)
    if form.custom_payment:
        columns.append(str("Pricing"))
        columns.append(str("Price"))
        columns.append(str("Payment Method"))
    csv_writer.writerow(columns)
    # Loop through each field value order by entry, building up each
    # entry as a row.
    entries = FormEntry.objects.filter(form=form).order_by('pk')
    for entry in entries:
        values = FieldEntry.objects.filter(entry=entry)
        row = [""] * len(columns)
        entry_time = entry.entry_time.strftime("%Y-%m-%d %H:%M:%S")
        if form.custom_payment:
            # Payment columns occupy the tail of the row.
            if entry.pricing:
                row[-3] = entry.pricing.label
                if not entry.pricing.price:
                    # A falsy pricing price means the entrant supplied a
                    # custom amount.
                    row[-2] = entry.custom_price
                else:
                    row[-2] = entry.pricing.price
            row[-1] = entry.payment_method
        row[0] = entry_time
        for field_entry in values:
            if not field_entry.field.field_type.split('.')[-1] in [
                    'Description', 'Header'
            ]:
                value = escape_csv(field_entry.value)
                # Create download URL for file fields.
                # if field_entry.field_id in file_field_ids:
                #     url = reverse("admin:forms_form_file", args=(field_entry.id,))
                #     value = '{site_url}{url}'.format(site_url=site_url, url=url)
                # Only use values for fields that currently exist for the form.
                try:
                    row[field_indexes[field_entry.field_id]] = value
                except KeyError:
                    pass
        # Write out the row.
        csv_writer.writerow(row)
def run(self, **kwargs):
    """
    Create the csv export of all active forms.

    Each data row holds the form's own columns, followed by one column group
    per attached form field and per pricing, padded with blanks so every row
    matches the widest form's column count.
    """
    form_fields = [
        'title',
        'slug',
        'intro',
        'response',
        'email_text',
        'subject_template',
        'send_email',
        'email_from',
        'email_copies',
        'completion_url',
        'custom_payment',
        'payment_methods',
        'allow_anonymous_view',
        'allow_user_view',
        'allow_member_view',
        'allow_user_edit',
        'allow_member_edit',
        'create_dt',
        'update_dt',
        'creator',
        'creator_username',
        'owner',
        'owner_username',
        'status',
        'status_detail',
    ]
    field_fields = [
        'label',
        'field_type',
        'field_function',
        'required',
        'visible',
        'choices',
        'position',
        'default',
    ]
    pricing_fields = [
        'label',
        'price',
    ]
    forms = Form.objects.filter(status=True)
    # The widest field/pricing counts decide how many column groups the
    # header needs.
    max_fields = forms.annotate(num_fields=Count('fields')).aggregate(
        Max('num_fields'))['num_fields__max']
    max_pricings = forms.annotate(num_pricings=Count('pricing')).aggregate(
        Max('num_pricings'))['num_pricings__max']
    file_name = 'forms.csv'
    data_row_list = []
    for form in forms:
        data_row = []
        # form setup
        form_d = full_model_to_dict(form)
        for field in form_fields:
            if field == 'payment_methods':
                value = [m.human_name for m in form.payment_methods.all()]
            else:
                value = form_d[field]
            # Collapse newlines so each record stays on a single csv row.
            value = str(value).replace(os.linesep, ' ').rstrip()
            value = escape_csv(value)
            data_row.append(value)
        if form.fields.all():
            # field setup
            for field in form.fields.all():
                field_d = full_model_to_dict(field)
                for f in field_fields:
                    value = field_d[f]
                    value = str(value).replace(os.linesep, ' ').rstrip()
                    value = escape_csv(value)
                    data_row.append(value)
        # fill out the rest of the field columns with blanks
        if form.fields.all().count() < max_fields:
            for i in range(0, max_fields - form.fields.all().count()):
                for f in field_fields:
                    data_row.append('')
        if form.pricing_set.all():
            # pricing setup
            for pricing in form.pricing_set.all():
                pricing_d = full_model_to_dict(pricing)
                for f in pricing_fields:
                    value = pricing_d[f]
                    value = str(value).replace(os.linesep, ' ').rstrip()
                    value = escape_csv(value)
                    data_row.append(value)
        # fill out the rest of the pricing columns with blanks
        if form.pricing_set.all().count() < max_pricings:
            for i in range(0, max_pricings - form.pricing_set.all().count()):
                for f in pricing_fields:
                    data_row.append('')
        data_row_list.append(data_row)
    # Build the header: form columns, then "field N x" / "pricing N x" groups.
    fields = form_fields
    for i in range(0, max_fields):
        fields = fields + ["field %s %s" % (i, f) for f in field_fields]
    for i in range(0, max_pricings):
        fields = fields + [
            "pricing %s %s" % (i, f) for f in pricing_fields
        ]
    return render_csv(file_name, fields, data_row_list)
def run(self, model, fields, file_name, **kwargs):
    """
    Create the csv export for an arbitrary model.

    Args:
        model: model class to export; instances with ``status=True`` only.
        fields: field names to include; Tendenci base fields are appended
            automatically when the model subclasses TendenciBaseModel.
        file_name: name of the generated csv file.
        **kwargs: optional ``start_dt``/``end_dt`` (MM/DD/YYYY strings,
            both required to take effect) restricting by ``update_dt``,
            and ``include_files`` to bundle resume files into a zip.

    Returns:
        An HttpResponse — a zip attachment for resume exports with files,
        otherwise the rendered csv.

    Raises:
        Exception: when a supplied date does not parse as MM/DD/YYYY.
    """
    if issubclass(model, TendenciBaseModel):
        fields = fields + [
            'allow_anonymous_view',
            'allow_user_view',
            'allow_member_view',
            'allow_user_edit',
            'allow_member_edit',
            'create_dt',
            'update_dt',
            'creator',
            'creator_username',
            'owner',
            'owner_username',
            'status',
            'status_detail',
        ]
    items = model.objects.filter(status=True)
    start_dt = kwargs.get('start_dt', None)
    end_dt = kwargs.get('end_dt', None)
    include_files = kwargs.get('include_files', None)
    # The date window only applies when BOTH bounds are supplied (matches
    # the original behavior). Catch only parse failures instead of a bare
    # except, and chain the cause for debuggability.
    if start_dt and end_dt:
        try:
            start_dt = datetime.strptime(start_dt, '%m/%d/%Y')
        except (ValueError, TypeError) as err:
            raise Exception(
                'Please use the following date format MM/DD/YYYY.\n') from err
        try:
            # Make the end bound inclusive of the whole end day.
            end_dt = datetime.strptime(end_dt, '%m/%d/%Y') + timedelta(days=1)
        except (ValueError, TypeError) as err:
            raise Exception(
                'Please use the following date format MM/DD/YYYY.\n') from err
        items = items.filter(update_dt__gte=start_dt, update_dt__lte=end_dt)
    data_row_list = []
    for item in items:
        # get the available fields from the model's meta
        opts = item._meta
        d = {}
        for f in opts.get_fields() + opts.many_to_many:
            if f.name in fields:  # include specified fields only
                if isinstance(f, ManyToManyField):
                    value = [
                        "%s" % obj for obj in f.value_from_object(item)
                    ]
                elif isinstance(f, ForeignKey):
                    value = getattr(item, f.name)
                elif isinstance(f, GenericRelation):
                    generics = f.value_from_object(item).all()
                    value = ["%s" % obj for obj in generics if obj != '']
                    value = ', '.join(value)
                else:
                    value = f.value_from_object(item)
                if value:
                    # Normalize temporal values to fixed string formats.
                    if isinstance(f, DateTimeField):
                        value = value.strftime("%Y-%m-%d %H:%M")
                    elif isinstance(f, DateField):
                        value = value.strftime("%Y-%m-%d")
                    elif isinstance(f, TimeField):
                        value = value.strftime('%H:%M:%S')
                d[f.name] = value
        # append the accumulated values as a data row
        # keep in mind the ordering of the fields
        data_row = []
        for field in fields:
            # clean the derived values into unicode
            value = str(d[field]).rstrip()
            value = escape_csv(value)
            data_row.append(value)
        data_row_list.append(data_row)
    if include_files:
        if model._meta.model_name == 'resume':
            # Write the csv to a temp file, then zip it together with every
            # attached resume file.
            temp_csv = NamedTemporaryFile(mode='w', delete=False)
            csv_writer = csv.writer(temp_csv, delimiter=',')
            csv_writer.writerow(fields)
            for data_row in data_row_list:
                csv_writer.writerow(data_row)
            temp_csv.close()
            temp_zip = NamedTemporaryFile(mode='wb', delete=False)
            zip_fp = zipfile.ZipFile(temp_zip, 'w',
                                     compression=zipfile.ZIP_DEFLATED)
            # handle files
            for item in items:
                if item.resume_file:
                    zip_fp.write(item.resume_file.path,
                                 item.resume_file.name,
                                 zipfile.ZIP_DEFLATED)
            zip_fp.write(temp_csv.name, 'resumes.csv', zipfile.ZIP_DEFLATED)
            zip_fp.close()
            temp_zip.close()
            # set the response for the zip files
            with open(temp_zip.name, 'rb') as f:
                content = f.read()
            response = HttpResponse(content, content_type='application/zip')
            response['Content-Disposition'] = \
                'attachment; filename="export_resumes_%d.zip"' % time()
            # remove the temporary files
            unlink(temp_zip.name)
            unlink(temp_csv.name)
            return response
    return render_csv(file_name, fields, data_row_list)
def run(self, **kwargs):
    """
    Create the csv export of all active navs.

    Each data row holds the nav's own columns followed by one column group
    per nav item, padded with blanks up to the widest nav's item count.
    """
    nav_fields = [
        'title',
        'description',
        'megamenu',
        'allow_anonymous_view',
        'allow_user_view',
        'allow_member_view',
        'allow_user_edit',
        'allow_member_edit',
        'create_dt',
        'update_dt',
        'creator',
        'creator_username',
        'owner',
        'owner_username',
        'status',
        'status_detail',
    ]
    nav_item_fields = [
        'label',
        'title',
        'new_window',
        'css',
        'position',
        'level',
        'page',
        'url',
    ]
    navs = Nav.objects.filter(status=True)
    # The widest nav decides how many nav_item column groups are needed.
    max_nav_items = navs.annotate(num_navitems=Count('navitem')).aggregate(
        Max('num_navitems'))['num_navitems__max']
    file_name = 'navs.csv'
    data_row_list = []
    for nav in navs:
        data_row = []
        # nav setup
        nav_d = full_model_to_dict(nav)
        for field in nav_fields:
            value = nav_d[field]
            # Collapse newlines so each record stays on a single csv row.
            value = str(value).replace(os.linesep, ' ').rstrip()
            value = escape_csv(value)
            data_row.append(value)
        if nav.navitem_set.all():
            # nav_item setup
            for nav_item in nav.navitem_set.all():
                nav_item_d = full_model_to_dict(nav_item)
                for field in nav_item_fields:
                    value = nav_item_d[field]
                    value = str(value).replace(os.linesep, ' ').rstrip()
                    value = escape_csv(value)
                    data_row.append(value)
        # fill out the rest of the nav_item columns with blanks
        if nav.navitem_set.all().count() < max_nav_items:
            for i in range(0,
                           max_nav_items - nav.navitem_set.all().count()):
                for field in nav_item_fields:
                    data_row.append('')
        data_row_list.append(data_row)
    # Build the header: nav columns then "nav_item N x" groups.
    fields = nav_fields
    for i in range(0, max_nav_items):
        fields = fields + [
            "nav_item %s %s" % (i, f) for f in nav_item_fields
        ]
    return render_csv(file_name, fields, data_row_list)
def run(self, **kwargs):
    """
    Create the csv export of all active events.

    Each data row holds the event's own columns, then place, registration
    configuration, speaker, organizer and pricing column groups. Speaker,
    organizer and pricing groups are padded with blanks up to the widest
    event's counts.

    NOTE(review): place/config columns are only appended when the event has
    a place/registration_configuration — rows without them come out shorter
    than the header; confirm whether that misalignment is acceptable.
    """
    event_fields = [
        'entity',
        'type',
        'title',
        'description',
        'all_day',
        'start_dt',
        'end_dt',
        'timezone',
        'private_slug',
        'password',
        'on_weekend',
        'external_url',
        'image',
        'tags',
        'allow_anonymous_view',
        'allow_user_view',
        'allow_member_view',
        'allow_user_edit',
        'allow_member_edit',
        'create_dt',
        'update_dt',
        'creator',
        'creator_username',
        'owner',
        'owner_username',
        'status',
        'status_detail',
    ]
    place_fields = [
        'name',
        'description',
        'address',
        'city',
        'state',
        'zip',
        'country',
        'url',
    ]
    configuration_fields = [
        'payment_method',
        'payment_required',
        'limit',
        'enabled',
        'is_guest_price',
        'use_custom_reg_form',
        'reg_form',
        'bind_reg_form_to_conf_only',
    ]
    speaker_fields = [
        'name',
        'description',
    ]
    organizer_fields = [
        'name',
        'description',
    ]
    pricing_fields = [
        'title',
        'quantity',
        'group',
        'price',
        'reg_form',
        'start_dt',
        'end_dt',
        'allow_anonymous',
        'allow_user',
        'allow_member',
        'status',
    ]
    events = Event.objects.filter(status=True)
    # Widest counts decide how many speaker/organizer/pricing column groups
    # the header needs.
    max_speakers = events.annotate(
        num_speakers=Count('speaker')).aggregate(
        Max('num_speakers'))['num_speakers__max']
    max_organizers = events.annotate(
        num_organizers=Count('organizer')).aggregate(
        Max('num_organizers'))['num_organizers__max']
    max_pricings = events.annotate(
        num_pricings=Count(
            'registration_configuration__regconfpricing')).aggregate(
        Max('num_pricings'))['num_pricings__max']
    file_name = 'events.csv'
    data_row_list = []
    for event in events:
        data_row = []
        # event setup
        event_d = full_model_to_dict(event, fields=event_fields)
        for field in event_fields:
            value = None
            # entity/type are FK columns exported by display name.
            if field == 'entity':
                if event.entity:
                    value = event.entity.entity_name
            elif field == 'type':
                if event.type:
                    value = event.type.name
            elif field in event_d:
                value = event_d[field]
            # Collapse newlines so each record stays on a single csv row.
            value = str(value).replace(os.linesep, ' ').rstrip()
            value = escape_csv(value)
            data_row.append(value)
        if event.place:
            # place setup
            place_d = full_model_to_dict(event.place)
            for field in place_fields:
                value = place_d[field]
                value = str(value).replace(os.linesep, ' ').rstrip()
                value = escape_csv(value)
                data_row.append(value)
        if event.registration_configuration:
            # config setup
            conf_d = full_model_to_dict(event.registration_configuration)
            for field in configuration_fields:
                if field == "payment_method":
                    # M2M: export the queryset's string form.
                    value = event.registration_configuration.payment_method.all()
                else:
                    value = conf_d[field]
                value = str(value).replace(os.linesep, ' ').rstrip()
                value = escape_csv(value)
                data_row.append(value)
        if event.speaker_set.all():
            # speaker setup
            for speaker in event.speaker_set.all():
                speaker_d = full_model_to_dict(speaker)
                for field in speaker_fields:
                    value = speaker_d[field]
                    value = str(value).replace(os.linesep, ' ').rstrip()
                    value = escape_csv(value)
                    data_row.append(value)
        # fill out the rest of the speaker columns
        if event.speaker_set.all().count() < max_speakers:
            for i in range(0,
                           max_speakers - event.speaker_set.all().count()):
                for field in speaker_fields:
                    data_row.append('')
        if event.organizer_set.all():
            # organizer setup
            for organizer in event.organizer_set.all():
                organizer_d = full_model_to_dict(organizer)
                for field in organizer_fields:
                    value = organizer_d[field]
                    value = str(value).replace(os.linesep, ' ').rstrip()
                    value = escape_csv(value)
                    data_row.append(value)
        # fill out the rest of the organizer columns
        if event.organizer_set.all().count() < max_organizers:
            for i in range(0,
                           max_organizers - event.organizer_set.all().count()):
                for field in organizer_fields:
                    data_row.append('')
        reg_conf = event.registration_configuration
        if reg_conf and reg_conf.regconfpricing_set.all():
            # pricing setup
            for pricing in reg_conf.regconfpricing_set.all():
                pricing_d = full_model_to_dict(pricing)
                for field in pricing_fields:
                    value = pricing_d[field]
                    value = str(value).replace(os.linesep, ' ').rstrip()
                    value = escape_csv(value)
                    data_row.append(value)
        # fill out the rest of the pricing columns
        if reg_conf and reg_conf.regconfpricing_set.all().count() < max_pricings:
            for i in range(0, max_pricings -
                           reg_conf.regconfpricing_set.all().count()):
                for field in pricing_fields:
                    data_row.append('')
        data_row_list.append(data_row)
    # Build the header to mirror the row layout above.
    fields = event_fields + ["place %s" % f for f in place_fields]
    fields = fields + ["config %s" % f for f in configuration_fields]
    for i in range(0, max_speakers):
        fields = fields + ["speaker %s %s" % (i, f) for f in speaker_fields]
    for i in range(0, max_organizers):
        fields = fields + ["organizer %s %s" % (i, f)
                           for f in organizer_fields]
    for i in range(0, max_pricings):
        fields = fields + ["pricing %s %s" % (i, f) for f in pricing_fields]
    return render_csv(file_name, fields, data_row_list)
def run(self, **kwargs):
    """
    Create the csv export of all active pages.

    Collects the listed fields from each active Page and renders them as
    one csv row per page via render_csv.
    """
    fields = [
        'guid',
        'title',
        'slug',
        'header_image',
        'content',
        'view_contact_form',
        'design_notes',
        'syndicate',
        'template',
        'tags',
        'entity',
        'meta',
        'categories',
        'allow_anonymous_view',
        'allow_user_view',
        'allow_member_view',
        'allow_user_edit',
        'allow_member_edit',
        'create_dt',
        'update_dt',
        'creator',
        'creator_username',
        'owner',
        'owner_username',
        'status',
        'status_detail',
    ]
    file_name = 'pages.csv'
    pages = Page.objects.active()
    data_row_list = []
    for page in pages:
        # get the available fields from the model's meta
        opts = page._meta
        d = {}
        for f in opts.get_fields() + opts.many_to_many:
            if f.name in fields:  # include specified fields only
                # BUG FIX: these checks were independent `if` statements and
                # the trailing `else` belonged only to the GenericRelation
                # check, so ManyToManyField/ForeignKey values were clobbered
                # by the raw value_from_object() result. Use an elif chain
                # (matching the generic model export task in this module).
                if isinstance(f, ManyToManyField):
                    value = [
                        "%s" % obj for obj in f.value_from_object(page)
                    ]
                elif isinstance(f, ForeignKey):
                    value = getattr(page, f.name)
                elif isinstance(f, GenericRelation):
                    generics = f.value_from_object(page).all()
                    value = ["%s" % obj for obj in generics]
                    value = ', '.join(value)
                else:
                    value = f.value_from_object(page)
                d[f.name] = value
        # append the accumulated values as a data row
        # keep in mind the ordering of the fields
        data_row = []
        for field in fields:
            # clean the derived values into unicode
            value = str(d[field]).rstrip()
            value = escape_csv(value)
            data_row.append(value)
        data_row_list.append(data_row)
    return render_csv(file_name, fields, data_row_list)
def process_export(export_fields='all_fields', export_status_detail='',
                   identifier=u'', user_id=0):
    """
    Export directories to a csv file in default_storage and email the
    requesting user a download link.

    Args:
        export_fields: 'main_fields' for the curated list, otherwise all
            model fields (base fields appended last).
        export_status_detail: optional status_detail filter (icontains).
        identifier: unique token for the export file name; defaults to the
            current unix timestamp.
        user_id: pk of the user to notify when the export is ready.
    """
    from tendenci.apps.perms.models import TendenciBaseModel
    if export_fields == 'main_fields':
        field_list = [
            'headline',
            'slug',
            'summary',
            'body',
            'source',
            'first_name',
            'last_name',
            'address',
            'address2',
            'city',
            'state',
            'zip_code',
            'country',
            'phone',
            'phone2',
            'fax',
            'email',
            'email2',
            'website',
            'list_type',
            'requested_duration',
            'activation_dt',
            'expiration_dt',
            'tags',
            'enclosure_url',
            'enclosure_type',
            'enclosure_length',
            'status',
            'status_detail'
        ]
    else:
        # base ------------
        # Derive the field list from the models, excluding auto pks and
        # the base-model fields (re-appended at the end for ordering).
        base_field_list = [
            smart_str(field.name)
            for field in TendenciBaseModel._meta.fields
            if not field.__class__ == AutoField
        ]
        field_list = [
            smart_str(field.name)
            for field in Directory._meta.fields
            if not field.__class__ == AutoField
        ]
        field_list = [
            name for name in field_list if name not in base_field_list
        ]
        field_list.remove('guid')
        # append base fields at the end
        field_list = field_list + base_field_list
    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/directories/%s_temp.csv' % identifier
    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        # Rename a couple of columns for the exported header only.
        fields_names = list(field_list)
        for i, item in enumerate(fields_names):
            if item == 'headline':
                fields_names[i] = 'name'
            if item == 'body':
                fields_names[i] = 'description'
        csv_writer.writerow(fields_names)
        directories = Directory.objects.all()
        if export_status_detail:
            directories = directories.filter(
                status_detail__icontains=export_status_detail)
        for directory in directories:
            items_list = []
            for field_name in field_list:
                item = getattr(directory, field_name)
                if item is None:
                    item = ''
                if item:
                    # Normalize temporal values; format the invoice as a
                    # summary string; escape plain strings.
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif field_name == 'invoice':
                        # display total vs balance
                        item = 'Total: %d / Balance: %d' % (item.total,
                                                            item.balance)
                    elif isinstance(item, str):
                        item = escape_csv(item)
                items_list.append(item)
            csv_writer.writerow(items_list)
    # rename the file name
    file_name = 'export/directories/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))
    # delete the temp file
    default_storage.delete(file_name_temp)
    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('directory.export_download',
                               args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'export_status_detail': export_status_detail,
            'export_fields': export_fields
        }
        subject = render_to_string(
            template_name='directories/notices/export_ready_subject.html',
            context=parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            template_name='directories/notices/export_ready_body.html',
            context=parms)
        email = Email(recipient=user.email, subject=subject, body=body)
        email.send()
def run(self, **kwargs):
    """
    Return a csv of all profiles.

    Builds one row per Profile with the columns named in field_list and
    renders them through render_excel as a .csv attachment. The trailing
    '\\n' entry in both lists is kept for backward compatibility with the
    existing export format.
    """
    filename = "profiles_export.csv"
    field_list = [
        'username',
        'first_name',
        'last_name',
        'email',
        'pl_id',
        'member_number',
        'historical_member_number',
        'time_zone',
        'language',
        'salutation',
        'initials',
        'display_name',
        'mailing_name',
        'company',
        'position_title',
        'position_assignment',
        'sex',
        'address_type',
        'address',
        'address2',
        'city',
        'state',
        'zipcode',
        'country',
        'county',
        'phone',
        'phone2',
        'fax',
        'work_phone',
        'home_phone',
        'mobile_phone',
        'email',
        'email2',
        'url',
        'url2',
        'dob',
        'ssn',
        'spouse',
        'department',
        'education',
        'student',
        'remember_login',
        'exported',
        'direct_mail',
        'notes',
        'admin_notes',
        'referral_source',
        'hide_in_search',
        'hide_address',
        'hide_email',
        'hide_phone',
        'first_responder',
        'agreed_to_tos',
        'original_username',
        '\n',
    ]
    data_rows = []
    profiles = Profile.objects.all()
    for profile in profiles:
        data_row = [
            profile.user.username,
            profile.user.first_name,
            profile.user.last_name,
            profile.user.email,
            profile.pl_id,
            profile.member_number,
            profile.historical_member_number,
            profile.time_zone,
            profile.language,
            profile.salutation,
            profile.initials,
            profile.display_name,
            profile.mailing_name,
            profile.company,
            profile.position_title,
            profile.position_assignment,
            profile.sex,
            profile.address_type,
            profile.address,
            profile.address2,
            profile.city,
            profile.state,
            profile.zipcode,
            profile.country,
            profile.county,
            profile.phone,
            profile.phone2,
            profile.fax,
            profile.work_phone,
            profile.home_phone,
            profile.mobile_phone,
            # BUG FIX: field_list has a second 'email' column here but the
            # row previously jumped straight to email2, shifting every
            # following column left by one.
            profile.email,
            profile.email2,
            profile.url,
            profile.url2,
            profile.dob,
            profile.ssn,
            profile.spouse,
            profile.department,
            profile.education,
            profile.student,
            profile.remember_login,
            profile.exported,
            profile.direct_mail,
            profile.notes,
            profile.admin_notes,
            profile.referral_source,
            profile.hide_in_search,
            profile.hide_address,
            profile.hide_email,
            profile.hide_phone,
            profile.first_responder,
            profile.agreed_to_tos,
            profile.original_username,
            '\n',
        ]
        data_row = [escape_csv(value) for value in data_row]
        data_rows.append(data_row)
    return render_excel(filename, field_list, data_rows, '.csv')
def process_export(identifier, user_id):
    """
    Export active articles to a csv file in default_storage and email the
    requesting user a download link.

    Args:
        identifier: unique token used in the export file name; defaults to
            the current unix timestamp when falsy.
        user_id: pk of the user to notify when the export is ready.
    """
    field_list = [
        'guid',
        'slug',
        'timezone',
        'headline',
        'summary',
        'body',
        'source',
        'first_name',
        'last_name',
        'phone',
        'fax',
        'email',
        'website',
        'release_dt',
        'syndicate',
        'featured',
        'design_notes',
        'tags',
        'enclosure_url',
        'enclosure_type',
        'enclosure_length',
        'not_official_content',
        'entity',
    ]
    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/articles/%s_temp.csv' % (identifier)
    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_list)
        articles = Article.objects.filter(status_detail='active')
        for article in articles:
            items_list = []
            for field_name in field_list:
                item = getattr(article, field_name)
                # ROBUSTNESS FIX: blank out NULL values before formatting,
                # matching the other export tasks in this module — otherwise
                # a literal "None" could end up in the csv.
                if item is None:
                    item = ''
                if isinstance(item, datetime):
                    item = item.strftime('%Y-%m-%d %H:%M:%S')
                elif isinstance(item, date):
                    item = item.strftime('%Y-%m-%d')
                elif isinstance(item, time):
                    item = item.strftime('%H:%M:%S')
                else:
                    item = escape_csv(item)
                items_list.append(item)
            csv_writer.writerow(items_list)
    # rename the file name
    file_name = 'export/articles/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))
    # delete the temp file
    default_storage.delete(file_name_temp)
    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('article.export_download', args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'date_today': datetime.now()
        }
        subject = render_to_string(
            template_name='articles/notices/export_ready_subject.html',
            context=parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            template_name='articles/notices/export_ready_body.html',
            context=parms)
        email = Email(recipient=user.email, subject=subject, body=body)
        email.send()
def process_export(
        group_id,
        export_target='all',
        identifier=u'', user_id=0):
    """
    Process export for group members and/or group subscribers.

    Writes a csv of the group's active members (user + profile columns) to
    default_storage in batches, then emails the requesting user a download
    link.
    """
    [group] = Group.objects.filter(id=group_id)[:1] or [None]
    if not group:
        return
    # pull 100 rows per query
    # be careful of the memory usage
    rows_per_batch = 100
    identifier = identifier or str(time.time())
    file_dir = 'export/groups/'
    file_path_temp = '%sgroup_%d_%s_%s_temp.csv' % (file_dir,
                                                    group.id,
                                                    export_target,
                                                    identifier)
    # labels
    user_fields = ['id', 'first_name', 'last_name', 'email', 'username',
                   'is_active', 'is_staff', 'is_superuser']
    profile_fields = ['direct_mail', 'company', 'department',
                      'position_title', 'address', 'address2', 'city',
                      'state', 'zipcode', 'country', 'region', 'phone',
                      'notes', 'referral_source', 'create_dt']
    labels = user_fields + profile_fields
    # Template row: one empty value per output column.
    field_dict = OrderedDict([(label.lower().replace(" ", "_"), '')
                              for label in labels])
    with default_storage.open(file_path_temp, 'w') as csvfile:
        csv_writer = csv.DictWriter(csvfile,
                                    fieldnames=list(field_dict.keys()))
        csv_writer.writeheader()
        # process regular group members
        count_members = group.members.filter(
            group_member__status=True,
            group_member__status_detail='active').count()
        num_rows_processed = 0
        while num_rows_processed < count_members:
            # Slice one batch at a time to bound memory usage.
            users = group.members.filter(
                group_member__status=True,
                group_member__status_detail='active'
            ).select_related('profile'
            ).order_by('group_member__member_id')[
                num_rows_processed:(num_rows_processed + rows_per_batch)]
            num_rows_processed += rows_per_batch
            # NOTE(review): row_dict is copied once per BATCH, not per user —
            # a column that hasattr() skips for a later user keeps the
            # previous user's value. Confirm whether that is intended.
            row_dict = field_dict.copy()
            for user in users:
                if hasattr(user, 'profile'):
                    profile = user.profile
                else:
                    # Lazily create a profile for users that lack one.
                    profile = Profile.objects.create_profile(user)
                for field_name in user_fields:
                    if hasattr(user, field_name):
                        row_dict[field_name] = getattr(user, field_name)
                for field_name in profile_fields:
                    if hasattr(profile, field_name):
                        row_dict[field_name] = getattr(profile, field_name)
                # Normalize every value to an escaped/encoded string.
                for k, v in row_dict.items():
                    if not isinstance(v, str):
                        if isinstance(v, datetime):
                            row_dict[k] = v.strftime('%Y-%m-%d %H:%M:%S')
                        elif isinstance(v, date):
                            row_dict[k] = v.strftime('%Y-%m-%d')
                        else:
                            row_dict[k] = smart_str(v)
                    else:
                        row_dict[k] = escape_csv(v)
                csv_writer.writerow(row_dict)
    # rename the file name
    file_path = '%sgroup_%d_%s_%s.csv' % (file_dir,
                                          group.id,
                                          export_target,
                                          identifier)
    default_storage.save(file_path,
                         default_storage.open(file_path_temp, 'rb'))
    # delete the temp file
    default_storage.delete(file_path_temp)
    # notify user that export is ready to download
    [user] = User.objects.filter(id=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('group.members_export_download',
                               args=[group.slug, export_target, identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'group': group,
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name}
        subject = render_to_string(
            template_name='user_groups/exports/export_ready_subject.html',
            context=parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            template_name='user_groups/exports/export_ready_body.html',
            context=parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_invoice_export(start_dt=None, end_dt=None,
                           identifier=u'', user_id=0):
    """
    Export invoices to a csv file in default_storage and email the
    requesting user a download link.

    Args:
        start_dt, end_dt: optional datetime bounds applied to update_dt.
            When omitted (None), that bound is not applied.
        identifier: unique token used in the export file name; defaults to
            the current unix timestamp when falsy.
        user_id: pk of the user to notify when the export is ready.
    """
    fields = [
        'id',
        'guid',
        'object_type',
        'object_id',
        'title',
        'tender_date',
        'bill_to',
        'bill_to_first_name',
        'bill_to_last_name',
        'bill_to_company',
        'bill_to_address',
        'bill_to_city',
        'bill_to_state',
        'bill_to_zip_code',
        'bill_to_country',
        'bill_to_phone',
        'bill_to_fax',
        'bill_to_email',
        'ship_to',
        'ship_to_first_name',
        'ship_to_last_name',
        'ship_to_company',
        'ship_to_address',
        'ship_to_city',
        'ship_to_state',
        'ship_to_zip_code',
        'ship_to_country',
        'ship_to_phone',
        'ship_to_fax',
        'ship_to_email',
        'ship_to_address_type',
        'receipt',
        'gift',
        'arrival_date_time',
        'greeting',
        'instructions',
        'po',
        'terms',
        'due_date',
        'ship_date',
        'ship_via',
        'fob',
        'project',
        'other',
        'message',
        'subtotal',
        'shipping',
        'shipping_surcharge',
        'box_and_packing',
        'tax_exempt',
        'tax_exemptid',
        'tax_rate',
        'taxable',
        'tax',
        'variance',
        'discount_amount',
        'total',
        'payments_credits',
        'balance',
        'disclaimer',
        'variance_notes',
        'admin_notes',
        'create_dt',
        'update_dt',
        'creator',
        'creator_username',
        'owner',
        'owner_username',
        'status_detail'
    ]
    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/invoices/%s_temp.csv' % identifier
    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(fields)
        # ROBUSTNESS FIX: start_dt/end_dt default to None, but the previous
        # unconditional filter(update_dt__gte=start_dt, update_dt__lte=end_dt)
        # cannot handle None bounds. Apply each bound only when supplied.
        invoices = Invoice.objects.filter(status=True)
        if start_dt:
            invoices = invoices.filter(update_dt__gte=start_dt)
        if end_dt:
            invoices = invoices.filter(update_dt__lte=end_dt)
        for invoice in invoices:
            items_list = []
            for field_name in fields:
                item = getattr(invoice, field_name)
                if item is None:
                    item = ''
                if item:
                    # Normalize temporal values; escape plain strings.
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif isinstance(item, str):
                        item = escape_csv(item)
                items_list.append(item)
            csv_writer.writerow(items_list)
    # rename the file name
    file_name = 'export/invoices/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))
    # delete the temp file
    default_storage.delete(file_name_temp)
    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('invoice.export_download', args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'start_dt': start_dt,
            'end_dt': end_dt
        }
        subject = render_to_string(
            template_name='invoices/notices/export_ready_subject.html',
            context=parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            template_name='invoices/notices/export_ready_body.html',
            context=parms)
        email = Email(recipient=user.email, subject=subject, body=body)
        email.send()
def iter_form_entries(form):
    """
    Yield csv-encoded rows for the entries of ``form``.

    Generator variant of the csv export: each ``yield`` is one csv line
    produced by a DictWriter writing through an Echo stream. The first
    yield is the header (column names); subsequent yields are one row per
    FormEntry. Field ids of FileField-type fields are collected for the
    (currently commented-out) download-URL conversion.
    """
    columns = []
    # Maps field id -> column index in each row (offset by 1 for entry_time).
    field_indexes = {}
    file_field_ids = []
    entry_time_name = FormEntry._meta.get_field("entry_time").verbose_name
    columns.append(str(entry_time_name))
    for field in form.fields.all().order_by('position', 'id'):
        # Description/Header pseudo-fields carry no value, so skip them.
        if not field.field_type.split('.')[-1] in ['Description', 'Header']:
            columns.append(field.label)
            field_indexes[field.id] = 1 + len(field_indexes)
            if field.field_type == "FileField":
                file_field_ids.append(field.id)
    if form.custom_payment:
        columns.append(str("Pricing"))
        columns.append(str("Price"))
        columns.append(str("Payment Method"))
    # The dict keys only serve as stable positions: DictWriter emits values
    # in fieldnames order and zip() below pairs keys with values in that
    # same order, so output rows follow the `columns`/`row` list order.
    field_indexes[0] = 0
    writer = csv.DictWriter(Echo(), fieldnames=field_indexes)
    yield writer.writerow(dict(zip(field_indexes, columns)))
    # Loop through each field value order by entry, building up each
    # entry as a row.
    entries = FormEntry.objects.filter(form=form).order_by('pk')
    for entry in entries:
        values = FieldEntry.objects.filter(entry=entry)
        row = [""] * len(columns)
        entry_time = entry.entry_time.strftime("%Y-%m-%d %H:%M:%S")
        if form.custom_payment:
            # Payment columns occupy the tail of the row.
            if entry.pricing:
                row[-3] = entry.pricing.label
                if not entry.pricing.price:
                    # A falsy pricing price means the entrant supplied a
                    # custom amount.
                    row[-2] = entry.custom_price
                else:
                    row[-2] = entry.pricing.price
            row[-1] = entry.payment_method
        row[0] = entry_time
        for field_entry in values:
            if not field_entry.field.field_type.split('.')[-1] in [
                    'Description', 'Header'
            ]:
                value = escape_csv(field_entry.value)
                # Create download URL for file fields.
                # if field_entry.field_id in file_field_ids:
                #     url = reverse("admin:forms_form_file", args=(field_entry.id,))
                #     value = '{site_url}{url}'.format(site_url=site_url, url=url)
                # Only use values for fields that currently exist for the form.
                try:
                    row[field_indexes[field_entry.field_id]] = value
                except KeyError:
                    pass
        # Write out the row.
        yield writer.writerow(dict(zip(field_indexes, row)))