def generate_recap(self):
    """Generate a recap CSV for this import and attach it to ``recap_file``.

    The recap repeats the uploaded file's columns (minus 'status' and
    'status_detail') and appends 'action' and 'error' columns describing
    what happened to each imported row.  Runs at most once: skipped when
    a recap already exists or when no header line was recorded.
    """
    if not self.recap_file and self.header_line:
        file_name = 'user_import_%d_recap.csv' % self.id
        # Write the recap into the same directory as the uploaded file.
        file_path = '%s/%s' % (os.path.split(self.upload_file.name)[0],
                               file_name)
        f = default_storage.open(file_path, 'wb')
        try:
            recap_writer = UnicodeWriter(f, encoding='utf-8')
            header_row = self.header_line.split(',')
            if 'status' in header_row:
                header_row.remove('status')
            if 'status_detail' in header_row:
                header_row.remove('status_detail')
            header_row.extend(['action', 'error'])
            recap_writer.writerow(header_row)
            data_list = UserImportData.objects.filter(
                uimport=self).order_by('row_num')
            for idata in data_list:
                data_dict = idata.row_data
                # NOTE(review): keys missing from row_data are silently
                # skipped, which shifts the remaining values left relative
                # to the header -- confirm row_data always carries every
                # header key.
                row = [data_dict[k] for k in header_row if k in data_dict]
                row.extend([idata.action_taken, idata.error])
                row = [smart_str(s).decode('utf-8') for s in row]
                recap_writer.writerow(row)
        finally:
            # Always release the storage handle, even if a row write fails
            # (the original leaked the handle on exception).
            f.close()
        self.recap_file.name = file_path
        self.save()
def generate_recap(self):
    """Write a recap CSV of this import and point ``recap_file`` at it.

    The recap mirrors the uploaded file's header (without 'status' and
    'status_detail') plus two extra columns, 'action' and 'error', taken
    from each row's import result.  Does nothing when a recap already
    exists or when no header line is available.
    """
    if self.recap_file or not self.header_line:
        return
    recap_name = 'user_import_%d_recap.csv' % self.id
    upload_dir = os.path.split(self.upload_file.name)[0]
    recap_path = '%s/%s' % (upload_dir, recap_name)
    out = default_storage.open(recap_path, 'wb')
    writer = UnicodeWriter(out, encoding='utf-8')
    columns = self.header_line.split(',')
    # Status columns are replaced by the action/error result columns.
    for dropped in ('status', 'status_detail'):
        if dropped in columns:
            columns.remove(dropped)
    columns.extend(['action', 'error'])
    writer.writerow(columns)
    for idata in UserImportData.objects.filter(
            uimport=self).order_by('row_num'):
        values = idata.row_data
        row = [values[col] for col in columns if col in values]
        row.extend([idata.action_taken, idata.error])
        writer.writerow([smart_str(v).decode('utf-8') for v in row])
    out.close()
    self.recap_file.name = recap_path
    self.save()
def process_export(export_fields='all_fields', identifier=u'', user_id=0):
    """Export all user profiles to a CSV in default_storage and email a
    download link to the requesting user.

    export_fields -- 'main_fields' for a curated column set, anything else
                     exports every non-auto field of User and Profile.
    identifier    -- unique token used in the export file name; defaults
                     to the current epoch time.
    user_id       -- pk of the user to notify when the export is ready.
    """
    from tendenci.core.perms.models import TendenciBaseModel
    if export_fields == 'main_fields':
        user_field_list = [
            'username', 'first_name', 'last_name', 'email']
        profile_field_list = [
            'salutation', 'initials', 'display_name', 'company',
            'department', 'position_title', 'sex', 'address',
            'address2', 'city', 'state', 'zipcode', 'country',
            'phone', 'phone2', 'fax', 'work_phone', 'home_phone',
            'mobile_phone', 'url', 'url2', 'dob', 'status_detail']
    else:
        # base ------------ fields shared via TendenciBaseModel are
        # excluded from the profile columns below.
        base_field_list = [
            smart_str(field.name) for field in TendenciBaseModel._meta.fields
            if not field.__class__ == AutoField]
        # user ------------
        user_field_list = [
            smart_str(field.name) for field in User._meta.fields
            if not field.__class__ == AutoField]
        user_field_list.remove('password')
        # profile ---------
        profile_field_list = [
            smart_str(field.name) for field in Profile._meta.fields
            if not field.__class__ == AutoField]
        profile_field_list = [
            name for name in profile_field_list
            if not name in base_field_list]
        profile_field_list.remove('guid')
        profile_field_list.remove('user')
    # append base fields at the end
    field_list = user_field_list + profile_field_list
    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/profiles/%s_temp.csv' % identifier
    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_list)
        profiles = Profile.objects.all()
        for profile in profiles:
            p_user = profile.user
            items_list = []
            for field_name in field_list:
                # Profile fields win over same-named user fields.
                if field_name in profile_field_list:
                    item = getattr(profile, field_name)
                elif field_name in user_field_list:
                    item = getattr(p_user, field_name)
                else:
                    item = ''
                if item:
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif isinstance(item, basestring):
                        item = item.encode("utf-8")
                    item = smart_str(item).decode('utf-8')
                items_list.append(item)
            csv_writer.writerow(items_list)
    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_name = 'export/profiles/%s.csv' % identifier
    default_storage.save(file_name, default_storage.open(file_name_temp, 'rb'))
    # delete the temp file
    default_storage.delete(file_name_temp)
    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('profile.export_download', args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'export_fields': export_fields}
        subject = render_to_string(
            'profiles/notices/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'profiles/notices/export_ready_body.html', parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_export(export_fields='all_fields', export_status_detail='',
                   identifier=u'', user_id=0):
    """Export directories to a CSV in default_storage and email a download
    link to the requesting user.

    export_fields        -- 'main_fields' for a curated column set; any
                            other value exports every non-auto Directory
                            field with the TendenciBaseModel fields last.
    export_status_detail -- if given, only directories whose status_detail
                            contains this value are exported.
    identifier           -- unique token used in the export file name;
                            defaults to the current epoch time.
    user_id              -- pk of the user to notify when ready.
    """
    from tendenci.core.perms.models import TendenciBaseModel
    if export_fields == 'main_fields':
        field_list = [
            'headline', 'slug', 'summary', 'body', 'source',
            'first_name', 'last_name', 'address', 'address2',
            'city', 'state', 'zip_code', 'country', 'phone',
            'phone2', 'fax', 'email', 'email2', 'website',
            'list_type', 'requested_duration', 'activation_dt',
            'expiration_dt', 'tags', 'enclosure_url',
            'enclosure_type', 'enclosure_length', 'status',
            'status_detail'
        ]
    else:
        # base ------------ fields inherited from TendenciBaseModel.
        base_field_list = [
            smart_str(field.name) for field in TendenciBaseModel._meta.fields
            if not field.__class__ == AutoField
        ]
        field_list = [
            smart_str(field.name) for field in Directory._meta.fields
            if not field.__class__ == AutoField
        ]
        field_list = [
            name for name in field_list
            if not name in base_field_list
        ]
        field_list.remove('guid')
        # append base fields at the end
        field_list = field_list + base_field_list
    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/directories/%s_temp.csv' % identifier
    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_list)
        directories = Directory.objects.all()
        if export_status_detail:
            directories = directories.filter(
                status_detail__icontains=export_status_detail)
        for directory in directories:
            items_list = []
            for field_name in field_list:
                item = getattr(directory, field_name)
                if item is None:
                    item = ''
                if item:
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif isinstance(item, basestring):
                        item = item.encode("utf-8")
                    elif field_name == 'invoice':
                        # display total vs balance (the invoice object is
                        # not a date/string, so it reaches this branch)
                        item = 'Total: %d / Balance: %d' % (item.total,
                                                            item.balance)
                    item = smart_str(item).decode('utf-8')
                items_list.append(item)
                csv_writer.writerow(items_list)
def process_export(
        group_id, export_target='all', identifier=u'', user_id=0):
    """Process export for group members and/or group subscribers.

    Writes one CSV per export into default_storage and emails a download
    link to the requesting user.

    group_id      -- pk of the group to export; silently returns when the
                     group does not exist.
    export_target -- 'members', 'subscribers', or 'all'.
    identifier    -- unique token used in the export file name; defaults
                     to the current epoch time.
    user_id       -- pk of the user to notify when the export is ready.
    """
    [group] = Group.objects.filter(id=group_id)[:1] or [None]
    if not group:
        return

    # pull 100 rows per query
    # be careful of the memory usage
    rows_per_batch = 100

    identifier = identifier or str(time.time())
    file_dir = 'export/groups/'
    file_path_temp = '%sgroup_%d_%s_%s_temp.csv' % (file_dir,
                                                    group.id,
                                                    export_target,
                                                    identifier)
    # labels
    subscribers_labels = None
    regular_labels = None
    if export_target in ['subscribers', 'all']:
        # get a list of labels for subscribers
        subscribers_labels = list(set([label for (label, ) in
            SubscriberData.objects.filter(
                subscription__group=group
                ).values_list('field_label')
            ]))
    if export_target in ['members', 'all']:
        user_fields = [
            'id', 'first_name', 'last_name', 'email',
            'is_active', 'is_staff', 'is_superuser'
        ]
        profile_fields = [
            'direct_mail', 'company', 'address', 'address2',
            'city', 'state', 'zipcode', 'country', 'phone',
            'create_dt'
        ]
        regular_labels = user_fields + profile_fields
    if regular_labels and subscribers_labels:
        labels = regular_labels + subscribers_labels
    elif regular_labels:
        labels = regular_labels
    elif subscribers_labels:
        labels = subscribers_labels
    else:
        # Fix: previously 'labels' was left unbound here (NameError) when
        # an export had no labels at all -- e.g. a subscribers-only export
        # of a group with no subscriber data, or an unexpected
        # export_target.  An empty header is written instead.
        labels = []

    field_dict = OrderedDict([(label.lower().replace(" ", "_"), ''
                               ) for label in labels])

    with default_storage.open(file_path_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_dict.keys())

        # process regular group members
        if export_target in ['members', 'all']:
            count_members = group.members.filter(
                group_member__status=True,
                group_member__status_detail='active'
                ).count()
            num_rows_processed = 0
            while num_rows_processed < count_members:
                users = group.members.filter(
                    group_member__status=True,
                    group_member__status_detail='active'
                    ).select_related('profile'
                    )[num_rows_processed:(num_rows_processed +
                                          rows_per_batch)]
                num_rows_processed += rows_per_batch
                # NOTE(review): row_dict is reset once per batch, not per
                # user, so a value formatted for an earlier user could
                # leak into a later row if an attribute is ever missing --
                # confirm every listed field exists on User/Profile.
                row_dict = field_dict.copy()
                for user in users:
                    profile = user.profile
                    for field_name in user_fields:
                        if hasattr(user, field_name):
                            row_dict[field_name] = getattr(user, field_name)
                    for field_name in profile_fields:
                        if hasattr(profile, field_name):
                            row_dict[field_name] = getattr(profile,
                                                           field_name)
                    for k, v in row_dict.items():
                        if not isinstance(v, basestring):
                            if isinstance(v, datetime):
                                row_dict[k] = v.strftime(
                                    '%Y-%m-%d %H:%M:%S')
                            elif isinstance(v, date):
                                row_dict[k] = v.strftime('%Y-%m-%d')
                            else:
                                row_dict[k] = smart_str(v)
                    csv_writer.writerow(row_dict.values())

        # process for subscribers
        if export_target in ['subscribers', 'all']:
            count_subscriptions = GroupSubscription.objects.filter(
                group=group
                ).count()
            num_rows_processed = 0
            while num_rows_processed < count_subscriptions:
                subscription_ids = GroupSubscription.objects.filter(
                    group=group
                    ).order_by('id'
                    ).values_list('id', flat=True
                    )[num_rows_processed:(num_rows_processed +
                                          rows_per_batch)]
                num_rows_processed += rows_per_batch
                if subscription_ids:
                    ssdata = SubscriberData.objects.filter(
                        subscription__group=group,
                        subscription_id__in=subscription_ids
                        ).order_by('subscription__id')
                    if ssdata:
                        prev_subscription_id = 0
                        row_dict = field_dict.copy()
                        # this batch of ssdata can contain up to 100
                        # subscriptions; we process one subscription at a
                        # time by remembering its previous subscription
                        for sd in ssdata:
                            if prev_subscription_id != 0 and \
                                    sd.subscription.id != \
                                    prev_subscription_id:
                                # write out the row
                                csv_writer.writerow(row_dict.values())
                                # reset row_dict
                                row_dict = field_dict.copy()
                            prev_subscription_id = sd.subscription.id
                            field_name = sd.field_label.lower(
                                ).replace(" ", "_")
                            row_dict[field_name] = sd.value
                        # write out the last row
                        csv_writer.writerow(row_dict.values())

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_path = '%sgroup_%d_%s_%s.csv' % (file_dir,
                                          group.id,
                                          export_target,
                                          identifier)
    default_storage.save(file_path,
                         default_storage.open(file_path_temp, 'rb'))

    # delete the temp file
    default_storage.delete(file_path_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(id=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('group.members_export_download',
                               args=[group.slug, export_target, identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'group': group,
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'export_target': export_target}
        subject = render_to_string(
            'user_groups/exports/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'user_groups/exports/export_ready_body.html', parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_export(export_fields="all_fields", identifier=u"", user_id=0):
    """Export all user profiles to a CSV in default_storage and email a
    download link to the requesting user.

    export_fields -- "main_fields" for a curated column set, anything else
                     exports every non-auto field of User and Profile.
    identifier    -- unique token used in the export file name; defaults
                     to the current epoch time.
    user_id       -- pk of the user to notify when the export is ready.
    """
    from tendenci.core.perms.models import TendenciBaseModel

    if export_fields == "main_fields":
        user_field_list = ["username", "first_name", "last_name", "email"]
        profile_field_list = [
            "salutation",
            "initials",
            "display_name",
            "company",
            "department",
            "position_title",
            "sex",
            "address",
            "address2",
            "city",
            "state",
            "zipcode",
            "country",
            "phone",
            "phone2",
            "fax",
            "work_phone",
            "home_phone",
            "mobile_phone",
            "url",
            "url2",
            "dob",
            "status_detail",
        ]
    else:
        # base ------------ fields shared via TendenciBaseModel are
        # excluded from the profile columns below.
        base_field_list = [
            smart_str(field.name)
            for field in TendenciBaseModel._meta.fields
            if not field.__class__ == AutoField
        ]
        # user ------------
        user_field_list = [smart_str(field.name) for field in User._meta.fields if not field.__class__ == AutoField]
        user_field_list.remove("password")
        # profile ---------
        profile_field_list = [
            smart_str(field.name) for field in Profile._meta.fields if not field.__class__ == AutoField
        ]
        profile_field_list = [name for name in profile_field_list if not name in base_field_list]
        profile_field_list.remove("guid")
        profile_field_list.remove("user")

    # append base fields at the end
    field_list = user_field_list + profile_field_list

    identifier = identifier or int(ttime.time())
    file_name_temp = "export/profiles/%s_temp.csv" % identifier

    with default_storage.open(file_name_temp, "wb") as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding="utf-8")
        csv_writer.writerow(field_list)

        profiles = Profile.objects.all()
        for profile in profiles:
            p_user = profile.user
            items_list = []
            for field_name in field_list:
                # Profile fields win over same-named user fields.
                if field_name in profile_field_list:
                    item = getattr(profile, field_name)
                elif field_name in user_field_list:
                    item = getattr(p_user, field_name)
                else:
                    item = ""
                if item:
                    if isinstance(item, datetime):
                        item = item.strftime("%Y-%m-%d %H:%M:%S")
                    elif isinstance(item, date):
                        item = item.strftime("%Y-%m-%d")
                    elif isinstance(item, time):
                        item = item.strftime("%H:%M:%S")
                    elif isinstance(item, basestring):
                        item = item.encode("utf-8")
                    item = smart_str(item).decode("utf-8")
                items_list.append(item)
            csv_writer.writerow(items_list)

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_name = "export/profiles/%s.csv" % identifier
    default_storage.save(file_name, default_storage.open(file_name_temp, "rb"))

    # delete the temp file
    default_storage.delete(file_name_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse("profile.export_download", args=[identifier])
        site_url = get_setting("site", "global", "siteurl")
        site_display_name = get_setting("site", "global", "sitedisplayname")
        parms = {
            "download_url": download_url,
            "user": user,
            "site_url": site_url,
            "site_display_name": site_display_name,
            "export_fields": export_fields,
        }
        subject = render_to_string("profiles/notices/export_ready_subject.html", parms)
        subject = subject.strip("\n").strip("\r")
        body = render_to_string("profiles/notices/export_ready_body.html", parms)
        email = Email(recipient=user.email, subject=subject, body=body)
        email.send()
def process_export(export_fields='all_fields', identifier=u'', user_id=0):
    """Export all user profiles to a CSV in default_storage and email a
    download link to the requesting user.

    export_fields -- 'main_fields' for a curated column set, anything else
                     exports every non-auto field of User and Profile.
    identifier    -- unique token used in the export file name; defaults
                     to the current epoch time.
    user_id       -- pk of the user to notify when the export is ready.
    """
    from tendenci.core.perms.models import TendenciBaseModel

    if export_fields == 'main_fields':
        user_field_list = ['username', 'first_name', 'last_name', 'email']
        profile_field_list = [
            'salutation', 'initials', 'display_name', 'company',
            'department', 'position_title', 'sex', 'address',
            'address2', 'city', 'state', 'zipcode', 'country',
            'phone', 'phone2', 'fax', 'work_phone', 'home_phone',
            'mobile_phone', 'url', 'url2', 'dob', 'status_detail'
        ]
    else:
        # base ------------ fields shared via TendenciBaseModel are
        # excluded from the profile columns below.
        base_field_list = [
            smart_str(field.name) for field in TendenciBaseModel._meta.fields
            if not field.__class__ == AutoField
        ]
        # user ------------
        user_field_list = [
            smart_str(field.name) for field in User._meta.fields
            if not field.__class__ == AutoField
        ]
        user_field_list.remove('password')
        # profile ---------
        profile_field_list = [
            smart_str(field.name) for field in Profile._meta.fields
            if not field.__class__ == AutoField
        ]
        profile_field_list = [
            name for name in profile_field_list
            if not name in base_field_list
        ]
        profile_field_list.remove('guid')
        profile_field_list.remove('user')

    # append base fields at the end
    field_list = user_field_list + profile_field_list

    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/profiles/%s_temp.csv' % identifier

    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_list)

        profiles = Profile.objects.all()
        for profile in profiles:
            p_user = profile.user
            items_list = []
            for field_name in field_list:
                # Profile fields win over same-named user fields.
                if field_name in profile_field_list:
                    item = getattr(profile, field_name)
                elif field_name in user_field_list:
                    item = getattr(p_user, field_name)
                else:
                    item = ''
                if item:
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif isinstance(item, basestring):
                        item = item.encode("utf-8")
                    item = smart_str(item).decode('utf-8')
                items_list.append(item)
            csv_writer.writerow(items_list)

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_name = 'export/profiles/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))

    # delete the temp file
    default_storage.delete(file_name_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('profile.export_download', args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'export_fields': export_fields
        }
        subject = render_to_string(
            'profiles/notices/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string('profiles/notices/export_ready_body.html',
                                parms)
        email = Email(recipient=user.email, subject=subject, body=body)
        email.send()
def process_export(
        group_id, export_target='all', identifier=u'', user_id=0):
    """ Process export for group members and/or group subscribers. """
    # NOTE(review): despite the docstring, this variant only exports
    # regular group members; export_target is used solely in file names
    # and the download URL -- confirm this is intentional.
    [group] = Group.objects.filter(id=group_id)[:1] or [None]
    if not group:
        return

    # pull 100 rows per query
    # be careful of the memory usage
    rows_per_batch = 100

    identifier = identifier or str(time.time())
    file_dir = 'export/groups/'
    file_path_temp = '%sgroup_%d_%s_%s_temp.csv' % (file_dir,
                                                    group.id,
                                                    export_target,
                                                    identifier)
    # labels
    user_fields = ['id', 'first_name', 'last_name', 'email',
                   'is_active', 'is_staff', 'is_superuser']
    profile_fields = ['direct_mail', 'company', 'address', 'address2',
                      'city', 'state', 'zipcode', 'country', 'phone',
                      'create_dt']
    labels = user_fields + profile_fields

    field_dict = OrderedDict([(label.lower().replace(" ", "_"), ''
                               ) for label in labels])

    with default_storage.open(file_path_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_dict.keys())

        # process regular group members in batches to bound memory use
        count_members = group.members.filter(
            group_member__status=True,
            group_member__status_detail='active').count()
        num_rows_processed = 0
        while num_rows_processed < count_members:
            users = group.members.filter(
                group_member__status=True,
                group_member__status_detail='active'
                ).select_related('profile'
                )[num_rows_processed:(num_rows_processed + rows_per_batch)]
            num_rows_processed += rows_per_batch
            # NOTE(review): row_dict is reset once per batch, not per
            # user -- stale values could carry over between users if an
            # attribute is ever missing; confirm all fields exist.
            row_dict = field_dict.copy()
            for user in users:
                profile = user.profile
                for field_name in user_fields:
                    if hasattr(user, field_name):
                        row_dict[field_name] = getattr(user, field_name)
                for field_name in profile_fields:
                    if hasattr(profile, field_name):
                        row_dict[field_name] = getattr(profile, field_name)
                for k, v in row_dict.items():
                    if not isinstance(v, basestring):
                        if isinstance(v, datetime):
                            row_dict[k] = v.strftime('%Y-%m-%d %H:%M:%S')
                        elif isinstance(v, date):
                            row_dict[k] = v.strftime('%Y-%m-%d')
                        else:
                            row_dict[k] = smart_str(v)
                csv_writer.writerow(row_dict.values())

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_path = '%sgroup_%d_%s_%s.csv' % (file_dir,
                                          group.id,
                                          export_target,
                                          identifier)
    default_storage.save(file_path,
                         default_storage.open(file_path_temp, 'rb'))

    # delete the temp file
    default_storage.delete(file_path_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(id=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('group.members_export_download',
                               args=[group.slug, export_target, identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'group': group,
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name}
        subject = render_to_string(
            'user_groups/exports/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'user_groups/exports/export_ready_body.html', parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_export(identifier, user_id):
    """Export all active articles to a CSV in default_storage and email a
    download link to the requesting user.

    identifier -- unique token used in the export file name; defaults to
                  the current epoch time when falsy.
    user_id    -- pk of the user to notify when the export is ready.
    """
    field_list = [
        'guid', 'slug', 'timezone', 'headline', 'summary', 'body',
        'source', 'first_name', 'last_name', 'phone', 'fax', 'email',
        'website', 'release_dt', 'syndicate', 'featured', 'design_notes',
        'tags', 'enclosure_url', 'enclosure_type', 'enclosure_length',
        'not_official_content', 'entity',
    ]

    def _serialize(value):
        # Normalize a model field value to a unicode string for the CSV.
        if isinstance(value, datetime):
            value = value.strftime('%Y-%m-%d %H:%M:%S')
        elif isinstance(value, date):
            value = value.strftime('%Y-%m-%d')
        elif isinstance(value, time):
            value = value.strftime('%H:%M:%S')
        elif isinstance(value, basestring):
            value = value.encode("utf-8")
        return smart_str(value).decode('utf-8')

    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/articles/%s_temp.csv' % (identifier)

    with default_storage.open(file_name_temp, 'wb') as csvfile:
        writer = UnicodeWriter(csvfile, encoding='utf-8')
        writer.writerow(field_list)
        for article in Article.objects.filter(status_detail='active'):
            writer.writerow([_serialize(getattr(article, name))
                             for name in field_list])

    # copy the temp export to its final name, then drop the temp file
    file_name = 'export/articles/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))
    default_storage.delete(file_name_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        parms = {
            'download_url': reverse('article.export_download',
                                    args=[identifier]),
            'user': user,
            'site_url': get_setting('site', 'global', 'siteurl'),
            'site_display_name': get_setting('site', 'global',
                                             'sitedisplayname')}
        subject = render_to_string(
            'articles/notices/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'articles/notices/export_ready_body.html', parms)
        Email(recipient=user.email, subject=subject, body=body).send()
def process_export(identifier, user_id):
    """Export all active articles to a CSV in default_storage and email a
    download link to the requesting user.

    identifier -- unique token used in the export file name; defaults to
                  the current epoch time when falsy.
    user_id    -- pk of the user to notify when the export is ready.
    """
    field_list = [
        'guid', 'slug', 'timezone', 'headline', 'summary', 'body',
        'source', 'first_name', 'last_name', 'phone', 'fax', 'email',
        'website', 'release_dt', 'syndicate', 'featured', 'design_notes',
        'tags', 'enclosure_url', 'enclosure_type', 'enclosure_length',
        'not_official_content', 'entity',
    ]

    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/articles/%s_temp.csv' % (identifier)

    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_list)

        articles = Article.objects.filter(status_detail='active')
        for article in articles:
            items_list = []
            for field_name in field_list:
                item = getattr(article, field_name)
                # Normalize dates/strings before the unicode round-trip.
                if isinstance(item, datetime):
                    item = item.strftime('%Y-%m-%d %H:%M:%S')
                elif isinstance(item, date):
                    item = item.strftime('%Y-%m-%d')
                elif isinstance(item, time):
                    item = item.strftime('%H:%M:%S')
                elif isinstance(item, basestring):
                    item = item.encode("utf-8")
                item = smart_str(item).decode('utf-8')
                items_list.append(item)
            csv_writer.writerow(items_list)

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_name = 'export/articles/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))

    # delete the temp file
    default_storage.delete(file_name_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('article.export_download', args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name}
        subject = render_to_string(
            'articles/notices/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'articles/notices/export_ready_body.html', parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_export(export_fields='all_fields', export_status_detail='',
                   identifier=u'', user_id=0):
    """Export directories to a CSV in default_storage and email a download
    link to the requesting user.

    export_fields        -- 'main_fields' for a curated column set; any
                            other value exports every non-auto Directory
                            field with the TendenciBaseModel fields last.
    export_status_detail -- if given, only directories whose status_detail
                            contains this value are exported.
    identifier           -- unique token used in the export file name;
                            defaults to the current epoch time.
    user_id              -- pk of the user to notify when ready.
    """
    from tendenci.core.perms.models import TendenciBaseModel
    if export_fields == 'main_fields':
        field_list = [
            'headline', 'slug', 'summary', 'body', 'source',
            'first_name', 'last_name', 'address', 'address2',
            'city', 'state', 'zip_code', 'country', 'phone',
            'phone2', 'fax', 'email', 'email2', 'website',
            'list_type', 'requested_duration', 'activation_dt',
            'expiration_dt', 'tags', 'enclosure_url',
            'enclosure_type', 'enclosure_length', 'status',
            'status_detail']
    else:
        # base ------------ fields inherited from TendenciBaseModel.
        base_field_list = [
            smart_str(field.name) for field in TendenciBaseModel._meta.fields
            if not field.__class__ == AutoField]
        field_list = [
            smart_str(field.name) for field in Directory._meta.fields
            if not field.__class__ == AutoField]
        field_list = [
            name for name in field_list
            if not name in base_field_list]
        field_list.remove('guid')
        # append base fields at the end
        field_list = field_list + base_field_list

    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/directories/%s_temp.csv' % identifier

    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_list)

        directories = Directory.objects.all()
        if export_status_detail:
            directories = directories.filter(
                status_detail__icontains=export_status_detail)
        for directory in directories:
            items_list = []
            for field_name in field_list:
                item = getattr(directory, field_name)
                if item is None:
                    item = ''
                if item:
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif isinstance(item, basestring):
                        item = item.encode("utf-8")
                    elif field_name == 'invoice':
                        # display total vs balance (the invoice object is
                        # not a date/string, so it reaches this branch)
                        item = 'Total: %d / Balance: %d' % (item.total,
                                                            item.balance)
                    item = smart_str(item).decode('utf-8')
                items_list.append(item)
            csv_writer.writerow(items_list)

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_name = 'export/directories/%s.csv' % identifier
    default_storage.save(file_name,
                         default_storage.open(file_name_temp, 'rb'))

    # delete the temp file
    default_storage.delete(file_name_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('directory.export_download',
                               args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'export_status_detail': export_status_detail,
            'export_fields': export_fields}
        subject = render_to_string(
            'directories/notices/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'directories/notices/export_ready_body.html', parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_export(group_id, export_target='all', identifier=u'', user_id=0):
    """ Process export for group members and/or group subscribers. """
    # NOTE(review): despite the docstring, this variant only exports
    # regular group members; export_target is used solely in file names
    # and the download URL -- confirm this is intentional.
    [group] = Group.objects.filter(id=group_id)[:1] or [None]
    if not group:
        return

    # pull 100 rows per query
    # be careful of the memory usage
    rows_per_batch = 100

    identifier = identifier or str(time.time())
    file_dir = 'export/groups/'
    file_path_temp = '%sgroup_%d_%s_%s_temp.csv' % (file_dir,
                                                    group.id,
                                                    export_target,
                                                    identifier)
    # labels
    user_fields = [
        'id', 'first_name', 'last_name', 'email',
        'is_active', 'is_staff', 'is_superuser'
    ]
    profile_fields = [
        'direct_mail', 'company', 'address', 'address2',
        'city', 'state', 'zipcode', 'country', 'phone',
        'create_dt'
    ]
    labels = user_fields + profile_fields

    field_dict = OrderedDict([(label.lower().replace(" ", "_"), '')
                              for label in labels])

    with default_storage.open(file_path_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_dict.keys())

        # process regular group members in batches to bound memory use
        count_members = group.members.filter(
            group_member__status=True,
            group_member__status_detail='active').count()
        num_rows_processed = 0
        while num_rows_processed < count_members:
            users = group.members.filter(
                group_member__status=True,
                group_member__status_detail='active').select_related(
                'profile')[num_rows_processed:(num_rows_processed +
                                               rows_per_batch)]
            num_rows_processed += rows_per_batch
            # NOTE(review): row_dict is reset once per batch, not per
            # user -- stale values could carry over between users if an
            # attribute is ever missing; confirm all fields exist.
            row_dict = field_dict.copy()
            for user in users:
                profile = user.profile
                for field_name in user_fields:
                    if hasattr(user, field_name):
                        row_dict[field_name] = getattr(user, field_name)
                for field_name in profile_fields:
                    if hasattr(profile, field_name):
                        row_dict[field_name] = getattr(profile, field_name)
                for k, v in row_dict.items():
                    if not isinstance(v, basestring):
                        if isinstance(v, datetime):
                            row_dict[k] = v.strftime('%Y-%m-%d %H:%M:%S')
                        elif isinstance(v, date):
                            row_dict[k] = v.strftime('%Y-%m-%d')
                        else:
                            row_dict[k] = smart_str(v)
                csv_writer.writerow(row_dict.values())

    # rename the file name: copy the temp file to its final name, then
    # remove the temp file (storage backends have no atomic rename).
    file_path = '%sgroup_%d_%s_%s.csv' % (file_dir,
                                          group.id,
                                          export_target,
                                          identifier)
    default_storage.save(file_path,
                         default_storage.open(file_path_temp, 'rb'))

    # delete the temp file
    default_storage.delete(file_path_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(id=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('group.members_export_download',
                               args=[group.slug, export_target, identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'group': group,
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name
        }
        subject = render_to_string(
            'user_groups/exports/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string('user_groups/exports/export_ready_body.html',
                                parms)
        email = Email(recipient=user.email, subject=subject, body=body)
        email.send()
def process_invoice_export(start_dt=None, end_dt=None,
                           identifier=u'', user_id=0):
    """
    Export invoices updated between start_dt and end_dt to a CSV
    file, then email the requesting user a download link.

    Args:
        start_dt, end_dt: bounds applied to ``Invoice.update_dt``.
        identifier: unique token used in the export file name;
            defaults to the current timestamp.
        user_id: pk of the user to notify when the export is ready.
    """
    fields = ['id', 'guid', 'object_type', 'object_id', 'title',
              'tender_date',
              'bill_to', 'bill_to_first_name', 'bill_to_last_name',
              'bill_to_company', 'bill_to_address', 'bill_to_city',
              'bill_to_state', 'bill_to_zip_code', 'bill_to_country',
              'bill_to_phone', 'bill_to_fax', 'bill_to_email',
              'ship_to', 'ship_to_first_name', 'ship_to_last_name',
              'ship_to_company', 'ship_to_address', 'ship_to_city',
              'ship_to_state', 'ship_to_zip_code', 'ship_to_country',
              'ship_to_phone', 'ship_to_fax', 'ship_to_email',
              'ship_to_address_type',
              'receipt', 'gift', 'arrival_date_time', 'greeting',
              'instructions', 'po', 'terms', 'due_date', 'ship_date',
              'ship_via', 'fob', 'project', 'other', 'message',
              'subtotal', 'shipping', 'shipping_surcharge',
              'box_and_packing', 'tax_exempt', 'tax_exemptid',
              'tax_rate', 'taxable', 'tax', 'variance',
              'discount_amount', 'total', 'payments_credits', 'balance',
              'disclaimer', 'variance_notes', 'admin_notes',
              'create_dt', 'update_dt',
              'creator', 'creator_username', 'owner', 'owner_username',
              'status_detail']

    identifier = identifier or int(ttime.time())
    file_name_temp = 'export/invoices/%s_temp.csv' % identifier

    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(fields)

        invoices = Invoice.objects.filter(status=True,
                                          update_dt__gte=start_dt,
                                          update_dt__lte=end_dt)
        for invoice in invoices:
            items_list = []
            for field_name in fields:
                item = getattr(invoice, field_name)
                if item is None:
                    item = ''
                if item:
                    # normalize date/time values; other non-empty values
                    # pass through to smart_str below
                    if isinstance(item, datetime):
                        item = item.strftime('%Y-%m-%d %H:%M:%S')
                    elif isinstance(item, date):
                        item = item.strftime('%Y-%m-%d')
                    elif isinstance(item, time):
                        item = item.strftime('%H:%M:%S')
                    elif isinstance(item, basestring):
                        item = item.encode("utf-8")
                item = smart_str(item).decode('utf-8')
                items_list.append(item)
            csv_writer.writerow(items_list)

    # rename the file name
    file_name = 'export/invoices/%s.csv' % identifier
    # Fix: close the temp-file handle instead of leaking it after save().
    with default_storage.open(file_name_temp, 'rb') as f:
        default_storage.save(file_name, f)

    # delete the temp file
    default_storage.delete(file_name_temp)

    # notify user that export is ready to download
    [user] = User.objects.filter(pk=user_id)[:1] or [None]
    if user and user.email:
        download_url = reverse('invoice.export_download', args=[identifier])
        site_url = get_setting('site', 'global', 'siteurl')
        site_display_name = get_setting('site', 'global', 'sitedisplayname')
        parms = {
            'download_url': download_url,
            'user': user,
            'site_url': site_url,
            'site_display_name': site_display_name,
            'start_dt': start_dt,
            'end_dt': end_dt}
        subject = render_to_string(
            'invoices/notices/export_ready_subject.html', parms)
        subject = subject.strip('\n').strip('\r')
        body = render_to_string(
            'invoices/notices/export_ready_body.html', parms)
        email = Email(
            recipient=user.email,
            subject=subject,
            body=body)
        email.send()
def process_export(
        group_id, export_target='all', identifier=u'', user_id=0):
    """
    Process export for group members and/or group subscribers.

    Args:
        group_id: pk of the Group to export.
        export_target: 'regular' (group members), 'subscribers', or
            'all' for both.
        identifier: unique token used in the export file name;
            defaults to the current timestamp.
        user_id: pk of the requesting user (kept for interface
            compatibility; not used in this version).
    """
    [group] = Group.objects.filter(id=group_id)[:1] or [None]
    if not group:
        return

    # pull 100 rows per query -- be careful of the memory usage
    rows_per_batch = 100

    identifier = identifier or int(time.time())
    file_dir = 'export/groups/'
    # Fix: use %s for the identifier -- the original '%d' raised
    # TypeError whenever a string identifier was passed (the parameter
    # default is u'').
    file_name_temp = '%sgroup_%d_%s_%s_temp.csv' % (
        file_dir, group.id, export_target, identifier)

    # labels
    subscribers_labels = None
    regular_labels = None
    if export_target in ['subscribers', 'all']:
        # distinct list of field labels used by this group's subscribers
        subscribers_labels = list(set([
            label for (label, ) in SubscriberData.objects.filter(
                subscription__group=group).values_list('field_label')]))
    if export_target in ['regular', 'all']:
        user_fields = ['id', 'first_name', 'last_name', 'email',
                       'is_active', 'is_staff', 'is_superuser']
        profile_fields = ['direct_mail', 'company', 'address', 'address2',
                          'city', 'state', 'zipcode', 'country', 'phone',
                          'create_dt']
        regular_labels = user_fields + profile_fields

    if regular_labels and subscribers_labels:
        labels = regular_labels + subscribers_labels
    elif regular_labels:
        labels = regular_labels
    elif subscribers_labels:
        labels = subscribers_labels
    else:
        # Fix: an unknown export_target used to raise UnboundLocalError
        # on 'labels'; bail out explicitly instead.
        return

    field_dict = OrderedDict([(label.lower().replace(" ", "_"), '')
                              for label in labels])

    with default_storage.open(file_name_temp, 'wb') as csvfile:
        csv_writer = UnicodeWriter(csvfile, encoding='utf-8')
        csv_writer.writerow(field_dict.keys())

        # process regular group members
        if export_target in ['regular', 'all']:
            members_qs = group.members.filter(
                group_member__status=True,
                group_member__status_detail='active')
            count_members = members_qs.count()
            num_rows_processed = 0
            while num_rows_processed < count_members:
                users = members_qs.select_related('profile')[
                    num_rows_processed:(num_rows_processed + rows_per_batch)]
                num_rows_processed += rows_per_batch

                for user in users:
                    # Fix: fresh copy per user (the original copied once
                    # per batch, so a previous user's value could leak
                    # into the next row when an attribute was missing).
                    row_dict = field_dict.copy()
                    profile = user.profile
                    for field_name in user_fields:
                        if hasattr(user, field_name):
                            row_dict[field_name] = getattr(user, field_name)
                    for field_name in profile_fields:
                        if hasattr(profile, field_name):
                            row_dict[field_name] = getattr(
                                profile, field_name)

                    # stringify non-string values for the csv writer
                    for k, v in row_dict.items():
                        if not isinstance(v, basestring):
                            if isinstance(v, datetime):
                                row_dict[k] = v.strftime(
                                    '%Y-%m-%d %H:%M:%S')
                            elif isinstance(v, date):
                                row_dict[k] = v.strftime('%Y-%m-%d')
                            else:
                                row_dict[k] = smart_str(v)
                    csv_writer.writerow(row_dict.values())

        # process for subscribers
        if export_target in ['subscribers', 'all']:
            count_subscriptions = GroupSubscription.objects.filter(
                group=group).count()
            num_rows_processed = 0
            while num_rows_processed < count_subscriptions:
                subscription_ids = GroupSubscription.objects.filter(
                    group=group).order_by('id').values_list(
                    'id', flat=True)[
                    num_rows_processed:(num_rows_processed +
                                        rows_per_batch)]
                num_rows_processed += rows_per_batch

                if subscription_ids:
                    ssdata = SubscriberData.objects.filter(
                        subscription__group=group,
                        subscription_id__in=subscription_ids)
                    if ssdata:
                        # NOTE(review): this emits one row per *batch* of
                        # subscriptions, with later subscriptions in the
                        # batch overwriting earlier ones -- presumably it
                        # should write one row per subscription; confirm
                        # intended output before changing.
                        row_dict = field_dict.copy()
                        for sd in ssdata:
                            field_name = sd.field_label.lower().replace(
                                " ", "_")
                            row_dict[field_name] = sd.value
                        csv_writer.writerow(row_dict.values())

    # rename the file name
    file_name = '%sgroup_%d_%s_%s.csv' % (
        file_dir, group.id, export_target, identifier)
    # Fix: close the temp-file handle instead of leaking it after save().
    with default_storage.open(file_name_temp, 'rb') as f:
        default_storage.save(file_name, f)
    # delete the temp file
    default_storage.delete(file_name_temp)
    # Fix: removed leftover debug statement 'print file_name'.