def simple_async_csv(job_pk, qs):
    """Serialize *qs* to CSV in a temporary file and attach the result to
    the ExportJob identified by *job_pk*, then persist the job."""
    export_job = ExportJob.objects.get(pk=job_pk)
    csv_buffer = TemporaryFile()
    write_csv(qs, csv_buffer)
    csv_name = generate_filename(qs)
    export_job.complete_with(csv_name, File(csv_buffer))
    export_job.save()
def upload(request):
    """Export all Elements rows as CSV, mirror the file into a GCS bucket,
    and return the CSV as the HTTP response.

    The GCS upload is best-effort: failures are logged and the CSV is
    still returned to the client.
    """
    qs = models.Elements.objects.all()
    filename = djqscsv.generate_filename(qs, append_datestamp=True)
    # Process-wide retry defaults applied to all subsequent GCS calls.
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    file_obj = djqscsv.render_to_csv_response(qs, filename)
    try:
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(bucket + '/' + filename, 'w',
                            content_type='text/csv',
                            options={'x-goog-meta-foo': 'foo',
                                     'x-goog-meta-bar': 'bar'},
                            retry_params=write_retry_params)
        # Close even if the write fails, so the handle is not leaked.
        try:
            gcs_file.write(file_obj.content)
        finally:
            gcs_file.close()
    except Exception as e:  # pylint: disable=broad-except
        logging.exception(e)
    # Original returned None, which is invalid for a Django view; the
    # rendered CSV response is the natural payload.
    return file_obj
def gerar_csv_mp():
    """Generate the Mercado Pago beneficiaries CSV, zip it, and e-mail a
    download link.

    Rows are only marked as sent (enviado_para_mercado_pago) when the
    queryset row count matches the generated file's row count; any
    exception is logged rather than propagated.
    """
    try:
        queryset = Responsavel.objects.filter(
            status__in=(Responsavel.STATUS_ATUALIZADO_EOL,
                        Responsavel.STATUS_INCONSISTENCIA_RESOLVIDA),
            enviado_para_mercado_pago=False)
        queryset_to_csv = queryset.annotate(
            get_celular=Concat('ddd_celular', V(' '), 'celular')
        ).values(
            'nome', 'alunos__nome', 'codigo_eol_aluno', 'cpf', 'email',
            'get_celular', 'vinculo', 'data_nascimento', 'nome_mae',
            'status', 'nao_possui_celular', 'nao_possui_email',
            'responsavel_alterado'
        )
        qtd_linhas_qs = queryset_to_csv.count()
        nome_arquivo = generate_filename(queryset_to_csv,
                                         append_datestamp=True)
        path = join(MEDIA_ROOT, nome_arquivo)
        log.info('Inicia geração de arquivo CSV.')
        with open(path, 'wb') as csv_file:
            write_csv(queryset_to_csv, csv_file,
                      field_header_map={'nome': 'nome_responsavel',
                                        'alunos__nome': 'nome_aluno',
                                        'get_celular': 'celular',
                                        'nome_mae': 'nome_mae_responsavel'},
                      use_verbose_names=False)
        # Count data rows (header excluded); the original leaked this
        # file handle — a with-block closes it promptly.
        with open(path) as csv_readback:
            reader = csv.reader(csv_readback)
            qtd_linhas_arquivo = len(list(reader)) - 1
        log.info(f'CSV gerado: {nome_arquivo} - '
                 f'Quantidade de linhas: {qtd_linhas_arquivo}')
        log.info('Comprimindo arquivo')
        # The original never closed the ZipFile, so the archive's central
        # directory could be left unwritten; close via context manager.
        with zipfile.ZipFile(path.replace('.csv', '.zip'), 'w') as zip_obj:
            zip_obj.write(path, basename(path))
        if qtd_linhas_qs == qtd_linhas_arquivo and qtd_linhas_qs > 0:
            hoje = date.today()
            log.info('Inicia Atualização dos registros para enviados '
                     'para mercado pago')
            queryset.update(enviado_para_mercado_pago=True,
                            data_envio_mercado_pago=hoje)
            log.info('Inicia envio de e-mail para o MP')
            enviar_email_mp(
                assunto=f'Lista de novos beneficiarios - {hoje}',
                # 'automÑtico' in the original was mojibake for 'automático'.
                mensagem=(f'E-mail automático. Não responda. '
                          f'Clique neste link para fazer download do csv: '
                          f'{url(nome_arquivo)}'),
                csv=url(nome_arquivo)
            )
        else:
            # TODO: persist this divergence in the database instead of
            # only logging it.
            log.info(
                f'Divergencia no número de linhas da query '
                f'({qtd_linhas_qs}) com o número de linhas do arquivo '
                f'gerado ({qtd_linhas_arquivo}) ou query sem registro. '
                'Registros não foram atualizados e e-mail não foi enviado.')
    except Exception as e:
        log.error('Falha no processo de geração de arquivo e envio de '
                  'e-mail: ' + str(e))
def simple_async_csv(job, qs):
    """Write *qs* as CSV into a temp file, mark *job* complete with it,
    and save the job."""
    scratch = TemporaryFile()
    write_csv(qs, scratch)
    name = generate_filename(qs)
    job.complete_with(name, File(scratch))
    job.save()
def async_csv_export(job, model, query, display_filters):
    """Run a CSV export for *job* and record the outcome on it.

    job -- ExportJob-like object; mutated (status or complete_with) and
           saved at the end in every branch.
    model -- 'species' exports the instance's species table; any other
             value is treated as a tree export.
    query, display_filters -- forwarded to Filter to select the trees.
    """
    instance = job.instance
    if model == 'species':
        initial_qs = (Species.objects.filter(instance=instance))
        extra_select, values = extra_select_and_values_for_model(
            instance, job, 'treemap_species', 'Species')
        # NOTE(review): list + dict.keys() only works on Python 2 —
        # confirm this module targets Py2, else wrap keys() in list().
        ordered_fields = values + extra_select.keys()
        limited_qs = initial_qs.extra(select=extra_select)\
            .values(*ordered_fields)
    else:  # model == 'tree'
        # TODO: if an anonymous job with the given query has been
        # done since the last update to the audit records table,
        # just return that job

        # get the plots for the provided
        # query and turn them into a tree queryset
        initial_qs = Filter(query, display_filters, instance)\
            .get_objects(Tree)
        extra_select_tree, values_tree = extra_select_and_values_for_model(
            instance, job, 'treemap_tree', 'Tree')
        extra_select_plot, values_plot = extra_select_and_values_for_model(
            instance, job, 'treemap_mapfeature', 'Plot', prefix='plot')
        extra_select_sp, values_sp = extra_select_and_values_for_model(
            instance, job, 'treemap_species', 'Species', prefix='species')
        # Replace the raw geometry column with explicit x/y columns.
        if 'plot__geom' in values_plot:
            values_plot = [f for f in values_plot if f != 'plot__geom']
            values_plot += ['plot__geom__x', 'plot__geom__y']
        # Reproject web-mercator geometry to SRID 4326 (lat/long) in SQL.
        get_ll = 'ST_Transform(treemap_mapfeature.the_geom_webmercator, 4326)'
        extra_select = {
            'plot__geom__x': 'ST_X(%s)' % get_ll,
            'plot__geom__y': 'ST_Y(%s)' % get_ll
        }
        extra_select.update(extra_select_tree)
        extra_select.update(extra_select_plot)
        extra_select.update(extra_select_sp)
        ordered_fields = (sorted(values_tree) +
                          sorted(values_plot) +
                          sorted(values_sp))
        if ordered_fields:
            limited_qs = initial_qs.extra(select=extra_select)\
                .values(*ordered_fields)
        else:
            # Nothing exportable at all for this user/model.
            limited_qs = initial_qs.none()
    if not initial_qs.exists():
        job.status = ExportJob.EMPTY_QUERYSET_ERROR
    # if the initial queryset was not empty but the limited queryset
    # is empty, it means that there were no fields which the user
    # was allowed to export.
    elif not limited_qs.exists():
        job.status = ExportJob.MODEL_PERMISSION_ERROR
    else:
        csv_file = TemporaryFile()
        write_csv(limited_qs, csv_file, field_order=ordered_fields)
        job.complete_with(generate_filename(limited_qs), File(csv_file))
    job.save()
def async_csv_export(job, model, query, display_filters):
    """Export species or plot/tree data for *job* to CSV and save the job.

    Accumulates extra SQL selects in an OrderedDict (mutated in place by
    _values_for_model), renders the rows through write_csv with header
    and serializer maps, and stores the file on the job; sets an error
    status instead when nothing is exportable.
    """
    instance = job.instance
    select = OrderedDict()
    select_params = []
    field_header_map = {}
    field_serializer_map = {}
    if model == 'species':
        initial_qs = (Species.objects.
                      filter(instance=instance))
        values = _values_for_model(instance, job, 'treemap_species',
                                   'Species', select, select_params)
        # NOTE(review): list + dict.keys() is Python 2 only — confirm
        # the target runtime.
        field_names = values + select.keys()
        limited_qs = (initial_qs
                      .extra(select=select, select_params=select_params)
                      .values(*field_names))
    else:  # model == 'tree'
        # TODO: if an anonymous job with the given query has been
        # done since the last update to the audit records table,
        # just return that job

        # get the plots for the provided
        # query and turn them into a tree queryset
        initial_qs = Filter(query, display_filters, instance)\
            .get_objects(Plot)
        tree_fields = _values_for_model(
            instance, job, 'treemap_tree', 'Tree',
            select, select_params, prefix='tree')
        plot_fields = _values_for_model(
            instance, job, 'treemap_mapfeature', 'Plot',
            select, select_params)
        species_fields = _values_for_model(
            instance, job, 'treemap_species', 'Species',
            select, select_params, prefix='tree__species')
        # Replace the raw geometry column with explicit x/y columns.
        if 'geom' in plot_fields:
            plot_fields = [f for f in plot_fields if f != 'geom']
            plot_fields += ['geom__x', 'geom__y']
        # Flag whether the plot actually has a tree attached.
        if tree_fields:
            select['tree_present'] = "treemap_tree.id is not null"
            plot_fields += ['tree_present']
        # Reproject web-mercator geometry to SRID 4326 (lat/long) in SQL.
        get_ll = 'ST_Transform(treemap_mapfeature.the_geom_webmercator, 4326)'
        select['geom__x'] = 'ST_X(%s)' % get_ll
        select['geom__y'] = 'ST_Y(%s)' % get_ll
        plot_fields += ['updated_by__username']
        field_names = set(tree_fields + plot_fields + species_fields)
        if field_names:
            field_header_map = _csv_field_header_map(field_names)
            field_serializer_map = _csv_field_serializer_map(instance,
                                                             field_names)
            limited_qs = (initial_qs
                          .extra(select=select,
                                 select_params=select_params)
                          .values(*field_header_map.keys()))
        else:
            # Nothing exportable at all for this user/model.
            limited_qs = initial_qs.none()
    if not initial_qs.exists():
        job.status = ExportJob.EMPTY_QUERYSET_ERROR
    # if the initial queryset was not empty but the limited queryset
    # is empty, it means that there were no fields which the user
    # was allowed to export.
    elif not limited_qs.exists():
        job.status = ExportJob.MODEL_PERMISSION_ERROR
    else:
        csv_file = TemporaryFile()
        write_csv(limited_qs, csv_file,
                  field_order=field_header_map.keys(),
                  field_header_map=field_header_map,
                  field_serializer_map=field_serializer_map)
        # The queryset is rooted at Plot, so the generated name says
        # 'plot'; rename for the tree export.
        filename = generate_filename(limited_qs).replace('plot', 'tree')
        job.complete_with(filename, File(csv_file))
    job.save()
def csv_export(job_pk, model, query, display_filters):
    """Synchronous CSV export: fetch the ExportJob by primary key, write
    the species or tree CSV into its outfile, and save the job.

    Unlike the async variants, completion is recorded via
    job.outfile.save + ExportJob.COMPLETE rather than complete_with.
    """
    job = ExportJob.objects.get(pk=job_pk)
    instance = job.instance
    if model == 'species':
        initial_qs = (Species.objects.
                      filter(instance=instance))
        extra_select, values = extra_select_and_values_for_model(
            instance, job, 'treemap_species', 'species')
        # NOTE(review): list + dict.keys() is Python 2 only — confirm
        # the target runtime.
        ordered_fields = values + extra_select.keys()
        limited_qs = initial_qs.extra(select=extra_select)\
            .values(*ordered_fields)
    else:  # model == 'tree'
        # TODO: if an anonymous job with the given query has been
        # done since the last update to the audit records table,
        # just return that job

        # get the plots for the provided
        # query and turn them into a tree queryset
        initial_qs = Filter(query, display_filters, instance)\
            .get_objects(Tree)
        extra_select_tree, values_tree = extra_select_and_values_for_model(
            instance, job, 'treemap_tree', 'Tree')
        extra_select_plot, values_plot = extra_select_and_values_for_model(
            instance, job, 'treemap_mapfeature', 'Plot', prefix='plot')
        extra_select_sp, values_sp = extra_select_and_values_for_model(
            instance, job, 'treemap_species', 'Species', prefix='species')
        # Replace the raw geometry column with explicit x/y columns.
        if 'plot__geom' in values_plot:
            values_plot = [f for f in values_plot if f != 'plot__geom']
            values_plot += ['plot__geom__x', 'plot__geom__y']
        # NOTE(review): unlike the other exporters, this takes X/Y of the
        # raw web-mercator geometry without ST_Transform to 4326 —
        # confirm whether projected coordinates are intended here.
        extra_select = {'plot__geom__x':
                        'ST_X(treemap_mapfeature.the_geom_webmercator)',
                        'plot__geom__y':
                        'ST_Y(treemap_mapfeature.the_geom_webmercator)'}
        extra_select.update(extra_select_tree)
        extra_select.update(extra_select_plot)
        extra_select.update(extra_select_sp)
        ordered_fields = (sorted(values_tree) +
                          sorted(values_plot) +
                          sorted(values_sp))
        if ordered_fields:
            limited_qs = initial_qs.extra(select=extra_select)\
                .values(*ordered_fields)
        else:
            # Nothing exportable at all for this user/model.
            limited_qs = initial_qs.none()
    if not initial_qs.exists():
        job.status = ExportJob.EMPTY_QUERYSET_ERROR
    # if the initial queryset was not empty but the limited queryset
    # is empty, it means that there were no fields which the user
    # was allowed to export.
    elif not limited_qs.exists():
        job.status = ExportJob.MODEL_PERMISSION_ERROR
    else:
        csv_file = TemporaryFile()
        write_csv(limited_qs, csv_file, field_order=ordered_fields)
        csv_name = generate_filename(limited_qs)
        job.outfile.save(csv_name, File(csv_file))
        job.status = ExportJob.COMPLETE
    job.save()
def async_csv_export(job, model, query, display_filters):
    """Export species or plot/tree data for *job* to CSV and save the job.

    Accumulates extra SQL selects in an OrderedDict (mutated in place by
    _values_for_model), renders the rows with a header map through
    write_csv, and stores the file on the job; sets an error status
    instead when nothing is exportable.
    """
    instance = job.instance
    select = OrderedDict()
    select_params = []
    field_header_map = {}
    if model == 'species':
        initial_qs = (Species.objects.filter(instance=instance))
        values = _values_for_model(instance, job, 'treemap_species',
                                   'Species', select, select_params)
        # NOTE(review): list + dict.keys() is Python 2 only — confirm
        # the target runtime.
        field_names = values + select.keys()
        limited_qs = (initial_qs.extra(
            select=select, select_params=select_params).values(*field_names))
    else:  # model == 'tree'
        # TODO: if an anonymous job with the given query has been
        # done since the last update to the audit records table,
        # just return that job

        # get the plots for the provided
        # query and turn them into a tree queryset
        initial_qs = Filter(query, display_filters, instance)\
            .get_objects(Plot)
        tree_fields = _values_for_model(instance, job, 'treemap_tree',
                                        'Tree', select, select_params,
                                        prefix='tree')
        plot_fields = _values_for_model(instance, job, 'treemap_mapfeature',
                                        'Plot', select, select_params)
        species_fields = _values_for_model(instance, job, 'treemap_species',
                                           'Species', select, select_params,
                                           prefix='tree__species')
        # Replace the raw geometry column with explicit x/y columns.
        if 'geom' in plot_fields:
            plot_fields = [f for f in plot_fields if f != 'geom']
            plot_fields += ['geom__x', 'geom__y']
        # Flag whether the plot actually has a tree attached.
        if tree_fields:
            select['tree_present'] = "treemap_tree.id is not null"
            plot_fields += ['tree_present']
        # Reproject web-mercator geometry to SRID 4326 (lat/long) in SQL.
        get_ll = 'ST_Transform(treemap_mapfeature.the_geom_webmercator, 4326)'
        select['geom__x'] = 'ST_X(%s)' % get_ll
        select['geom__y'] = 'ST_Y(%s)' % get_ll
        field_names = set(tree_fields + plot_fields + species_fields)
        if field_names:
            field_header_map = _csv_field_header_map(field_names)
            limited_qs = (initial_qs.extra(
                select=select,
                select_params=select_params).values(*field_header_map.keys()))
        else:
            # Nothing exportable at all for this user/model.
            limited_qs = initial_qs.none()
    if not initial_qs.exists():
        job.status = ExportJob.EMPTY_QUERYSET_ERROR
    # if the initial queryset was not empty but the limited queryset
    # is empty, it means that there were no fields which the user
    # was allowed to export.
    elif not limited_qs.exists():
        job.status = ExportJob.MODEL_PERMISSION_ERROR
    else:
        csv_file = TemporaryFile()
        write_csv(limited_qs, csv_file,
                  field_order=field_header_map.keys(),
                  field_header_map=field_header_map)
        # The queryset is rooted at Plot, so the generated name says
        # 'plot'; rename for the tree export.
        filename = generate_filename(limited_qs).replace('plot', 'tree')
        job.complete_with(filename, File(csv_file))
    job.save()
def download(request):
    """Stream every Elements row back to the client as a datestamped
    CSV download."""
    queryset = models.Elements.objects.all()
    csv_name = djqscsv.generate_filename(queryset, append_datestamp=True)
    return djqscsv.render_to_csv_response(queryset, csv_name)
def getExpressionCSV(request):
    """Return the unigene expression table (accession + description) as a
    datestamped CSV response."""
    queryset = (SelUnigeneTable.objects.all()
                .select_related('Expression')
                .values('accession', 'descrip'))
    csv_name = djqscsv.generate_filename(queryset, append_datestamp=True)
    return djqscsv.render_to_csv_response(queryset, csv_name)
def getBrowseCSV(request):
    """Return the contig browse table as a datestamped CSV response."""
    columns = ('idcontig', 'descrip', 'numreads', 'accession',
               'evalue', 'identity', 'selected')
    queryset = SelContigTable.objects.values(*columns)
    csv_name = djqscsv.generate_filename(queryset, append_datestamp=True)
    return djqscsv.render_to_csv_response(queryset, csv_name)
def async_csv_export(job, model, query, display_filters):
    """Run a CSV export for *job* and record the outcome on it.

    Variant using values_for_model, which mutates the shared *select*
    OrderedDict / *select_params* list in place while returning the
    plain field names for each model.
    """
    instance = job.instance
    select = OrderedDict()
    select_params = []
    if model == "species":
        initial_qs = Species.objects.filter(instance=instance)
        values = values_for_model(instance, job, "treemap_species",
                                  "Species", select, select_params)
        # NOTE(review): list + dict.keys() is Python 2 only — confirm
        # the target runtime.
        ordered_fields = values + select.keys()
        limited_qs = initial_qs.extra(
            select=select,
            select_params=select_params).values(*ordered_fields)
    else:  # model == 'tree'
        # TODO: if an anonymous job with the given query has been
        # done since the last update to the audit records table,
        # just return that job

        # get the plots for the provided
        # query and turn them into a tree queryset
        initial_qs = Filter(query, display_filters,
                            instance).get_objects(Tree)
        values_tree = values_for_model(instance, job, "treemap_tree",
                                       "Tree", select, select_params)
        values_plot = values_for_model(
            instance, job, "treemap_mapfeature", "Plot",
            select, select_params, prefix="plot"
        )
        values_sp = values_for_model(
            instance, job, "treemap_species", "Species",
            select, select_params, prefix="species"
        )
        # Replace the raw geometry column with explicit x/y columns.
        if "plot__geom" in values_plot:
            values_plot = [f for f in values_plot if f != "plot__geom"]
            values_plot += ["plot__geom__x", "plot__geom__y"]
        # Reproject web-mercator geometry to SRID 4326 (lat/long) in SQL.
        get_ll = "ST_Transform(treemap_mapfeature.the_geom_webmercator, 4326)"
        select["plot__geom__x"] = "ST_X(%s)" % get_ll
        select["plot__geom__y"] = "ST_Y(%s)" % get_ll
        ordered_fields = (sorted(values_tree) +
                          sorted(values_plot) +
                          sorted(values_sp))
        if ordered_fields:
            limited_qs = initial_qs.extra(
                select=select,
                select_params=select_params).values(*ordered_fields)
        else:
            # Nothing exportable at all for this user/model.
            limited_qs = initial_qs.none()
    if not initial_qs.exists():
        job.status = ExportJob.EMPTY_QUERYSET_ERROR
    # if the initial queryset was not empty but the limited queryset
    # is empty, it means that there were no fields which the user
    # was allowed to export.
    elif not limited_qs.exists():
        job.status = ExportJob.MODEL_PERMISSION_ERROR
    else:
        csv_file = TemporaryFile()
        write_csv(limited_qs, csv_file, field_order=ordered_fields)
        job.complete_with(generate_filename(limited_qs), File(csv_file))
    job.save()