def _build_return_value(self, schema, appstruct, query):
    """
    Delay the invoice export task and redirect to the job view
    """
    all_ids = [elem.id for elem in query]
    logger.debug(" + All_ids were collected : {0}".format(all_ids))
    service_ok, msg = check_alive()
    if not service_ok:
        if "popup" in self.request.GET:
            set_close_popup_response(self.request, error=msg)
            return self.request.response
        else:
            self.request.session.flash(msg, 'error')
            return HTTPFound(self.request.referrer)
    logger.debug(" + In the GlobalInvoicesCsvView._build_return_value")
    # Persist a job record that the task will update with its result
    job = FileGenerationJob()
    job.set_owner(self.request.user.login.login)
    self.request.dbsession.add(job)
    self.request.dbsession.flush()
    logger.debug(" + The job {job.id} was initialized".format(job=job))
    logger.debug(" + Delaying the export_to_file task")
    celery_job = export_to_file.delay(
        job.id, 'invoices', all_ids, self.filename, self.file_format)
    logger.info(u"The Celery Task {0} has been delayed, its result "
                u"should be retrieved from the FileGenerationJob {1}".format(
                    celery_job.id, job.id))
    return HTTPFound(
        self.request.route_path('job', id=job.id, _query={'popup': 1}))
def submit_success(self, importation_datas):
    """
    Submission has been called and the data has been validated

    :param dict importation_datas: The data we want to import
    """
    service_ok, msg = check_alive()
    if not service_ok:
        self.request.session.flash(msg, 'error')
        return HTTPFound(self.get_previous_step_route())
    log.info(u"Field association has been configured, we're going to import")
    action = importation_datas['action']
    csv_id_key = importation_datas['id_key']
    force_rel_creation = importation_datas.get('force_rel_creation', False)
    association_dict = self.build_association_dict(importation_datas)
    # Store the field association dictionary as a named preference
    if importation_datas.get('record_association', False):
        name = importation_datas['record_name']
        record_preference(self.request, name, association_dict)
    # Translate the "primary value" configured by the user into a model
    # attribute (if one was configured):
    # csv file column -> attribute of the model to import
    id_key = association_dict.get(csv_id_key, csv_id_key)
    job = self.get_recording_job()
    celery_job = async_import_datas.delay(
        self.model_type,
        job.id,
        association_dict,
        self.filepath,
        id_key,
        action,
        force_rel_creation,
        self.get_default_values(),
        self.delimiter,
        self.quotechar,
    )
    log.info(u" * The Celery Task {0} has been delayed, its result "
             u"should be retrieved from the CsvImportJob : {1}".format(
                 celery_job.id, job.id))
    return HTTPFound(self.request.route_path('job', id=job.id))
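# A minimal sketch (not from the source) of how the csv id column configured
# by the user is translated into a model attribute in submit_success above.
# The column names and model attributes below are hypothetical examples.
association_dict = {
    'Code client': 'code',   # csv column -> model attribute
    'Nom': 'name',
}
csv_id_key = 'Code client'
# Fall back to the csv column name itself when it was not mapped
id_key = association_dict.get(csv_id_key, csv_id_key)  # -> 'code'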
def compile_measures_view(context, request):
    """
    Handle compilation of measures

    :param obj context: The AccountingOperationUpload instance
    :param obj request: The pyramid request object
    """
    service_ok, msg = check_alive()
    if not service_ok:
        request.session.flash(msg, 'error')
        return HTTPFound(request.referrer)
    logger.debug(u"Compiling measures for upload {0}".format(context.id))
    query = request.dbsession.query(AccountingOperation.id)
    operation_ids = query.filter_by(upload_id=context.id).all()
    celery_job = compile_measures_task.delay(context.id, operation_ids)
    logger.info(u"The Celery Task {0} has been delayed, see celery logs for "
                u"details".format(celery_job.id))
    request.session.flash(u"Les indicateurs sont en cours de génération")
    return HTTPFound(request.referrer)
def compile_measures_view(context, request):
    """
    Handle compilation of measures

    :param obj context: The AccountingOperationUpload instance
    :param obj request: The pyramid request object
    """
    service_ok, msg = check_alive()
    if not service_ok:
        request.session.flash(msg, 'error')
        return HTTPFound(request.referrer)
    logger.debug(u"Compiling measures for upload {0}".format(context.id))
    celery_job = compile_measures_task.delay(context.id)
    logger.info(
        u"The Celery Task {0} has been delayed, see celery logs for "
        u"details".format(celery_job.id)
    )
    request.session.flash(u"Les indicateurs sont en cours de génération")
    return HTTPFound(request.referrer)
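# The two variants of compile_measures_view above differ only in whether the
# view collects the AccountingOperation ids itself or lets the task resolve
# them from the upload id. The sketch below illustrates the second convention
# on the task side; the decorator, the get_session helper and the task body
# are assumptions for illustration, not the project's actual implementation.
@celery_app.task
def compile_measures_task(upload_id, operation_ids=None):
    session = get_session()  # hypothetical helper returning a SQLAlchemy session
    if operation_ids is None:
        # Resolve the operations from the upload when the caller did not
        # collect them in the view
        operation_ids = [
            row.id for row in
            session.query(AccountingOperation.id).filter_by(upload_id=upload_id)
        ]
    # ... compile the measures for those operations ...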
def _build_return_value(self, schema, appstruct, query):
    """
    Delay the invoice export task and redirect to the job view
    """
    all_ids = [elem.id for elem in query]
    logger.debug(" + All_ids were collected : {0}".format(all_ids))
    service_ok, msg = check_alive()
    if not service_ok:
        if "popup" in self.request.GET:
            set_close_popup_response(self.request, error=msg)
            return self.request.response
        else:
            self.request.session.flash(msg, 'error')
            return HTTPFound(self.request.referrer)
    logger.debug(" + In the GlobalInvoicesCsvView._build_return_value")
    job = FileGenerationJob()
    job.set_owner(self.request.user.login.login)
    self.request.dbsession.add(job)
    self.request.dbsession.flush()
    logger.debug(" + The job {job.id} was initialized".format(job=job))
    logger.debug(" + Delaying the export_to_file task")
    celery_job = export_to_file.delay(
        job.id,
        'invoices',
        all_ids,
        self.filename,
        self.file_format
    )
    logger.info(
        u"The Celery Task {0} has been delayed, its result "
        u"should be retrieved from the FileGenerationJob {1}".format(
            celery_job.id, job.id
        )
    )
    return HTTPFound(
        self.request.route_path('job', id=job.id, _query={'popup': 1})
    )
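# The views above share one launch pattern: check that the Celery service
# answers, persist a job record the task will update, delay the task with the
# job id, then redirect the user to the job status page. The sketch below is
# a generic restatement of that pattern, not code from the source:
# `check_alive`, `SomeJob`, `some_task` and the 'job' route stand in for the
# project-specific objects used in the views above.
import logging

from pyramid.httpexceptions import HTTPFound

logger = logging.getLogger(__name__)


def launch_async_task_view(request, task_args):
    # 1. Refuse to enqueue anything if the Celery service is unreachable
    service_ok, msg = check_alive()
    if not service_ok:
        request.session.flash(msg, 'error')
        return HTTPFound(request.referrer)

    # 2. Persist a job record so the result can be retrieved later
    job = SomeJob()
    job.set_owner(request.user.login.login)
    request.dbsession.add(job)
    request.dbsession.flush()  # ensures job.id is available before .delay()

    # 3. Hand the work to Celery, passing only serializable arguments
    celery_job = some_task.delay(job.id, *task_args)
    logger.info(u"Delayed Celery task {0} for job {1}".format(
        celery_job.id, job.id,
    ))

    # 4. Redirect to the job view, which polls the job record for the result
    return HTTPFound(request.route_path('job', id=job.id))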