def handle(self, *args, **options):
    """Report missing gettext/parler translations for configured languages.

    Exits the process with status 1 when any translation is missing so the
    command can gate CI pipelines.
    """
    langs = [
        lang[0] for lang in settings.LANGUAGES
        if lang[0] not in options['exclude']
    ]
    activate_translation(settings.LANGUAGE_CODE)
    self.stdout.write('')
    self._show_configured_languages(langs)
    self.stdout.write('')
    missing_translations = False
    # BUG FIX: the 'no_*' options are opt-outs, but the original tested them
    # without negation, running each stage exactly when the user asked to
    # skip it. Each stage now runs unless its 'no_*' flag is set.
    if not options['no_gettext']:
        if not options['no_gettext_update']:
            self._update_gettext_translation_files(langs)
            self.stdout.write('')
        missing_translations |= self._check_gettext_translations(
            langs, options['gettext_check_all'],
            options['gettext_source_has_language'])
        self.stdout.write('')
    if not options['no_parler']:
        missing_translations |= self._check_parler_translations(langs)
        self.stdout.write('')
    if missing_translations:
        # Non-zero exit signals failure to callers/CI.
        sys.exit(1)
def setup_app(request):
    """Install Oozie workspaces and examples (POST only).

    Raises PopupException on a non-POST request or when the HDFS-backed
    setup fails.
    """
    if request.method != 'POST':
        raise PopupException(_('A POST request is required.'))
    try:
        oozie_setup.Command().handle_noargs()
        activate_translation(request.LANGUAGE_CODE)
        request.info(_('Workspaces and examples installed.'))
    except WebHdfsException as e:  # 'as' form: valid on Py2.6+ and Py3
        # Message fix: original read 'The app setup could complete.',
        # dropping the negation.
        raise PopupException(_('The app setup could not complete.'), detail=e)
def install_examples(request):
    """Install Oozie examples, recording the outcome in a status dict.

    result['status'] is 0 on success, -1 on failure, with a human-readable
    result['message'] on failure.
    """
    result = {'status': -1, 'message': ''}
    if request.method != 'POST':
        result['message'] = _('A POST request is required.')
    else:
        try:
            oozie_setup.Command().handle_noargs()
            activate_translation(request.LANGUAGE_CODE)
            result['status'] = 0
        except Exception as e:  # 'as' form, consistent with import_from_s3
            LOG.exception(e)
            result['message'] = str(e)
    # NOTE(review): `result` is built but not returned here; the view
    # presumably serialized it into an HTTP response — confirm against caller.
def install_examples(request):
    """Run the Oozie example installer for a POST request.

    Tracks the outcome in a dict: status 0 with an empty message on
    success, status -1 plus an explanatory message otherwise.
    """
    # Pessimistic default: overwritten only when installation succeeds.
    result = {"status": -1, "message": ""}
    if request.method == "POST":
        try:
            oozie_setup.Command().handle_noargs()
            activate_translation(request.LANGUAGE_CODE)
        except Exception as e:
            LOG.exception(e)
            result["message"] = str(e)
        else:
            result["status"] = 0
    else:
        result["message"] = _("A POST request is required.")
def import_from_s3(self, s3_host, s3_key_id, s3_secret_key, cluster_pk,
                   replica_count, default_size=None, s3_port=None,
                   s3_is_secure=True, s3_validate_certs=True, subdir=None,
                   language=None):
    """Import S3 buckets into cluster volumes, one volume per bucket.

    Celery-style task (uses self.update_state to publish PROGRESS).
    Buckets that fail with a non-fatal error are skipped; any other
    exception aborts the whole import, annotated with what was done so far.

    Returns a dict with 'imported_buckets' (list of
    (bucket, volume, required_space, volume_created) tuples) and
    'skipped_buckets' (list of bucket names).

    Raises CeleryTaskError when replica_count is unset or the S3
    certificate is invalid.
    """
    if language is not None:
        # Localize error messages for the requesting user.
        activate_translation(language)
    cluster = Cluster.objects.get(pk=cluster_pk)
    if replica_count is None:
        raise CeleryTaskError(
            _("Replica count is not set. Buckets will not be imported."))
    s3_context = s3import.S3ConnectionContext(host=s3_host, port=s3_port,
                                              access_key_id=s3_key_id,
                                              secret_access_key=s3_secret_key,
                                              is_secure=s3_is_secure,
                                              validate_certs=s3_validate_certs)
    owner = cluster.build_volume_owner()
    meta = cluster.build_volume_meta()
    s3importer = s3import.S3Importer(
        volume_size=default_size, volume_owner=owner,
        volume_replica=replica_count, volume_meta=meta, sx=sx._sx,
        s3_context=s3_context, subdir=subdir,
        worker_num=settings.S3IMPORT_THREAD_NUMBER)
    try:
        buckets = s3importer.get_bucket_names()
    except InvalidCertificateException as e:
        # Surface TLS validation failures as a task-level error.
        raise CeleryTaskError(str(e))
    get_volume_name = get_volume_naming_function()
    volumes = [get_volume_name(bucket) for bucket in buckets]
    imported = []
    skipped = []
    try:
        total_buckets = len(buckets)
        # Up-front whole-batch checks; per-bucket re-checks happen below.
        check_for_resources(cluster_pk, volumes)
        if default_size is not None:
            check_for_space(cluster_pk, volumes, default_size)
        for bucket, volume in zip(buckets, volumes):
            try:
                # Two source buckets can map to the same destination name;
                # only the first wins.
                if volume in [item[1] for item in imported]:
                    raise CeleryNonFatalTaskError(
                        _("Bucket '{}' will not be imported because its "
                          "destination volume name '{}' has already been "
                          "used.").format(bucket, volume))
                bucket_obj = s3importer.s3.get_bucket(bucket)
                self.update_state(state='PROGRESS',
                                  meta={
                                      'copied': len(imported),
                                      'skipped': len(skipped),
                                      'total': total_buckets,
                                      'current_from': bucket,
                                      'current_to': volume
                                  })
                volume_name = cluster.build_name(volume)
                required_space = s3importer.calculate_required_space(
                    bucket_obj, volume_name)
                if required_space == 0:
                    # Empty bucket: skip rather than create an empty volume.
                    raise s3import.exceptions.S3NonFatalImportError(
                        _("Nothing to import for bucket '{}'").format(bucket))
                # Re-validate per bucket: earlier imports in this loop may
                # have consumed resources/space since the batch check.
                check_for_resources(cluster_pk, [volume])
                size = (s3importer.volume_size or
                        s3import.tools.calculate_volume_size(required_space))
                check_for_space(cluster_pk, [volume], size)
                s3importer.check_quota(required_space, volume_name)
                s3importer.check_size(required_space, volume_name)
                # Second PROGRESS update adds the computed data size.
                self.update_state(state='PROGRESS',
                                  meta={
                                      'copied': len(imported),
                                      'skipped': len(skipped),
                                      'total': total_buckets,
                                      'current_from': bucket,
                                      'current_to': volume,
                                      'data_size': required_space
                                  })
                volume_created = s3importer.create_volume(volume_name, size)
                s3importer.copy_keys_parallelly(bucket_obj, volume_name)
                imported.append(
                    (bucket, volume, required_space, volume_created))
            except (S3ResponseError,
                    s3import.exceptions.S3NonFatalImportError,
                    CeleryNonFatalTaskError):
                # Non-fatal: record the bucket as skipped and continue.
                skipped.append(bucket)
    except Exception as e:
        # Fatal: attach partial progress so callers can report/clean up.
        e.imported_buckets = imported
        e.skipped_buckets = skipped
        raise e
    return {'imported_buckets': imported, 'skipped_buckets': skipped}