def fields(self, fields):
    """Inject <field> definitions into the managed Solr schema XML.

    Each entry in `fields` is a dict merged over DEFAULT_FIELD, rendered
    through FIELD_XML_TEMPLATE, and substituted for the
    '<!-- REPLACE FIELDS -->' placeholder in self.xml.
    """
    fields_xml = ''
    for field in fields:
        field_dict = DEFAULT_FIELD.copy()
        field_dict.update(field)
        fields_xml += FIELD_XML_TEMPLATE % field_dict + '\n'
    self.xml = force_unicode(force_unicode(self.xml).replace(u'<!-- REPLACE FIELDS -->', force_unicode(fields_xml)))
def _get_fq(self, collection, query):
    """Build the tuple of Solr 'fq' (filter query) parameters for a dashboard query.

    Handles the collection time filter, then each merged facet filter by type:
    'field', 'range', 'range-up' and 'map'. Returns a tuple of ('fq', value)
    pairs ready to be appended to the request parameters.
    """
    params = ()
    timeFilter = {}

    if collection:
        timeFilter = self._get_range_borders(collection, query)
    if timeFilter and not timeFilter.get('time_filter_overrides'):
        params += (('fq', urllib.unquote(utf_quoter('%(field)s:[%(from)s TO %(to)s]' % timeFilter))),)

    # Merge facets queries on same fields
    grouped_fqs = groupby(query['fqs'], lambda x: (x['type'], x['field']))
    merged_fqs = []
    for key, group in grouped_fqs:
        # First member of the group absorbs the filters of the others.
        field_fq = next(group)
        for fq in group:
            for f in fq['filter']:
                field_fq['filter'].append(f)
        merged_fqs.append(field_fq)

    for fq in merged_fqs:
        if fq['type'] == 'field':
            fields = fq['field'] if type(fq['field']) == list else [fq['field']]  # 2D facets support
            for field in fields:
                f = []
                for _filter in fq['filter']:
                    values = _filter['value'] if type(_filter['value']) == list else [_filter['value']]  # 2D facets support
                    if fields.index(field) < len(values):  # Lowest common field denominator
                        value = values[fields.index(field)]
                        exclude = '-' if _filter['exclude'] else ''
                        if value is not None and ' ' in force_unicode(value):
                            # Quote values containing spaces, escaping embedded quotes.
                            value = force_unicode(value).replace('"', '\\"')
                            f.append('%s%s:"%s"' % (exclude, field, value))
                        else:
                            f.append('%s{!field f=%s}%s' % (exclude, field, value))
                _params = '{!tag=%(id)s}' % fq + ' '.join(f)
                params += (('fq', urllib.unquote(utf_quoter(_params))),)
        elif fq['type'] == 'range':
            params += (('fq', '{!tag=%(id)s}' % fq + ' '.join([urllib.unquote(
                utf_quoter('%s%s:[%s TO %s}' % ('-' if field['exclude'] else '', fq['field'], f['from'], f['to'])))
                for field, f in zip(fq['filter'], fq['properties'])])),)
        elif fq['type'] == 'range-up':
            params += (('fq', '{!tag=%(id)s}' % fq + ' '.join([urllib.unquote(
                utf_quoter('%s%s:[%s TO %s}' % ('-' if field['exclude'] else '', fq['field'],
                                                f['from'] if fq['is_up'] else '*',
                                                '*' if fq['is_up'] else f['from'])))
                for field, f in zip(fq['filter'], fq['properties'])])),)
        elif fq['type'] == 'map':
            _keys = fq.copy()
            _keys.update(fq['properties'])
            params += (('fq', '{!tag=%(id)s}' % fq + urllib.unquote(
                utf_quoter('%(lat)s:[%(lat_sw)s TO %(lat_ne)s} AND %(lon)s:[%(lon_sw)s TO %(lon_ne)s}' % _keys))),)

    return params
def error_handler(view_fn):
    """Decorator: convert view exceptions into a JSON error payload.

    Http404 passes through; any other exception is turned into a
    {'status', 'message'} JSON response with HTTP 200.
    """
    def decorator(request, *args, **kwargs):
        try:
            return view_fn(request, *args, **kwargs)
        except Http404, e:
            raise e
        except Exception, e:
            # Prefer the Py2 `message` attribute when present and non-empty.
            if not hasattr(e, 'message') or not e.message:
                message = str(e)
            else:
                message = force_unicode(e.message, strings_only=True, errors='replace')
            if 'Invalid OperationHandle' in message and 'id' in kwargs:  # Expired state.
                query_history = authorized_get_query_history(request, kwargs['id'], must_exist=False)
                if query_history:
                    query_history.set_to_expired()
                    query_history.save()
            response = {
                'status': -1,
                'message': message,
            }
            if 'database is locked' in message or 'Invalid query handle' in message:
                response['status'] = 2  # Frontend will not display this type of error
            return HttpResponse(json.dumps(response), mimetype="application/json", status=200)
    # NOTE(review): no `return decorator` is visible here — snippet appears truncated.
def submit_coordinator(request, doc_id):
    """Submit an Oozie coordinator identified by numeric id or uuid.

    On a valid POST, submits with the parameter mapping and either returns a
    JSON status (format=json) or redirects to the coordinator page.
    """
    if doc_id.isdigit():
        coordinator = Coordinator(document=Document2.objects.get(id=doc_id))
    else:
        coordinator = Coordinator(document=Document2.objects.get_by_uuid(user=request.user, uuid=doc_id))

    ParametersFormSet = formset_factory(ParameterForm, extra=0)

    if request.method == 'POST':
        params_form = ParametersFormSet(request.POST)

        if params_form.is_valid():
            mapping = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
            mapping['dryrun'] = request.POST.get('dryrun_checkbox') == 'on'
            jsonify = request.POST.get('format') == 'json'
            try:
                job_id = _submit_coordinator(request, coordinator, mapping)
            except Exception, e:
                message = force_unicode(str(e))
                return JsonResponse({'status': -1, 'message': message}, safe=False)
            if jsonify:
                return JsonResponse({'status': 0, 'job_id': job_id, 'type': 'schedule'}, safe=False)
            else:
                request.info(_('Coordinator submitted.'))
                return redirect(reverse('oozie:list_oozie_coordinator', kwargs={'job_id': job_id}))
        else:
            request.error(_('Invalid submission form: %s' % params_form.errors))
    # NOTE(review): the GET branch (rendering the submit popup) is not visible —
    # snippet appears truncated.
def icon(self):
    """Return the static URL of the icon for this document's type.

    Dispatches on `self.extra` for Oozie v2 / notebook / search documents,
    then on the content type's app label; falls back to the generic Hue icon
    on any lookup failure.
    """
    apps = appmanager.get_apps_dict()

    try:
        if self.extra == 'workflow2':
            return staticfiles_storage.url('oozie/art/icon_oozie_workflow_48.png')
        elif self.extra == 'coordinator2':
            return staticfiles_storage.url('oozie/art/icon_oozie_coordinator_48.png')
        elif self.extra == 'bundle2':
            return staticfiles_storage.url('oozie/art/icon_oozie_bundle_48.png')
        elif self.extra == 'notebook':
            return staticfiles_storage.url('spark/art/icon_spark_48.png')
        elif self.extra.startswith('search'):
            return staticfiles_storage.url('search/art/icon_search_48.png')
        elif self.content_type.app_label == 'beeswax':
            # extra encodes the query type: '0' Hive, '3' Spark, otherwise Impala.
            if self.extra == '0':
                return staticfiles_storage.url(apps['beeswax'].icon_path)
            elif self.extra == '3':
                return staticfiles_storage.url(apps['spark'].icon_path)
            else:
                return staticfiles_storage.url(apps['impala'].icon_path)
        elif self.content_type.app_label == 'oozie':
            if self.extra == 'jobsub':
                return staticfiles_storage.url(apps['jobsub'].icon_path)
            else:
                return staticfiles_storage.url(self.content_type.model_class().ICON)
        elif self.content_type.app_label in apps:
            return staticfiles_storage.url(apps[self.content_type.app_label].icon_path)
        else:
            return staticfiles_storage.url('desktop/art/icon_hue_48.png')
    except Exception, e:
        LOG.warn(force_unicode(e))
        return staticfiles_storage.url('desktop/art/icon_hue_48.png')
def error_handler(view_fn):
    """Decorator: convert view exceptions into a JSON error payload.

    Http404 passes through. Known benign errors (locked DB, expired handles,
    serialization issues) are marked status 2 so the frontend hides them.
    """
    def decorator(request, *args, **kwargs):
        try:
            return view_fn(request, *args, **kwargs)
        except Http404, e:
            raise e
        except Exception, e:
            LOG.exception('error in %s' % view_fn)
            # Prefer the Py2 `message` attribute when present and non-empty.
            if not hasattr(e, 'message') or not e.message:
                message = str(e)
            else:
                message = force_unicode(e.message, strings_only=True, errors='replace')
            if 'Invalid OperationHandle' in message and 'id' in kwargs:  # Expired state.
                query_history = authorized_get_query_history(request, kwargs['id'], must_exist=False)
                if query_history:
                    query_history.set_to_expired()
                    query_history.save()
            response = {
                'status': -1,
                'message': message,
            }
            if re.search('database is locked|Invalid query handle|not JSON serializable', message, re.IGNORECASE):
                response['status'] = 2  # Frontend will not display this type of error
                LOG.warn('error_handler silencing the exception: %s' % e)
            return JsonResponse(response)
    # NOTE(review): no `return decorator` is visible here — snippet appears truncated.
def sync(self):
    """Best-effort: ensure every Oozie job has exactly one linked Document.

    Duplicates are deleted; missing documents are created, tagged as examples,
    and mirror the job's trashed/shared state. Failures are logged, not raised.
    """
    try:
        from oozie.models import Workflow, Coordinator, Bundle

        for job in list(chain(Workflow.objects.all(), Coordinator.objects.all(), Bundle.objects.all())):
            if job.doc.count() > 1:
                LOG.warn('Deleting duplicate document %s for %s' % (job.doc.all(), job))
                job.doc.all().delete()

            if not job.doc.exists():
                doc = Document.objects.link(job, owner=job.owner, name=job.name, description=job.description)
                tag = DocumentTag.objects.get_example_tag(user=job.owner)
                doc.tags.add(tag)

                if job.is_trashed:
                    doc.send_to_trash()
                if job.is_shared:
                    doc.share_to_default()
                if hasattr(job, 'managed'):
                    # Unmanaged jobs originate from the legacy jobsub app.
                    if not job.managed:
                        doc.extra = 'jobsub'
                        doc.save()

            if job.owner.username == SAMPLE_USERNAME:
                job.doc.get().share_to_default()
    except Exception, e:
        LOG.warn(force_unicode(e))
def config_validator(user): """ config_validator() -> [ (config_variable, error_message) ] Called by core check_config() view. """ from desktop.lib import i18n res = [ ] if not SECRET_KEY.get(): res.append((SECRET_KEY, unicode(_("Secret key should be configured as a random string.")))) # Validate SSL setup if SSL_CERTIFICATE.get(): res.extend(validate_path(SSL_CERTIFICATE, is_dir=False)) if not SSL_PRIVATE_KEY.get(): res.append((SSL_PRIVATE_KEY, unicode(_("SSL private key file should be set to enable HTTPS.")))) else: res.extend(validate_path(SSL_PRIVATE_KEY, is_dir=False)) # Validate encoding if not i18n.validate_encoding(DEFAULT_SITE_ENCODING.get()): res.append((DEFAULT_SITE_ENCODING, unicode(_("Encoding not supported.")))) # Validate kerberos if KERBEROS.HUE_KEYTAB.get() is not None: res.extend(validate_path(KERBEROS.HUE_KEYTAB, is_dir=False)) # Keytab should not be world or group accessible kt_stat = os.stat(KERBEROS.HUE_KEYTAB.get()) if stat.S_IMODE(kt_stat.st_mode) & 0077: res.append((KERBEROS.HUE_KEYTAB, force_unicode(_("Keytab should have 0600 permissions (has %o).") % stat.S_IMODE(kt_stat.st_mode)))) res.extend(validate_path(KERBEROS.KINIT_PATH, is_dir=False)) res.extend(validate_path(KERBEROS.CCACHE_PATH, is_dir=False)) if LDAP.LDAP_URL.get() is None != LDAP.BASE_DN.get() is None: res.append((LDAP.LDAP_URL, force_unicode(_("LDAP is only partially configured. An LDAP URL and BASE DN must be provided.")))) if LDAP.BIND_DN.get(): if LDAP.BIND_PASSWORD.get() is None: res.append((LDAP.BIND_PASSWORD, force_unicode(_("If you set bind_dn, then you must set bind_password.")))) return res
def decorator(*args, **kwargs): try: return func(*args, **kwargs) except Exception, e: message = force_unicode(str(e)) if 'error occurred while trying to connect to the Java server' in message: raise QueryError(_('%s: is the DB Proxy server running?') % message) else: raise QueryError(message)
def decorator(*args, **kwargs): try: return func(*args, **kwargs) except Exception, e: message = force_unicode(str(e)) if "Invalid query handle" in message or "Invalid OperationHandle" in message: raise QueryExpired(e) else: raise QueryError(message)
def decorator(*args, **kwargs):
    """Run the wrapped call; on failure, log and record the error in `response`."""
    response = {}
    try:
        return func(*args, **kwargs)
    except Exception, e:
        LOG.exception("Error running %s" % func)
        response["status"] = -1
        response["message"] = force_unicode(str(e))
    # NOTE(review): the except path falls through and returns None — presumably a
    # `finally: return JsonResponse(response)` follows in the full source (truncated here).
def decorator(*args, **kwargs): try: return func(*args, **kwargs) except Exception, e: message = force_unicode(str(e)) if 'Class com.mysql.jdbc.Driver not found' in message: raise QueryError(_('%s: did you export CLASSPATH=$CLASSPATH:/usr/share/java/mysql.jar?') % message) else: raise QueryError(message)
def decorator(*args, **kwargs): try: return func(*args, **kwargs) except StructuredException, e: message = force_unicode(str(e)) if 'timed out' in message: raise OperationTimeout(e) else: raise QueryError(message)
def decorator(*args, **kwargs):
    """Run the wrapped call; on failure, log and record the error in `response`."""
    response = {}
    try:
        return func(*args, **kwargs)
    except Exception, e:
        LOG.exception('Error running %s' % func)
        response['status'] = -1
        response['message'] = force_unicode(str(e))
    # NOTE(review): the except path falls through and returns None — presumably a
    # `finally: return JsonResponse(response)` follows in the full source (truncated here).
def error_handler(view_fn):
    """Decorator: log view exceptions and build a JSON error payload."""
    def decorator(*args, **kwargs):
        try:
            return view_fn(*args, **kwargs)
        except Http404, e:
            raise e
        except Exception, e:
            LOG.exception(e)
            response = {"status": -1, "message": force_unicode(str(e))}
    # NOTE(review): neither a return of `response` nor `return decorator` is
    # visible here — snippet appears truncated.
def remove_tag(request):
    """POST view: delete a tag (project) for the requesting user."""
    response = {'status': -1, 'message': _('Error')}

    if request.method == 'POST':
        try:
            DocumentTag.objects.delete_tag(request.POST['tag_id'], request.user)
            response['message'] = _('Project removed!')
            response['status'] = 0
        except Exception, e:
            response['message'] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def add_tag(request):
    """POST view: create a tag named request.POST['name'] for the user."""
    response = {'status': -1, 'message': ''}

    if request.method == 'POST':
        try:
            tag = DocumentTag.objects.create_tag(request.user, request.POST['name'])
            response['tag_id'] = tag.id
            response['status'] = 0
        except Exception, e:
            response['message'] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def remove_tag(request):
    """POST view: delete a tag (project) for the requesting user."""
    response = {"status": -1, "message": _("Error")}

    if request.method == "POST":
        try:
            DocumentTag.objects.delete_tag(request.POST["tag_id"], request.user)
            response["message"] = _("Project removed!")
            response["status"] = 0
        except Exception, e:
            response["message"] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def config_validator(user):
    """
    config_validator() -> [ (config_variable, error_message) ]

    Called by core check_config() view. Validates the secret key, SSL setup,
    site encoding, Kerberos paths/permissions, every configured LDAP server
    and the MySQL storage engine.
    """
    from desktop.lib import i18n

    res = []

    if not SECRET_KEY.get():
        res.append(
            (
                SECRET_KEY,
                unicode(_("Secret key should be configured as a random string. All sessions will be lost on restart")),
            )
        )

    # Validate SSL setup
    if SSL_CERTIFICATE.get():
        res.extend(validate_path(SSL_CERTIFICATE, is_dir=False))
        if not SSL_PRIVATE_KEY.get():
            res.append((SSL_PRIVATE_KEY, unicode(_("SSL private key file should be set to enable HTTPS."))))
        else:
            res.extend(validate_path(SSL_PRIVATE_KEY, is_dir=False))

    # Validate encoding
    if not i18n.validate_encoding(DEFAULT_SITE_ENCODING.get()):
        res.append((DEFAULT_SITE_ENCODING, unicode(_("Encoding not supported."))))

    # Validate kerberos
    if KERBEROS.HUE_KEYTAB.get() is not None:
        res.extend(validate_path(KERBEROS.HUE_KEYTAB, is_dir=False))
        # Keytab should not be world or group accessible
        kt_stat = os.stat(KERBEROS.HUE_KEYTAB.get())
        if stat.S_IMODE(kt_stat.st_mode) & 0077:
            res.append(
                (
                    KERBEROS.HUE_KEYTAB,
                    force_unicode(_("Keytab should have 0600 permissions (has %o).") % stat.S_IMODE(kt_stat.st_mode)),
                )
            )
        res.extend(validate_path(KERBEROS.KINIT_PATH, is_dir=False))
        res.extend(validate_path(KERBEROS.CCACHE_PATH, is_dir=False))

    # Validate each named LDAP server, or the flat legacy LDAP config.
    if LDAP.LDAP_SERVERS.get():
        for ldap_record_key in LDAP.LDAP_SERVERS.get():
            res.extend(validate_ldap(user, LDAP.LDAP_SERVERS.get()[ldap_record_key]))
    else:
        res.extend(validate_ldap(user, LDAP))

    # Validate MYSQL storage engine of all tables
    res.extend(validate_mysql_storage())

    return res
def update_tags(request):
    """POST view: replace a document's tags with the supplied tag ids."""
    response = {"status": -1, "message": ""}

    if request.method == "POST":
        request_json = json.loads(request.POST["data"])
        try:
            doc = DocumentTag.objects.update_tags(request.user, request_json["doc_id"], request_json["tag_ids"])
            response["doc"] = massage_doc_for_json(doc, request.user)
            response["status"] = 0
        except Exception, e:
            response["message"] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def update_tags(request):
    """POST view: replace a document's tags with the supplied tag ids."""
    response = {'status': -1, 'message': ''}

    if request.method == 'POST':
        request_json = json.loads(request.POST['data'])
        try:
            doc = DocumentTag.objects.update_tags(request.user, request_json['doc_id'], request_json['tag_ids'])
            response['doc'] = massage_doc_for_json(doc, request.user)
            response['status'] = 0
        except Exception, e:
            response['message'] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def tag(request):
    """POST view: attach a tag (by name or id) to a document."""
    response = {'status': -1, 'message': ''}

    if request.method == 'POST':
        request_json = json.loads(request.POST['data'])
        try:
            tag = DocumentTag.objects.tag(request.user, request_json['doc_id'], request_json.get('tag'), request_json.get('tag_id'))
            response['tag_id'] = tag.id
            response['status'] = 0
        except Exception, e:
            response['message'] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def remove_tags(request):
    """POST view: delete every tag id listed in the request payload."""
    response = {'status': -1, 'message': _('Error')}

    if request.method == 'POST':
        request_json = json.loads(request.POST['data'])
        try:
            for tag_id in request_json['tag_ids']:
                DocumentTag.objects.delete_tag(tag_id, request.user)
            response['message'] = _('Tag(s) removed!')
            response['status'] = 0
        except Exception, e:
            response['message'] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def query_error_handler(func):
    """Decorator: pass AuthenticationRequired through, wrap other errors in QueryError."""
    def decorator(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except AuthenticationRequired, e:
            raise e
        except Exception, e:
            message = force_unicode(str(e))
            if "error occurred while trying to connect to the Java server" in message:
                # DB Proxy is likely down — give an actionable hint.
                raise QueryError(_("%s: is the DB Proxy server running?") % message)
            else:
                raise QueryError(message)
    # NOTE(review): no `return decorator` is visible here — snippet appears truncated.
def add_tag(request):
    """POST view: create a tag and echo its metadata back for the frontend."""
    response = {"status": -1, "message": ""}

    if request.method == "POST":
        try:
            tag = DocumentTag.objects.create_tag(request.user, request.POST["name"])
            response["name"] = request.POST["name"]
            response["id"] = tag.id
            response["docs"] = []  # New tag starts with no documents attached.
            response["owner"] = request.user.username
            response["status"] = 0
        except Exception, e:
            response["message"] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def tag(request):
    """POST view: attach a tag (by name or id) to a document."""
    response = {"status": -1, "message": ""}

    if request.method == "POST":
        request_json = json.loads(request.POST["data"])
        try:
            tag = DocumentTag.objects.tag(
                request.user, request_json["doc_id"], request_json.get("tag"), request_json.get("tag_id")
            )
            response["tag_id"] = tag.id
            response["status"] = 0
        except Exception, e:
            response["message"] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def fetch_result(self, notebook, snippet, rows, start_over):
    """Fetch a snippet cell's output from the Livy session.

    Raises SessionExpired when Livy reports the session is gone; any other
    fetch failure is re-raised unchanged.
    """
    api = get_spark_api(self.user)
    session = _get_snippet_session(notebook, snippet)
    cell = snippet['result']['handle']['id']

    try:
        response = api.fetch_data(session['id'], cell)
    except Exception, e:
        message = force_unicode(str(e)).lower()
        if 'session not found' in message:
            raise SessionExpired(e)
        else:
            raise e
    # NOTE(review): `response` is never used past this point — the result-shaping
    # code appears truncated.
def check_status(self, notebook, snippet): api = get_spark_api(self.user) session = _get_snippet_session(notebook, snippet) cell = snippet["result"]["handle"]["id"] try: response = api.fetch_data(session["id"], cell) return {"status": response["state"]} except Exception, e: message = force_unicode(str(e)).lower() if "session not found" in message: raise SessionExpired(e) else: raise e
def update_permissions(request):
    """POST view: sync a document's sharing permissions from the JSON payload."""
    response = {'status': -1, 'message': _('Error')}

    if request.method == 'POST':
        data = json.loads(request.POST['data'])
        doc_id = request.POST['doc_id']
        try:
            doc = Document.objects.get_doc(doc_id, request.user)
            doc.sync_permissions(data)
            response['message'] = _('Permissions updated!')
            response['status'] = 0
            response['doc'] = massage_doc_for_json(doc, request.user)
        except Exception, e:
            response['message'] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def update_permissions(request):
    """POST view: sync a document's sharing permissions from the JSON payload."""
    response = {"status": -1, "message": _("Error")}

    if request.method == "POST":
        data = json.loads(request.POST["data"])
        doc_id = request.POST["doc_id"]
        try:
            doc = Document.objects.get_doc(doc_id, request.user)
            doc.sync_permissions(data)
            response["message"] = _("Permissions updated!")
            response["status"] = 0
            response["doc"] = massage_doc_for_json(doc, request.user)
        except Exception, e:
            response["message"] = force_unicode(e)
    # NOTE(review): no return of `response` is visible — snippet appears truncated.
def submit_coordinator(request, doc_id):
    """Submit an Oozie coordinator identified by numeric id or uuid.

    POST with a valid parameter formset submits the job and either returns a
    JSON status (format=json) or redirects to the coordinator page; otherwise
    the submit popup is rendered.
    """
    # TODO: Replace URL by desktop/scheduler API
    if doc_id.isdigit():
        coordinator = Coordinator(document=Document2.objects.get(id=doc_id))
    else:
        coordinator = Coordinator(document=Document2.objects.get_by_uuid(user=request.user, uuid=doc_id))

    ParametersFormSet = formset_factory(ParameterForm, extra=0)

    if request.method == 'POST':
        params_form = ParametersFormSet(request.POST)

        if params_form.is_valid():
            mapping = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
            mapping['dryrun'] = request.POST.get('dryrun_checkbox') == 'on'
            jsonify = request.POST.get('format') == 'json'
            try:
                job_id = _submit_coordinator(request, coordinator, mapping)
            except Exception as e:
                message = force_unicode(str(e))
                return JsonResponse({'status': -1, 'message': message}, safe=False)
            if jsonify:
                return JsonResponse({'status': 0, 'job_id': job_id, 'type': 'schedule'}, safe=False)
            else:
                request.info(_('Coordinator submitted.'))
                return redirect(reverse('oozie:list_oozie_coordinator', kwargs={'job_id': job_id}))
        else:
            request.error(_('Invalid submission form: %s' % params_form.errors))
    else:
        parameters = coordinator.find_all_parameters()
        initial_params = ParameterForm.get_initial_params(dict([(param['name'], param['value']) for param in parameters]))
        params_form = ParametersFormSet(initial=initial_params)

    # Reached on GET and on an invalid POST: render the submission popup.
    return render('/scheduler/submit_job_popup.mako', request, {
        'params_form': params_form,
        'name': coordinator.name,
        'action': reverse('oozie:editor_submit_coordinator', kwargs={'doc_id': coordinator.id}),
        'show_dryrun': True,
        'return_json': request.GET.get('format') == 'json'
    }, force_template=True)
if job.doc.count() > 1: LOG.warn('Deleting duplicate document %s for %s' % (job.doc.all(), job)) job.doc.all().delete() if not job.doc.exists(): doc = Document.objects.link(job, owner=job.owner, name=job.dict['name'], description='') tag = DocumentTag.objects.get_example_tag(user=job.owner) doc.tags.add(tag) if job.owner.username == SAMPLE_USERNAME: job.doc.get().share_to_default() except Exception, e: LOG.warn(force_unicode(e)) # Make sure doc have at least a tag try: for doc in Document.objects.filter(tags=None): default_tag = DocumentTag.objects.get_default_tag(doc.owner) doc.tags.add(default_tag) except Exception, e: LOG.warn(force_unicode(e)) # For now remove the default tag from the examples try: for doc in Document.objects.filter(tags__tag=DocumentTag.EXAMPLE): default_tag = DocumentTag.objects.get_default_tag(doc.owner) doc.tags.remove(default_tag) except Exception, e:
def config_validator(user):
    """
    config_validator() -> [ (config_variable, error_message) ]

    Called by core check_config() view. Validates the secret key, SSL setup,
    site encoding, Kerberos paths/permissions and the LDAP authentication
    configuration (search-bind vs direct-bind).
    """
    from desktop.lib import i18n

    res = []

    if not SECRET_KEY.get():
        res.append((SECRET_KEY, unicode(_("Secret key should be configured as a random string."))))

    # Validate SSL setup
    if SSL_CERTIFICATE.get():
        res.extend(validate_path(SSL_CERTIFICATE, is_dir=False))
        if not SSL_PRIVATE_KEY.get():
            res.append((SSL_PRIVATE_KEY, unicode(_("SSL private key file should be set to enable HTTPS."))))
        else:
            res.extend(validate_path(SSL_PRIVATE_KEY, is_dir=False))

    # Validate encoding
    if not i18n.validate_encoding(DEFAULT_SITE_ENCODING.get()):
        res.append((DEFAULT_SITE_ENCODING, unicode(_("Encoding not supported."))))

    # Validate kerberos
    if KERBEROS.HUE_KEYTAB.get() is not None:
        res.extend(validate_path(KERBEROS.HUE_KEYTAB, is_dir=False))
        # Keytab should not be world or group accessible
        kt_stat = os.stat(KERBEROS.HUE_KEYTAB.get())
        if stat.S_IMODE(kt_stat.st_mode) & 0077:
            res.append((KERBEROS.HUE_KEYTAB,
                        force_unicode(_("Keytab should have 0600 permissions (has %o).") % stat.S_IMODE(kt_stat.st_mode))))
        res.extend(validate_path(KERBEROS.KINIT_PATH, is_dir=False))
        res.extend(validate_path(KERBEROS.CCACHE_PATH, is_dir=False))

    if LDAP.SEARCH_BIND_AUTHENTICATION.get():
        # Search-bind: bind_dn and bind_password must be set together.
        if LDAP.LDAP_URL.get() is not None and bool(LDAP.BIND_DN.get()) != bool(LDAP.BIND_PASSWORD.get()):
            # NOTE(review): `== None` would idiomatically be `is None`.
            if LDAP.BIND_DN.get() == None:
                res.append((LDAP.BIND_DN, unicode(_("If you set bind_password, then you must set bind_dn."))))
            else:
                res.append((LDAP.BIND_PASSWORD, unicode(_("If you set bind_dn, then you must set bind_password."))))
    else:
        # Direct-bind: an NT domain or username pattern plus the LDAP URL are needed.
        if LDAP.NT_DOMAIN.get() is not None or \
                LDAP.LDAP_USERNAME_PATTERN.get() is not None:
            if LDAP.LDAP_URL.get() is None:
                res.append((LDAP.LDAP_URL, unicode(_("LDAP is only partially configured. An LDAP URL must be provided."))))
        if LDAP.LDAP_URL.get() is not None:
            if LDAP.NT_DOMAIN.get() is None and \
                    LDAP.LDAP_USERNAME_PATTERN.get() is None:
                res.append((LDAP.LDAP_URL, unicode(_("LDAP is only partially configured. An NT Domain or username "
                                                     "search pattern must be provided."))))
        if LDAP.LDAP_USERNAME_PATTERN.get() is not None and \
                '<username>' not in LDAP.LDAP_USERNAME_PATTERN.get():
            res.append((LDAP.LDAP_USERNAME_PATTERN, unicode(_("The LDAP username pattern should contain the special"
                                                              "<username> replacement string for authentication."))))

    return res
def decorator(*args, **kwargs): try: return view_fn(*args, **kwargs) except Http404, e: raise e except NavOptException, e: LOG.exception(e) response = { 'status': -1, 'message': e.message } except MissingSentryPrivilegeException, e: LOG.exception(e) response = { 'status': -1, 'message': 'Missing privileges for %s' % force_unicode(str(e)) } except Exception, e: LOG.exception(e) response = { 'status': -1, 'message': force_unicode(str(e)) } return JsonResponse(response, status=500) return decorator @require_POST @error_handler def get_tenant(request): response = {'status': -1}
def defaultField(self, df=None):
    """Set the default search field in the schema XML, or remove it when df is None."""
    self.xml = force_unicode(
        force_unicode(self.xml).replace(
            u'<str name="df">text</str>',
            # `%` binds tighter than the conditional: when df is given the <str>
            # tag is rendered with df; when df is None the placeholder is
            # replaced with the empty string (the entry is dropped).
            u'<str name="df">%s</str>' % force_unicode(df) if df is not None else ''))
def uniqueKeyField(self, unique_key_field):
    """Substitute the unique-key placeholder in the schema XML."""
    current = force_unicode(self.xml)
    replaced = current.replace(u'<!-- REPLACE UNIQUE KEY -->', force_unicode(unique_key_field))
    self.xml = force_unicode(replaced)
def utf_8_encoder(unicode_csv_data):
    """Lazily coerce each CSV line to unicode, dropping undecodable bytes."""
    # Even 'replace' seems to break the DictReader
    return (force_unicode(line, errors='ignore') for line in unicode_csv_data)
try: return func(*args, **kwargs) except SessionExpired, e: response['status'] = -2 except QueryExpired, e: response['status'] = -3 except AuthenticationRequired, e: response['status'] = 401 except ValidationError, e: response['status'] = -1 response['message'] = e.message except QueryError, e: LOG.exception('error running %s' % func) response['status'] = 1 response['message'] = force_unicode(str(e)) except Exception, e: LOG.exception('error running %s' % func) response['status'] = -1 response['message'] = force_unicode(str(e)) finally: if response: return JsonResponse(response) return decorator def json_error_handler(view_fn): def decorator(*args, **kwargs): try: return view_fn(*args, **kwargs)
def _get_fq(self, collection, query):
    """Build the tuple of Solr 'fq' (filter query) parameters for a dashboard query.

    Handles the collection time filter, each merged facet filter by type
    ('field', 'range', 'range-up', 'map'), and finally any nested-document
    filters. Returns a tuple of ('fq', value) pairs.
    """
    params = ()
    timeFilter = {}

    if collection:
        timeFilter = self._get_range_borders(collection, query)
    if timeFilter and not timeFilter.get('time_filter_overrides'):
        params += (('fq', urllib.unquote(utf_quoter('%(field)s:[%(from)s TO %(to)s]' % timeFilter))),)

    # Merge facets queries on same fields
    grouped_fqs = groupby(query['fqs'], lambda x: (x['type'], x['field']))
    merged_fqs = []
    for key, group in grouped_fqs:
        # First member of the group absorbs the filters of the others.
        field_fq = next(group)
        for fq in group:
            for f in fq['filter']:
                field_fq['filter'].append(f)
        merged_fqs.append(field_fq)

    for fq in merged_fqs:
        if fq['type'] == 'field':
            fields = fq['field'] if type(fq['field']) == list else [fq['field']]  # 2D facets support
            for field in fields:
                f = []
                for _filter in fq['filter']:
                    values = _filter['value'] if type(_filter['value']) == list else [_filter['value']]  # 2D facets support
                    if fields.index(field) < len(values):  # Lowest common field denominator
                        value = values[fields.index(field)]
                        exclude = '-' if _filter['exclude'] else ''
                        if value is not None and ' ' in force_unicode(value):
                            # Quote values containing spaces, escaping embedded quotes.
                            value = force_unicode(value).replace('"', '\\"')
                            f.append('%s%s:"%s"' % (exclude, field, value))
                        else:
                            f.append('%s{!field f=%s}%s' % (exclude, field, value))
                _params = '{!tag=%(id)s}' % fq + ' '.join(f)
                params += (('fq', urllib.unquote(utf_quoter(_params))),)
        elif fq['type'] == 'range':
            params += (('fq', '{!tag=%(id)s}' % fq + ' '.join([urllib.unquote(
                utf_quoter('%s%s:[%s TO %s}' % ('-' if field['exclude'] else '', fq['field'], f['from'], f['to'])))
                for field, f in zip(fq['filter'], fq['properties'])])),)
        elif fq['type'] == 'range-up':
            params += (('fq', '{!tag=%(id)s}' % fq + ' '.join([urllib.unquote(
                utf_quoter('%s%s:[%s TO %s}' % ('-' if field['exclude'] else '', fq['field'],
                                                f['from'] if fq['is_up'] else '*',
                                                '*' if fq['is_up'] else f['from'])))
                for field, f in zip(fq['filter'], fq['properties'])])),)
        elif fq['type'] == 'map':
            _keys = fq.copy()
            _keys.update(fq['properties'])
            params += (('fq', '{!tag=%(id)s}' % fq + urllib.unquote(
                utf_quoter('%(lat)s:[%(lat_sw)s TO %(lat_ne)s} AND %(lon)s:[%(lon_sw)s TO %(lon_ne)s}' % _keys))),)

    # Restrict to nested documents when the collection defines nested fields.
    nested_fields = self._get_nested_fields(collection)
    if nested_fields:
        params += (('fq', urllib.unquote(utf_quoter(' OR '.join(nested_fields)))),)

    return params
def __unicode__(self):
    """Unicode label: '<content_type> <name> <owner>'."""
    template = force_unicode('%s %s %s')
    return template % (self.content_type, self.name, self.owner)
def __unicode__(self):
    """Unicode label: the tag itself."""
    template = force_unicode('%s')
    return template % (self.tag,)
class DocumentManager(models.Manager):
    """Manager providing access-controlled queries over Document objects."""

    def documents(self, user):
        """Docs the user owns, or that are shared with them directly or via a group."""
        return Document.objects.filter(
            Q(owner=user) |
            Q(documentpermission__users=user) |
            Q(documentpermission__groups__in=user.groups.all())
        ).distinct()

    def get_docs(self, user, model_class=None, extra=None):
        """Accessible docs, optionally narrowed by content type and `extra` marker."""
        docs = Document.objects.documents(user).exclude(name='pig-app-hue-script')

        if model_class is not None:
            ct = ContentType.objects.get_for_model(model_class)
            docs = docs.filter(content_type=ct)

        if extra is not None:
            docs = docs.filter(extra=extra)

        return docs

    def get_doc(self, doc_id, user):
        """Single accessible doc by id; raises DoesNotExist when not visible."""
        return Document.objects.documents(user).get(id=doc_id)

    def trashed_docs(self, model_class, user):
        tag = DocumentTag.objects.get_trash_tag(user=user)
        return Document.objects.get_docs(user, model_class).filter(tags__in=[tag]).order_by('-last_modified')

    def trashed(self, model_class, user):
        """Content objects of trashed docs, skipping dangling links."""
        docs = self.trashed_docs(model_class, user)
        return [job.content_object for job in docs if job.content_object]

    def available_docs(self, model_class, user, with_history=False):
        """Non-trashed docs; history docs are excluded unless with_history."""
        exclude = [DocumentTag.objects.get_trash_tag(user=user)]
        if not with_history:
            exclude.append(DocumentTag.objects.get_history_tag(user=user))
        return Document.objects.get_docs(user, model_class).exclude(tags__in=exclude).order_by('-last_modified')

    def history_docs(self, model_class, user):
        """Docs tagged as history that are not trashed."""
        include = [DocumentTag.objects.get_history_tag(user=user)]
        exclude = [DocumentTag.objects.get_trash_tag(user=user)]
        return Document.objects.get_docs(user, model_class).filter(tags__in=include).exclude(tags__in=exclude).order_by('-last_modified')

    def available(self, model_class, user, with_history=False):
        """Content objects of available docs, skipping dangling links."""
        docs = self.available_docs(model_class, user, with_history)
        return [doc.content_object for doc in docs if doc.content_object]

    def can_read_or_exception(self, user, doc_class, doc_id, exception_class=PopupException):
        """Return the doc when readable by user, else raise exception_class."""
        if doc_id is None:
            return
        try:
            ct = ContentType.objects.get_for_model(doc_class)
            doc = Document.objects.get(object_id=doc_id, content_type=ct)
            if doc.can_read(user):
                return doc
            else:
                message = _("Permission denied. %(username)s does not have the permissions required to access document %(id)s") % \
                    {'username': user.username, 'id': doc.id}
                raise exception_class(message)
        except Document.DoesNotExist:
            raise exception_class(_('Document %(id)s does not exist') % {'id': doc_id})

    def can_read(self, user, doc_class, doc_id):
        ct = ContentType.objects.get_for_model(doc_class)
        doc = Document.objects.get(object_id=doc_id, content_type=ct)
        return doc.can_read(user)

    def link(self, content_object, owner, name='', description='', extra=''):
        """Create and default-tag a Document for content_object, unless one exists."""
        if not content_object.doc.exists():
            doc = Document.objects.create(content_object=content_object, owner=owner, name=name, description=description, extra=extra)
            tag = DocumentTag.objects.get_default_tag(user=owner)
            doc.tags.add(tag)
            return doc
        else:
            LOG.warn('Object %s already has documents: %s' % (content_object, content_object.doc.all()))
            return content_object.doc.all()[0]

    def sync(self):
        """Best-effort: ensure Oozie jobs and saved queries each have one linked Document.

        Duplicates are deleted, missing documents are created and tagged as
        examples; failures are logged, never raised.
        """
        try:
            from oozie.models import Workflow, Coordinator, Bundle

            for job in list(chain(Workflow.objects.all(), Coordinator.objects.all(), Bundle.objects.all())):
                if job.doc.count() > 1:
                    LOG.warn('Deleting duplicate document %s for %s' % (job.doc.all(), job))
                    job.doc.all().delete()

                if not job.doc.exists():
                    doc = Document.objects.link(job, owner=job.owner, name=job.name, description=job.description)
                    tag = DocumentTag.objects.get_example_tag(user=job.owner)
                    doc.tags.add(tag)

                    if job.is_trashed:
                        doc.send_to_trash()
                    if job.is_shared:
                        doc.share_to_default()
                    if hasattr(job, 'managed'):
                        # Unmanaged jobs originate from the legacy jobsub app.
                        if not job.managed:
                            doc.extra = 'jobsub'
                            doc.save()

                if job.owner.username == SAMPLE_USERNAME:
                    job.doc.get().share_to_default()
        except Exception, e:
            LOG.warn(force_unicode(e))

        try:
            from beeswax.models import SavedQuery

            for job in SavedQuery.objects.all():
                if job.doc.count() > 1:
                    LOG.warn('Deleting duplicate document %s for %s' % (job.doc.all(), job))
                    job.doc.all().delete()

                if not job.doc.exists():
                    doc = Document.objects.link(job, owner=job.owner, name=job.name, description=job.desc, extra=job.type)
                    tag = DocumentTag.objects.get_example_tag(user=job.owner)
                    doc.tags.add(tag)

                    if job.is_trashed:
                        doc.send_to_trash()

                if job.owner.username == SAMPLE_USERNAME:
                    job.doc.get().share_to_default()
        except Exception, e:
            LOG.warn(force_unicode(e))
raise e except StructuredException, e: error_code = e.error_code message = e.message details = e.data or {} except Exception, e: LOG.exception('error in %s' % view_fn) error_code = 500 details = {} (type, value, tb) = sys.exc_info() if not hasattr(e, 'message') or not e.message: message = str(e) else: message = force_unicode(e.message, strings_only=True, errors='replace') response = {'status': 1, 'message': message, 'details': details} return JsonResponse(response, status=error_code) return decorator def get_or_create_node(workflow, node_data, save=True): node = None id = str(node_data['id']) separator_index = id.find(':') if separator_index == -1:
def error_handler(view_fn):
    """Decorator: convert view exceptions into a JSON {'status','message'} 500 response.

    Http404 passes through; NavOpt and Sentry-privilege errors get tailored
    messages, everything else a generic one.
    """
    def decorator(*args, **kwargs):
        try:
            return view_fn(*args, **kwargs)
        except Http404, e:
            raise e
        except NavOptException, e:
            LOG.exception(e)
            response = {'status': -1, 'message': e.message}
        except MissingSentryPrivilegeException, e:
            LOG.exception(e)
            response = {
                'status': -1,
                'message': 'Missing privileges for %s' % force_unicode(str(e))
            }
        except Exception, e:
            LOG.exception(e)
            response = {'status': -1, 'message': force_unicode(str(e))}
        return JsonResponse(response, status=500)
    return decorator


@require_POST
@error_handler
def get_tenant(request):
    response = {'status': -1}
    cluster_id = request.POST.get('cluster_id')
    # NOTE(review): get_tenant appears truncated — no code past this point is visible.
def defaultField(self, df): self.xml = force_unicode( force_unicode(self.xml).replace( u'<str name="df">text</str>', u'<str name="df">%s</str>' % force_unicode(df)))
def __str__(self): res = '%s - %s - %s' % (force_unicode(self.name), self.owner, self.uuid) return force_unicode(res)
status = 200 response = {'status': -1, 'message': ''} try: if has_navigator(args[0].user): return view_fn(*args, **kwargs) else: raise MetadataApiException('Navigator API is not configured.') except Http404, e: raise e except EntityDoesNotExistException, e: response['message'] = e.message response['status'] = -3 status = 200 except NavigathorAuthException, e: response['message'] = force_unicode(e.message) response['status'] = -2 except NavigatorApiException, e: try: response['message'] = json.loads(e.message) response['status'] = -2 except Exception: response['message'] = force_unicode(e.message) except Exception, e: status = 500 message = force_unicode(e) LOG.exception(message) return JsonResponse(response, status=status) return decorator
except ImportError, e: LOG.warn("Hive lib not enabled") def error_handler(view_fn): def decorator(*args, **kwargs): try: return view_fn(*args, **kwargs) except Http404, e: raise e except NavOptException, e: LOG.exception(e) response = {'status': -1, 'message': e.message} except Exception, e: LOG.exception(e) response = {'status': -1, 'message': force_unicode(str(e))} return JsonResponse(response, status=500) return decorator @require_POST @error_handler def get_tenant(request): response = {'status': -1} email = request.POST.get('email') api = OptimizerApi() data = api.get_tenant(email=email)
def __str__(self): return force_unicode(str(self.message))
def decorator(*args, **kwargs): try: return view_fn(*args, **kwargs) except Exception, e: LOG.exception(e) response = {'status': -1, 'message': force_unicode(e)}
def config_validator(user): """ config_validator() -> [ (config_variable, error_message) ] Called by core check_config() view. """ from desktop.lib import i18n res = [] if not SECRET_KEY.get(): res.append( (SECRET_KEY, unicode( _("Secret key should be configured as a random string.")))) # Validate SSL setup if SSL_CERTIFICATE.get(): res.extend(validate_path(SSL_CERTIFICATE, is_dir=False)) if not SSL_PRIVATE_KEY.get(): res.append(( SSL_PRIVATE_KEY, unicode( _("SSL private key file should be set to enable HTTPS.")))) else: res.extend(validate_path(SSL_PRIVATE_KEY, is_dir=False)) # Validate encoding if not i18n.validate_encoding(DEFAULT_SITE_ENCODING.get()): res.append( (DEFAULT_SITE_ENCODING, unicode(_("Encoding not supported.")))) # Validate kerberos if KERBEROS.HUE_KEYTAB.get() is not None: res.extend(validate_path(KERBEROS.HUE_KEYTAB, is_dir=False)) # Keytab should not be world or group accessible kt_stat = os.stat(KERBEROS.HUE_KEYTAB.get()) if stat.S_IMODE(kt_stat.st_mode) & 0077: res.append( (KERBEROS.HUE_KEYTAB, force_unicode( _("Keytab should have 0600 permissions (has %o).") % stat.S_IMODE(kt_stat.st_mode)))) res.extend(validate_path(KERBEROS.KINIT_PATH, is_dir=False)) res.extend(validate_path(KERBEROS.CCACHE_PATH, is_dir=False)) if LDAP.LDAP_URL.get() is None != LDAP.BASE_DN.get() is None: res.append(( LDAP.LDAP_URL, force_unicode( _("LDAP is only partially configured. An LDAP URL and BASE DN must be provided." )))) if LDAP.BIND_DN.get(): if LDAP.BIND_PASSWORD.get() is None: res.append( (LDAP.BIND_PASSWORD, force_unicode( _("If you set bind_dn, then you must set bind_password.")) )) return res
def submit_schedule(request, doc_id): interface = request.GET.get('interface', request.POST.get('interface', 'beat')) if doc_id.isdigit(): coordinator = Coordinator(document=Document2.objects.get(id=doc_id)) else: coordinator = Coordinator(document=Document2.objects.get_by_uuid( user=request.user, uuid=doc_id)) ParametersFormSet = formset_factory(ParameterForm, extra=0) if request.method == 'POST': params_form = ParametersFormSet(request.POST) if params_form.is_valid(): mapping = dict([(param['name'], param['value']) for param in params_form.cleaned_data]) mapping['dryrun'] = request.POST.get('dryrun_checkbox') == 'on' jsonify = request.POST.get('format') == 'json' try: job_id = get_api(request, interface).submit_schedule( request, coordinator, mapping) except Exception as e: message = force_unicode(str(e)) return JsonResponse({ 'status': -1, 'message': message }, safe=False) if jsonify: schedule_type = 'celery-beat' if interface == 'beat' else 'schedule' return JsonResponse( { 'status': 0, 'job_id': job_id, 'type': schedule_type }, safe=False) else: request.info(_('Schedule submitted.')) return redirect( reverse('oozie:list_oozie_coordinator', kwargs={'job_id': job_id})) else: request.error( _('Invalid submission form: %s') % params_form.errors) else: parameters = coordinator.find_all_parameters( ) if interface == 'oozie' else [] initial_params = ParameterForm.get_initial_params( dict([(param['name'], param['value']) for param in parameters])) params_form = ParametersFormSet(initial=initial_params) popup = render( 'scheduler/submit_job_popup.mako', request, { 'params_form': params_form, 'name': coordinator.name, 'action': '/scheduler/api/schedule/submit/%s' % coordinator.id, 'show_dryrun': False, 'return_json': request.GET.get('format') == 'json', 'interface': interface }, force_template=True).content return JsonResponse(popup, safe=False)
def field_values_from_separated_file(fh, delimiter, quote_character, fields=None):
  """
  Generate one dict per row from a delimiter-separated file.

  Reads ``fh`` incrementally (only one chunk plus carry-over is held in
  memory), treats the first row as the header, drops columns that are not in
  ``fields``, coerces date/integer/decimal/boolean columns according to each
  field's ``type``, and adds a random ``id`` when the row has none.

  :param fh: file-like object to read from.
  :param delimiter: column separator character.
  :param quote_character: CSV quote character.
  :param fields: optional list of {'name': ..., 'type': ...} dicts describing
    the collection schema; when None, no column filtering or coercion is done.
  """
  if fields is None:
    field_names = timestamp_fields = integer_fields = decimal_fields = boolean_fields = None
  else:
    field_names = [field['name'].strip() for field in fields]
    timestamp_fields = [field['name'].strip() for field in fields if field['type'] in DATE_FIELD_TYPES]
    integer_fields = [field['name'].strip() for field in fields if field['type'] in INTEGER_FIELD_TYPES]
    decimal_fields = [field['name'].strip() for field in fields if field['type'] in DECIMAL_FIELD_TYPES]
    boolean_fields = [field['name'].strip() for field in fields if field['type'] in BOOLEAN_FIELD_TYPES]

  content = fh.read()
  headers = None

  while content:
    last_newline = content.rfind('\n')

    # A newline inside an unterminated quoted value is not a row boundary:
    # pull in more data and retry.
    if last_newline > -1 and content[:last_newline].count('"') % 2 != 0:
      more = fh.read()
      if more:
        content += more
        continue
      # BUG FIX: at EOF with an unterminated quote the original kept reading
      # '' and looping forever. Process what is left as the final chunk.
      last_newline = -1

    if last_newline > -1:
      if headers is None:
        csvfile = StringIO.StringIO(content[:last_newline])
      else:
        # Leading '\n' keeps DictReader from treating the first data row as headers.
        csvfile = StringIO.StringIO('\n' + content[:last_newline])
      content = content[last_newline + 1:] + fh.read()
    else:
      if headers is None:
        csvfile = StringIO.StringIO(content)
      else:
        csvfile = StringIO.StringIO('\n' + content)
      content = fh.read()

    # First line is headers
    if headers is None:
      headers = next(
          csv.reader(csvfile, delimiter=smart_str(delimiter), quotechar=smart_str(quote_character)))
      headers = [name.strip() for name in headers]

    # Use dict reader
    reader = csv.DictReader(csvfile,
                            fieldnames=headers,
                            delimiter=smart_str(delimiter),
                            quotechar=smart_str(quote_character))

    remove_keys = None
    for row in reader:
      # Get rid of invalid binary chars and convert to unicode from DictReader
      row = dict([(force_unicode(k), force_unicode(v, errors='ignore')) for k, v in row.iteritems()])

      # Remove keys that aren't in collection (computed once per chunk)
      if remove_keys is None:
        if field_names is None:
          remove_keys = []
        else:
          remove_keys = set(row.keys()) - set(field_names)
      if remove_keys:
        for key in remove_keys:
          del row[key]

      # Parse dates: naive timestamps are assumed to be in settings.TIME_ZONE,
      # then normalized to UTC ISO-8601.
      if timestamp_fields:
        tzinfo = pytz.timezone(settings.TIME_ZONE)
        for key in timestamp_fields:
          if key in row:
            dt = parse(row[key])
            if not dt.tzinfo:
              dt = tzinfo.localize(dt)
            row[key] = dt.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')

      # Parse decimal
      if decimal_fields:
        for key in decimal_fields:
          if key in row:
            row[key] = float(row[key])

      # Parse integer
      if integer_fields:
        for key in integer_fields:
          if key in row:
            row[key] = int(row[key])

      # Parse boolean
      if boolean_fields:
        for key in boolean_fields:
          if key in row:
            row[key] = str(row[key]).lower() == "true"

      # Add mock id random value
      if 'id' not in row:
        row['id'] = str(uuid.uuid4())

      yield row
def decorator(*args, **kwargs): try: return func(*args, **kwargs) except Exception as e: message = force_unicode(str(e)) raise QueryError(message)
def fetch_result(self, notebook, snippet, rows, start_over): api = self.get_api() session = _get_snippet_session(notebook, snippet) cell = snippet['result']['handle']['id'] try: response = api.fetch_data(session['id'], cell) except Exception as e: message = force_unicode(str(e)).lower() if re.search("session ('\d+' )?not found", message): raise SessionExpired(e) else: raise e content = response['output'] if content['status'] == 'ok': data = content['data'] images = [] try: table = data['application/vnd.livy.table.v1+json'] except KeyError: try: images = [data['image/png']] except KeyError: images = [] if 'application/json' in data: result = data['application/json'] data = result['data'] meta = [{ 'name': field['name'], 'type': field['type'], 'comment': '' } for field in result['schema']['fields']] type = 'table' else: data = [[data['text/plain']]] meta = [{ 'name': 'Header', 'type': 'STRING_TYPE', 'comment': '' }] type = 'text' else: data = table['data'] headers = table['headers'] meta = [{ 'name': h['name'], 'type': h['type'], 'comment': '' } for h in headers] type = 'table' # Non start_over not supported if not start_over: data = [] return {'data': data, 'images': images, 'meta': meta, 'type': type} elif content['status'] == 'error': tb = content.get('traceback', None) if tb is None or not tb: msg = content.get('ename', 'unknown error') evalue = content.get('evalue') if evalue is not None: msg = '%s: %s' % (msg, evalue) else: msg = ''.join(tb) raise QueryError(msg)