Code example #1
def login():
    form = LoginForm(next=request.args.get('next'))

    if form.validate_on_submit():
        u = User.find_by_identity(request.form.get('identity'))

        if u and u.authenticated(password=request.form.get('password')):
            # As you can see remember me is always enabled, this was a design
            # decision I made because more often than not users want this
            # enabled. This allows for a less complicated login form.
            #
            # If however you want them to be able to select whether or not they
            # should remain logged in then perform the following 3 steps:
            # 1) Replace 'True' below with: request.form.get('remember', False)
            # 2) Uncomment the 'remember' field in user/forms.py#LoginForm
            # 3) Add a checkbox to the login form with the id/name 'remember'
            if login_user(u, remember=True):
                u.update_activity_tracking(request.remote_addr)

                # Handle optionally redirecting to the next URL safely.
                next_url = request.form.get('next')
                if next_url:
                    return redirect(safe_next_url(next_url))

                return redirect(url_for('user.settings'))
            else:
                flash(_('This account has been disabled.'), 'error')
        else:
            flash(_('Identity or password is incorrect.'), 'error')

    return render_template('user/login.jinja2', form=form)
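
The comment above lists three steps for making "remember me" user-selectable instead of hard-coded. The sketch below shows roughly what those steps amount to, assuming a recent Flask-WTF/Flask-Login setup; the field labels and template snippet are illustrative, not taken from the project.

# Step 2: the 'remember' field in user/forms.py#LoginForm (sketch).
from flask_wtf import FlaskForm
from wtforms import BooleanField, HiddenField, PasswordField, StringField


class LoginForm(FlaskForm):
    next = HiddenField()
    identity = StringField('Username or email')
    password = PasswordField('Password')
    remember = BooleanField('Stay signed in')

# Step 3: a checkbox with id/name 'remember' in the login template:
#   <input type="checkbox" id="remember" name="remember">

# Step 1: in the view, read the checkbox instead of hard-coding True:
#   if login_user(u, remember=request.form.get('remember', False)):
#       ...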
Code example #2
    def from_profile(cls, user, profile):
        if not user or user.is_anonymous:
            email = profile.data.get("email")
            if not email:
                msg = "Cannot create new user, authentication provider did not provide email"
                logging.warning(msg)
                raise Exception(_(msg))
            conflict = User.objects(email=email).first()
            if conflict:
                msg = "Cannot create new user, email {} is already used. Login and then connect external profile."
                msg = _(msg).format(email)
                logging.warning(msg)
                raise Exception(msg)

            now = datetime.now()
            user = User(
                email=email,
                first_name=profile.data.get("first_name"),
                last_name=profile.data.get("last_name"),
                confirmed_at=now,
                active=True,
            )
            user.save()

        connection = cls(user=user, **profile.data)
        connection.save()
        return connection
Code example #3
File: user.py Project: IanZhao/firefly
    def from_profile(cls, user, profile):
        if user and not user.is_anonymous():
            user = None
        if not user or user.is_anonymous():
            email = profile.data.get('email')
            provider = profile.data.get('provider')
            first_name = profile.data.get('first_name')
            last_name = profile.data.get('last_name')
            if provider not in ('Twitter', 'Douban') and not email:
                msg = 'Cannot create new user, authentication provider needs to provide an email'  # noqa
                raise Exception(_(msg))
            if email is None:
                conflict = User.objects(first_name=first_name,
                                        last_name=last_name).first()
            else:
                conflict = User.objects(email=email).first()
            if conflict:
                msg = 'Cannot create new user, email {} is already used. Login and then connect external profile.'  # noqa
                msg = _(msg).format(email)
                raise Exception(msg)

            now = datetime.now()
            user = User(
                email=email,
                first_name=first_name,
                last_name=last_name,
                confirmed_at=now,
                active=True,
            )
            user.save()
            login_user(user)

        connection = cls(user=user, **profile.data)
        connection.save()
        return connection
Code example #4
File: web.py Project: Glandos/ihatemoney
def admin():
    """Admin authentication.

    When ADMIN_PASSWORD is empty, admin authentication is deactivated.
    """
    form = AdminAuthenticationForm()
    goto = request.args.get('goto', url_for('.home'))
    is_admin_auth_enabled = bool(current_app.config['ADMIN_PASSWORD'])
    if request.method == "POST":
        client_ip = request.remote_addr
        if not login_throttler.is_login_allowed(client_ip):
            msg = _("Too many failed login attempts, please retry later.")
            form.errors['admin_password'] = [msg]
            return render_template("admin.html", form=form, admin_auth=True,
                                   is_admin_auth_enabled=is_admin_auth_enabled)
        if form.validate():
            # Valid password
            if (check_password_hash(current_app.config['ADMIN_PASSWORD'],
                                    form.admin_password.data)):
                session['is_admin'] = True
                session.update()
                login_throttler.reset(client_ip)
                return redirect(goto)
            # Invalid password
            login_throttler.increment_attempts_counter(client_ip)
            msg = _("This admin password is not the right one. Only %(num)d attempts left.",
                    num=login_throttler.get_remaining_attempts(client_ip))
            form.errors['admin_password'] = [msg]
    return render_template("admin.html", form=form, admin_auth=True,
                           is_admin_auth_enabled=is_admin_auth_enabled)
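
The view above delegates rate limiting to a login_throttler object with is_login_allowed(), get_remaining_attempts(), increment_attempts_counter() and reset(); those method names come from the example, but the implementation is not shown. A minimal in-memory sketch of such a per-IP throttler (the class name, attempt limit and delay are assumptions) could look like this:

import time
from collections import defaultdict


class LoginThrottler(object):
    """Per-IP failed-login counter (sketch only; no persistence or cleanup)."""

    def __init__(self, max_attempts=3, delay=3600):
        self._max_attempts = max_attempts
        self._delay = delay  # seconds a blocked IP has to wait
        self._attempts = defaultdict(lambda: {'count': 0, 'last': 0.0})

    def is_login_allowed(self, ip):
        entry = self._attempts[ip]
        if entry['count'] < self._max_attempts:
            return True
        if time.time() - entry['last'] >= self._delay:
            # The block has expired; start counting again.
            self.reset(ip)
            return True
        return False

    def get_remaining_attempts(self, ip):
        return max(self._max_attempts - self._attempts[ip]['count'], 0)

    def increment_attempts_counter(self, ip):
        entry = self._attempts[ip]
        entry['count'] += 1
        entry['last'] = time.time()

    def reset(self, ip):
        self._attempts.pop(ip, None)


login_throttler = LoginThrottler()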
Code example #5
File: roles.py Project: BMeu/Aerarium
def role_delete(name: str) -> str:
    """
        Show a form to delete the given role and process that form.

        :param name: The name of the role.
        :return: The HTML response.
    """
    role = Role.load_from_name(name)
    if role is None:
        abort(404)

    # If this is the only role allowed to edit roles, show an info text.
    if role.is_only_role_allowed_to_edit_roles():
        deletion_not_possible_text = _('This role cannot be deleted because it is the only one that can edit roles.')
        return render_template('administration/role_delete.html', role=name,
                               deletion_not_possible_text=deletion_not_possible_text)

    # Create (and possibly process) the delete form.
    delete_form = RoleDeleteForm(role)
    if delete_form.validate_on_submit():
        try:
            new_role_id = delete_form.new_role.data
            new_role = Role.load_from_id(new_role_id)
        except AttributeError:
            # The new_role field might not exist because there are no users.
            new_role = None

        role.delete(new_role)

        flash(_('The role has been deleted.'))
        return redirect(url_for('.roles_list'))

    return render_template('administration/role_delete.html', role=name, delete_form=delete_form)
Code example #6
def issues_edit(id):
    issue = Issue.query.get(id)

    if request.method == 'GET' and issue and issue.status == 'unread':
        issue = Issue.unread_to_open(issue)

    form = IssueForm(obj=issue)

    subject = _('[Catwatch issue] Re: %(issue_type)s',
                issue_type=issue.LABEL[issue.label])

    # Shenanigans to comply with PEP-8's formatting style.
    body_string = '\n\nYou opened an issue regarding:'
    issue_string = '\n\n---\n{0}\n---\n\n'.format(issue.question)
    message = _('Hello,%(body)s:%(issue)s\n\nThanks,\nCatwatch support team',
                body=body_string, issue=issue_string)

    contact_form = IssueContactForm(email=issue.email,
                                    subject=subject, message=message)

    if form.validate_on_submit():
        form.populate_obj(issue)
        issue.save()

        flash(_('Issue has been saved successfully.'), 'success')
        return redirect(url_for('admin.issues'))

    return render_template('admin/issue/edit.jinja2', form=form,
                           contact_form=contact_form, issue=issue)
Code example #7
File: models.py Project: hotdoc/hotdoc_server
    def from_profile(cls, user, profile):
        if not user or user.is_anonymous():
            email = profile.data.get("email")
            if not email:
                msg = "Cannot create new user, authentication provider did not provide email"
                logging.warning(msg)
                raise Exception(_(msg))
            conflict = User.query.filter(User.email == email).first()
            if conflict:
                msg = "Cannot create new user, email {} is already used. Login and then connect external profile."
                msg = _(msg).format(email)
                logging.warning(msg)
                raise Exception(msg)

            now = datetime.now()
            user = User(
                email=email,
                first_name=profile.data.get("first_name"),
                last_name=profile.data.get("last_name"),
                confirmed_at=now,
                active=True,
            )
            db.session.add(user)
            db.session.flush()

        assert user.id, "User does not have an id"
        connection = cls(user_id=user.id, **profile.data)
        db.session.add(connection)
        db.session.commit()
        return connection
Code example #8
    def from_profile(cls, user, profile):
        if not user or user.is_anonymous():
            email = profile.data.get("email")
            if not email:
                msg = "Cannot create new user, authentication provider did not provide email"
                logging.warning(msg)
                raise Exception(_(msg))
            conflict = User.query(User.email == email).get()
            if conflict:
                msg = "Cannot create new user, email {} is already used. Login and then connect external profile."
                msg = _(msg).format(email)
                logging.warning(msg)
                raise Exception(msg)

            now = datetime.now()
            user = User(
                email=email,
                first_name=profile.data.get("first_name"),
                last_name=profile.data.get("last_name"),
                confirmed_at=now,
                active=True,
            )

        def tx():
            if not user.key or not user.key.id():
                # We could allocate IDs beforehand, but that would hit the
                # datastore anyway, so it is simpler to just put the model here.
                user.put()
            connection = cls(parent=user.key, **profile.data)
            connection.put()
            return connection

        return ndb.transaction(tx)
Code example #9
File: views.py Project: NickyWu123/SampleofBlog
def edit(id=0):
    form = PostForm()

    if id == 0:
        user = User.query.filter_by(id=current_user.id).first()
        post = Post()
        # post.author = user
        # Set author_id directly instead of assigning through the backref:
        # assigning via the backref would make the database add an object by
        # default, which is clearly bad for the experience.
        post.author_id = user.id
    else:
        post = Post.query.get_or_404(id)
        post.created = datetime.now().strftime('%y-%m-%d %I:%M:%S %p')
        print(post)
    if form.validate_on_submit():
        post.body = form.body.data
        post.title = form.title.data
        db.session.add(post)
        db.session.commit()
        return redirect(url_for('.post', id=post.id))

    form.title.data = post.title
    form.body.data = post.body

    title = _(u'添加新文章')
    if id > 0:
        title = _(u'编辑')

    return render_template('posts/edit.html',
                           title=title,
                           form=form,
                           post=post)
Code example #10
def delete(path):
    if not ModuleAPI.can_write('page'):
        return abort(403)

    page = Page.get_by_path(path)
    if not page:
        flash(_('The page you tried to delete does not exist.'), 'danger')
        return redirect(url_for('page.get_page', path=path))
    rev = page.get_latest_revision()

    class DeleteForm(Form):
        title = StringField(_('Page title'))

    form = DeleteForm(request.form)

    if form.validate_on_submit():
        if rev.title == form.title.data:
            db.session.delete(page)
            db.session.commit()
            flash(_('The page has been deleted'), 'success')
            return redirect(url_for('home.home'))
        else:
            flash(_('The given title does not match the page title.'),
                  'warning')
    else:
        flash_form_errors(form)

    return render_template('page/delete.htm', rev=rev, form=form)
Code example #11
File: pantheon.py Project: alexandersimoes/oec
def make_pantheon_section(pantheon_id, attr):
    pantheon_fields_iframe = "http://pantheon.media.mit.edu/treemap/country_exports/{}/all/-4000/2010/H15/pantheon/embed".format(pantheon_id)
    pantheon_fields_subtitle = [_(u"This treemap shows the cultural exports %(of_country)s, as proxied by the production of globally famous historical characters", of_country=attr.get_name(article="of"))]
    pantheon_fields_subtitle.append(u"<a target='_blank' href='http://pantheon.media.mit.edu/treemap/country_exports/{}/all/-4000/2010/H15/pantheon/'>{} <i class='fa fa-external-link'></i></a>".format(pantheon_id, _("Explore on Pantheon")))
    
    pantheon_cities_iframe = "http://pantheon.media.mit.edu/treemap/country_by_city/{0}/{0}/-4000/2010/H15/pantheon/embed".format(pantheon_id)
    pantheon_cities_subtitle = [_(u"This treemap shows the cultural exports %(of_country)s by city, as proxied by the production of globally famous historical characters.", of_country=attr.get_name(article="of"))]
    pantheon_cities_subtitle.append(u"<a target='_blank' href='http://pantheon.media.mit.edu/treemap/country_by_city/{0}/{0}/-4000/2010/H15/pantheon/'>{1} <i class='fa fa-external-link'></i></a>".format(pantheon_id, _("Explore on Pantheon")))
    
    pantheon_section = {
        "title": "<a target='_blank' href='http://pantheon.media.mit.edu'><img src='http://pantheon.media.mit.edu/pantheon_logo.png' /></a>",
        "source": "pantheon",
        "builds": [
            {"title": _(u"Globally Famous People %(of_country)s", of_country=attr.get_name(article="of")),
            "iframe": pantheon_fields_iframe,
            "subtitle": pantheon_fields_subtitle
            },
            {"title": _(u"Globally Famous People %(of_country)s by City", of_country=attr.get_name(article="of")),
            "iframe": pantheon_cities_iframe,
            "subtitle": pantheon_cities_subtitle
            },
        ]
    }
    
    return pantheon_section
Code example #12
File: page.py Project: viaict/viaduct
    def validate(self):

        # Validate all other fields with default validators
        if not SuperPageForm.validate(self):
            return False

        # Test if either english or dutch is entered
        result = True
        if not (self.nl_title.data or self.en_title.data):
            self.nl_title.errors.append(
                _('Either Dutch or English title required'))
            result = False
        if not (self.nl_content.data or self.en_content.data):
            self.nl_content.errors.append(
                _('Either Dutch or English content required'))
            result = False

        # XOR the fields to check that title and content were given for the same language
        if bool(self.nl_title.data) != bool(self.nl_content.data):
            self.nl_title.errors.append(
                _('Dutch title requires Dutch content and vice versa'))
            result = False
        if bool(self.en_title.data) != bool(self.en_content.data):
            self.en_title.errors.append(
                _('English title requires English content and vice versa'))
            result = False

        return result
Code example #13
def login():
    if user_is_authenticated(current_user):
        return redirect(url_for('keypad.index'))

    form = LoginForm(login=request.args.get('login', None),
                     next=request.args.get('next', None))

    if form.validate_on_submit():
        user, authenticated = User.authenticate(form.login.data,
                                                form.password.data)

        if user and authenticated:
            remember = request.form.get('remember') == 'y'
            if login_user(user, remember=remember):
                flash(_("Logged in"), 'success')
                login_history_add(user, request.remote_addr)
                license = Setting.get_by_name('license_agreement', default=False).value
                if not license:
                    return redirect(url_for('frontend.license'))

            return redirect(form.next.data or url_for('keypad.index'))
        else:
            failed_login_add(form.login.data, request.remote_addr)
            flash(_('Sorry, invalid login'), 'error')

    return render_template('frontend/login.html', form=form)
Code example #14
File: user.py Project: gae-init/gae-init-babel
def user_update(user_id=0):
  if user_id:
    user_db = model.User.get_by_id(user_id)
  else:
    user_db = model.User(name='', username='')
  if not user_db:
    flask.abort(404)

  form = UserUpdateForm(obj=user_db)
  for permission in user_db.permissions:
    form.permissions.choices.append((permission, permission))
  form.permissions.choices = sorted(set(form.permissions.choices))
  if form.validate_on_submit():
    if not util.is_valid_username(form.username.data):
      form.username.errors.append(_('This username is invalid.'))
    elif not model.User.is_username_available(form.username.data, user_db.key):
      form.username.errors.append(_('This username is already taken.'))
    else:
      form.populate_obj(user_db)
      if auth.current_user_key() == user_db.key:
        user_db.admin = True
        user_db.active = True
      user_db.put()
      return flask.redirect(flask.url_for(
        'user_list', order='-modified', active=user_db.active,
      ))

  return flask.render_template(
    'user/user_update.html',
    title=user_db.name or _('New User'),
    html_class='user-update',
    form=form,
    user_db=user_db,
    api_url=flask.url_for('api.admin.user', user_key=user_db.key.urlsafe()) if user_db.key else ''
  )
Code example #15
    def from_profile(cls, user, profile):
        if not user or user.is_anonymous():
            if not app.config.get("SECURITY_REGISTERABLE"):
                msg = "User not found. Registration disabled."
                logging.warning(msg)
                raise Exception(_(msg))
            email = profile.data.get("email")
            if not email:
                msg = "Please provide an email address."
                logging.warning(msg)
                raise Exception(_(msg))
            conflict = User.query.filter(User.email == email).first()
            if conflict:
                msg = "Email {} is already used. Login and then connect external profile."
                msg = _(msg).format(email)
                logging.warning(msg)
                raise Exception(msg)

            now = datetime.now()
            user = User(
                email=email,
                name="{} {}".format(profile.data.get("first_name"),
                                    profile.data.get("last_name")),
                confirmed_at=now,
                active=True)

            db.session.add(user)
            db.session.flush()

        assert user.id, "User does not have an id"
        connection = cls(user_id=user.id, **profile.data)
        db.session.add(connection)
        db.session.commit()
        return connection
Code example #16
File: views.py Project: 18mr/call-congress
def login():
    if current_user.is_authenticated():
        return redirect(url_for('admin.dashboard'))

    form = LoginForm(login=request.args.get('login', None),
                     next=request.args.get('next', None))

    if form.validate_on_submit():
        user, authenticated = User.authenticate(form.login.data,
                                                form.password.data)

        if user and authenticated:
            remember = request.form.get('remember') == 'y'
            if login_user(user, remember=remember):
                flash(_("Logged in"), 'success')
            else:
                flash(_("Unable to log in"), 'warning')

            user.last_accessed = datetime.now()
            db.session.add(user)
            db.session.commit()

            return redirect(form.next.data or url_for('admin.dashboard'))
        else:
            flash(_('Sorry, invalid login'), 'warning')

    return render_template('user/login.html', form=form)
Code example #17
File: user.py Project: viaict/viaduct
def sign_in_saml_response():
    has_redirected = False
    redir_url = saml_service.get_redirect_url(url_for('home.home'))

    try:
        # Redirect the user to the index page if he or she has been
        # authenticated already.
        if current_user.is_authenticated:
            return redirect(redir_url)

        if not saml_service.user_is_authenticated():
            flash(_('Authentication failed. Please try again.'), 'danger')
            return redirect(redir_url)

        try:
            user = saml_service.get_user_by_uid(needs_confirmed=False)

            if user.student_id_confirmed:
                login_user(user)
            else:
                has_redirected = True
                return redirect(url_for('user.sign_in_confirm_student_id'))

        except (ResourceNotFoundException, ValidationException):
            flash(_('There is no via account linked to this UvA account. '
                    'On this page you can create a new via account that '
                    'is linked to your UvA account.'))
            has_redirected = True
            return redirect(url_for('user.sign_up_saml_response'))

        return redirect(redir_url)
    finally:
        # Only clear the SAML data when we did not redirect to the sign-up or
        # student ID confirmation page.
        if not has_redirected:
            saml_service.clear_saml_data()
Code example #18
File: public.py Project: okosioc/flask-boot
def signup():
    """
    Signup.
    """
    form = SignupForm()

    if form.validate_on_submit():
        if form.password.data != form.repassword.data:
            return render_template('public/signup.html', form=form, error=_('Password mismatch!'))

        em = form.email.data.strip().lower()
        u = User.find_one({'email': em})
        if u:
            return render_template('public/signup.html', form=form, error=_('This email has already been registered!'))

        u = User()
        u.email = em
        u.password = unicode(generate_password_hash(form.password.data.strip()))
        u.name = u.email.split('@')[0]
        u.save()

        current_app.logger.info('A new user created, %s' % u)
        send_support_email('signup()', u'New user %s with id %s.' % (u.email, u._id))

        # Keep the user info in the session using Flask-Login
        login_user(u)

        # Tell Flask-Principal the identity changed
        identity_changed.send(current_app._get_current_object(), identity=Identity(u.get_id()))

        return redirect('/')

    return render_template('public/signup.html', form=form)
Code example #19
File: views.py Project: 18mr/call-congress
def reset_password():
    form = RecoverPasswordForm()

    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()

        if user:
            flash(_('Please check your email for instructions on how to access your account'), 'success')

            user.activation_key = str(uuid4())
            db.session.add(user)
            db.session.commit()

            url = url_for('user.change_password',
                email=user.email,
                activation_key=user.activation_key,
                _external=True)
            body = render_template('user/email/reset_password.txt',
                sitename=current_app.config['SITENAME'],
                username=user.name,
                url=url)
            message = Message(subject='Reset your password for ' + current_app.config['SITENAME'],
                body=body,
                recipients=[user.email])
            mail.send(message)

            return render_template('user/reset_password.html', form=form)
        else:
            flash(_('Sorry, no user found for that email address'), 'error')

    return render_template('user/reset_password.html', form=form)
Code example #20
def create():
    if current_user.subscription:
        flash(_('You already have an active subscription.'), 'info')
        return redirect(url_for('user.settings'))

    plan = request.args.get('plan')
    active_plan = Subscription.get_plan_by_id(plan)

    # Guard against an invalid or missing plan.
    if active_plan is None and request.method == 'GET':
        return redirect(url_for('billing.pricing'))

    stripe_key = current_app.config.get('STRIPE_PUBLISHABLE_KEY')
    form = CreditCardForm(stripe_key=stripe_key, plan=plan)

    if form.validate_on_submit():
        subscription = Subscription()
        created = subscription.create(user=current_user,
                                      name=request.form.get('name'),
                                      plan=request.form.get('plan'),
                                      coupon=request.form.get('coupon_code'),
                                      token=request.form.get('stripe_token'))

        if created:
            flash(_('Awesome, thanks for subscribing!'), 'success')
        else:
            flash(_('You must enable Javascript for this request.'), 'warn')

        return redirect(url_for('user.settings'))

    return render_template('billing/payment_method.jinja2',
                           form=form, plan=active_plan)
Code example #21
File: views.py Project: RayYu03/maple-blog
def forget():
    '''Forgot password.'''
    error = None
    form = ForgetPasswdForm()
    if form.validate_on_submit() and request.method == "POST":
        validate_code = session['validate_code']
        validate = form.code.data
        if validate.lower() != validate_code.lower():
            return jsonify(judge=False, error=_('The validation code is incorrect'))
        else:
            exsited_email = User.query.filter_by(
                email=form.confirm_email.data).first()
            if exsited_email:
                '''email template'''
                from random import sample
                from string import ascii_letters, digits
                npasswd = ''.join(sample(ascii_letters + digits, 8))
                exsited_email.passwd = generate_password_hash(npasswd)
                db.session.commit()
                html = render_template('templet/forget.html',
                                       confirm_url=npasswd)
                subject = "Please update your password in time"
                email_send(form.confirm_email.data, html, subject)
                flash(_(
                    'An email has been sent to you. Please check it and update your password in time'))
                return jsonify(judge=True, error=error)
            else:
                error = _('The email address is incorrect')
                return jsonify(judge=False, error=error)
    else:
        if form.errors:
            return return_errors(form)
        return render_template('auth/forget.html', form=form)
Code example #22
def involvement_update(id_, origin_id):
    link_ = LinkMapper.get_by_id(id_)
    event = EntityMapper.get_by_id(link_.domain.id)
    actor = EntityMapper.get_by_id(link_.range.id)
    origin = event if origin_id == event.id else actor
    form = build_form(ActorForm, 'Involvement', link_, request)
    form.save.label.text = _('save')
    del form.actor, form.event, form.insert_and_continue
    form.activity.choices = [('P11', g.properties['P11'].name)]
    if event.class_.code in ['E7', 'E8', 'E12']:
        form.activity.choices.append(('P14', g.properties['P14'].name))
    if event.class_.code == 'E8':
        form.activity.choices.append(('P22', g.properties['P22'].name))
        form.activity.choices.append(('P23', g.properties['P23'].name))
    if form.validate_on_submit():
        g.cursor.execute('BEGIN')
        try:
            link_.delete()
            link_id = event.link(form.activity.data, actor, form.description.data)
            DateMapper.save_link_dates(link_id, form)
            NodeMapper.save_link_nodes(link_id, form)
            g.cursor.execute('COMMIT')
        except Exception as e:  # pragma: no cover
            g.cursor.execute('ROLLBACK')
            logger.log('error', 'database', 'transaction failed', e)
            flash(_('error transaction'), 'error')
        tab = 'actor' if origin.view_name == 'event' else 'event'
        return redirect(url_for(origin.view_name + '_view', id_=origin.id) + '#tab-' + tab)
    form.activity.data = link_.property.code
    form.description.data = link_.description
    link_.set_dates()
    form.populate_dates(link_)
    return render_template('involvement/update.html', origin=origin, form=form,
                           linked_object=event if origin_id != event.id else actor)
Code example #23
File: password_reset.py Project: BMeu/Aerarium
def reset_password_request() -> str:
    """
        Show a form to request resetting the password and process it upon submission.

        :return: The HTML response.
    """

    form = EmailForm()
    if form.validate_on_submit():
        user = User.load_from_email(form.email.data)
        if user is not None:
            token = user.send_password_reset_email()
        else:
            # Create a fake token to get the validity.
            token = ChangeEmailAddressToken()

        validity = token.get_validity(in_minutes=True)

        # Display a success message even if the specified address does not belong to a user account. Otherwise,
        # infiltrators could deduce if an account exists and use this information for attacks.
        flash(_('An email has been sent to the specified address. Please be aware that the included link for resetting \
                the password is only valid for %(validity)d minutes.', validity=validity))
        return redirect(url_for('userprofile.login'))

    return render_template('userprofile/reset_password_request.html', title=_('Forgot Your Password?'), form=form)
Code example #24
def set_page_revision_locale(page_rev, context):
    """
    Load the correct info in the model.

    This function is called after a PageRevision model is filled with data
    from the database, but before it is used by any other code.

    Use the locale of the current user/client to determine which language to
    display on the whole website. If the user's locale is unavailable, select
    the alternative language, suffixing the title of the page revision with the
    displayed language.
    """
    locale = get_locale()
    nl_available = page_rev.nl_title and page_rev.nl_content
    en_available = page_rev.en_title and page_rev.en_content
    if locale == 'nl' and nl_available:
        page_rev.title = page_rev.nl_title
        page_rev.content = page_rev.nl_content
    elif locale == 'en' and en_available:
        page_rev.title = page_rev.en_title
        page_rev.content = page_rev.en_content
    elif nl_available:
        page_rev.title = page_rev.nl_title + " (" + _('Dutch') + ")"
        page_rev.content = page_rev.nl_content
    elif en_available:
        page_rev.title = page_rev.en_title + " (" + _('English') + ")"
        page_rev.content = page_rev.en_content
    else:
        page_rev.title = 'N/A'
        page_rev.content = 'N/A'
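
The docstring says this hook runs after a PageRevision has been loaded from the database, but the example does not show how it is registered. Since the (page_rev, context) signature matches SQLAlchemy's mapper-level 'load' event, a plausible registration looks like the following; the import path for PageRevision is hypothetical.

from sqlalchemy import event

from app.page.models import PageRevision  # hypothetical import path

# Run the locale fallback every time a PageRevision instance is loaded.
event.listen(PageRevision, 'load', set_page_revision_locale)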
Code example #25
def edit(id=0):
    form = PostForm()

    if id == 0:
        post = Post(author=current_user)
    else:
        post = Post.query.get_or_404(id)

    if form.validate_on_submit():
        post.body = form.body.data
        post.title = form.title.data

        db.session.add(post)
        db.session.commit()

        return redirect(url_for('.post', id=post.id))

    form.title.data = post.title
    form.body.data = post.body

    title = _(u'添加新文章')
    if id > 0:
        title = _(u'编辑 - %(title)s', title=post.title)

    return render_template('posts/edit.html',
                           title=title,
                           form=form,
                           post=post)
Code example #26
File: routes.py Project: kretusmaximus/evergreen
def add_user():
    """The 'New user' route.

    Shows a form that allows for adding a new user to the database.

    :return: The 'New user' form if no POST request was submitted or there was
    an error while adding a user. Redirect to the user management view if user
    was added correctly.
    """
    form = AddUserForm()
    if form.validate_on_submit():
        # Create dictionary of form parameters
        request.form = {x: request.form[x] for x in request.form}
        # Remove superfluous values
        del request.form['csrf_token']
        del request.form['pwd2']
        try:
            dbcontroller.add_user(**request.form, admin='admin' in request.form)
        except DatabaseError:
            flash(_("Integrity error - user might already exist"))
        else:
            flash(_("User %(name)s added", name=request.form['name']))
            return redirect(url_for('.manage_users'))
    return render_template('bandpanel/admin/add_user.html', form=form,
                           title=_("Add user"))
Code example #27
File: routes.py Project: kretusmaximus/evergreen
def upload_track():
    """The 'Track upload' route.

    Shows a form that allows for uploading an audio track.

    :return: The 'Track upload' form if no POST request was submitted or there
    was an error while uploading the track. Redirect to the user management view
    if track upload was successful.
    """
    form = TrackUploadForm()
    if form.validate_on_submit():
        file = request.files['track']
        if file and allowed_extension(file.filename):
            filename = valid_filename(secure_filename(file.filename))
            filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(filepath)
            try:
                request.form = {x: request.form[x] for x in request.form}
                del request.form['csrf_token']
                dbcontroller.add_track(**request.form, uploader=current_user,
                                       filename=filename)
            except DatabaseError as e:
                print(e)
                flash(_("A track with that name already exists"))
            else:
                flash(_("Track %(name)s added", name=request.form['name']))
                return redirect(url_for('.browse_tracks'))
    return render_template('bandpanel/tracks/upload_track.html', form=form,
                           title=_("Upload track"))
Code example #28
File: news.py Project: viaict/viaduct
    def validate(self):

        # Validate all other fields with default validators
        if not FlaskForm.validate(self):
            return False
        result = True

        archive_date = self.archive_date.data
        publish_date = self.publish_date.data
        if archive_date and publish_date > archive_date:
            self.archive_date.errors.append(
                _('Archive date needs to be after the publish date.'))
            result = False

        # Test if either english or dutch is entered
        if not (self.nl_title.data or self.en_title.data):
            self.nl_title.errors.append(
                _('Either Dutch or English title required'))
            result = False
        if not (self.nl_content.data or self.en_content.data):
            self.nl_content.errors.append(
                _('Either Dutch or English content required'))
            result = False

        # XOR the fields to check that title and content were given for the same language
        if bool(self.nl_title.data) != bool(self.nl_content.data):
            self.nl_title.errors.append(
                _('Dutch title requires Dutch content and vice versa'))
            result = False
        if bool(self.en_title.data) != bool(self.en_content.data):
            self.en_title.errors.append(
                _('English title requires English content and vice versa'))
            result = False

        return result
Code example #29
def set_news_locale(news, context):
    """
    Fill model content according to language.

    This function is called after a News model is filled with data from
    the database, but before it is used by any other code.

    Use the locale of the current user/client to determine which language to
    display on the whole website. If the user's locale is unavailable, select
    the alternative language, suffixing the title of the news with the
    displayed language.
    """
    locale = get_locale()
    nl_available = news.nl_title and news.nl_content
    en_available = news.en_title and news.en_content
    if locale == 'nl' and nl_available:
        news.title = news.nl_title
        news.content = news.nl_content
    elif locale == 'en' and en_available:
        news.title = news.en_title
        news.content = news.en_content
    elif nl_available:
        news.title = news.nl_title + " (" + _('Dutch') + ")"
        news.content = news.nl_content
    elif en_available:
        news.title = news.en_title + " (" + _('English') + ")"
        news.content = news.en_content
    else:
        news.title = 'N/A'
        news.content = 'N/A'
Code example #30
def update_payment_method():
    if not current_user.credit_card:
        flash(_('You do not have a payment method on file.'), 'error')
        return redirect(url_for('user.settings'))

    active_plan = Subscription.get_plan_by_id(
        current_user.subscription.plan)

    card_last4 = str(current_user.credit_card.last4)
    stripe_key = current_app.config.get('STRIPE_PUBLISHABLE_KEY')
    form = CreditCardForm(stripe_key=stripe_key,
                          plan=active_plan,
                          name=current_user.name)

    if form.validate_on_submit():
        subscription = Subscription()
        updated = subscription.update_payment_method(user=current_user,
                                                     name=request.form.get(
                                                         'name'),
                                                     token=request.form.get(
                                                         'stripe_token'))

        if updated:
            flash(_('Your payment method has been updated.'), 'success')
        else:
            flash(_('You must enable Javascript for this request.'), 'warn')

        return redirect(url_for('user.settings'))

    return render_template('billing/payment_method.jinja2', form=form,
                           plan=active_plan, card_last4=card_last4)
Code example #31
    def properties(self, gid, sid, did, scid, tid, exid=None):
        """
        This function is used to list all the Exclusion constraint
        nodes within that collection.

        Args:
          gid: Server Group ID
          sid: Server ID
          did: Database ID
          scid: Schema ID
          tid: Table ID
          exid: Exclusion constraint ID

        Returns:

        """
        sql = render_template("/".join([self.template_path, 'properties.sql']),
                              did=did, tid=tid, cid=exid)

        status, res = self.conn.execute_dict(sql)

        if not status:
            return internal_server_error(errormsg=res)

        if len(res['rows']) == 0:
            return gone(_(
                """Could not find the exclusion constraint in the table."""
            ))

        result = res['rows'][0]

        sql = render_template(
            "/".join([self.template_path, 'get_constraint_cols.sql']),
            cid=exid,
            colcnt=result['indnatts'])
        status, res = self.conn.execute_dict(sql)

        if not status:
            return internal_server_error(errormsg=res)

        columns = []
        for row in res['rows']:
            # Bit 1 of the index options marks descending order, bit 2 marks
            # NULLS FIRST (as in pg_index.indoption).
            order = not (row['options'] & 1)
            nulls_order = bool(row['options'] & 2)

            columns.append({"column": row['coldef'].strip('"'),
                            "oper_class": row['opcname'],
                            "order": order,
                            "nulls_order": nulls_order,
                            "operator": row['oprname'],
                            "col_type": row['datatype']
                            })

        result['columns'] = columns

        return ajax_response(
            response=result,
            status=200
        )
Code example #32
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.TableColumn)

    list_title = _('List Columns')
    show_title = _('Show Column')
    add_title = _('Add Column')
    edit_title = _('Edit Column')

    can_delete = False
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        'column_name', 'verbose_name', 'description',
        'type', 'groupby', 'filterable',
        'table', 'count_distinct', 'sum', 'min', 'max', 'expression',
        'is_dttm', 'python_date_format', 'database_expression']
    add_columns = edit_columns
    list_columns = [
        'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
        'sum', 'min', 'max', 'is_dttm']
    page_size = 500
    description_columns = {
        'is_dttm': _(
            "Whether to make this column available as a "
            "[Time Granularity] option, column has to be DATETIME or "
            "DATETIME-like"),
        'filterable': _(
            "Whether this column is exposed in the `Filters` section "
            "of the explore view."),
        'type': _(
            "The data type that was inferred by the database. "
            "It may be necessary to input a type manually for "
            "expression-defined columns in some cases. In most cases "
            "users should not need to alter this."),
        'expression': utils.markdown(
            "a valid SQL expression as supported by the underlying backend. "
            "Example: `substr(name, 1, 1)`", True),
        'python_date_format': utils.markdown(Markup(
            "The pattern of timestamp format, use "
            "<a href='https://docs.python.org/2/library/"
            "datetime.html#strftime-strptime-behavior'>"
            "python datetime string pattern</a> "
            "expression. If time is stored in epoch "
            "format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` "
            "below empty if timestamp is stored in "
            "String or Integer(epoch) type"), True),
        'database_expression': utils.markdown(
            "The database expression to cast internal datetime "
            "constants to database date/timestamp type according to the DBAPI. "
            "The expression should follow the pattern of "
            "%Y-%m-%d %H:%M:%S, based on different DBAPI. "
            "The string should be a python string formatter \n"
            "`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle. "
            "Superset uses default expression based on DB URI if this "
            "field is blank.", True),
    }
    label_columns = {
        'column_name': _("Column"),
        'verbose_name': _("Verbose Name"),
        'description': _("Description"),
        'groupby': _("Groupable"),
        'filterable': _("Filterable"),
        'table': _("Table"),
        'count_distinct': _("Count Distinct"),
        'sum': _("Sum"),
        'min': _("Min"),
        'max': _("Max"),
        'expression': _("Expression"),
        'is_dttm': _("Is temporal"),
        'python_date_format': _("Datetime Format"),
        'database_expression': _("Database Expression"),
        'type': _('Type'),
    }
Code example #33
class TableModelView(DatasourceModelView, DeleteMixin):  # noqa
    datamodel = SQLAInterface(models.SqlaTable)

    list_title = _('List Tables')
    show_title = _('Show Table')
    add_title = _('Add Table')
    edit_title = _('Edit Table')

    list_columns = [
        'link', 'database',
        'changed_by_', 'modified']
    add_columns = ['database', 'schema', 'table_name']
    edit_columns = [
        'table_name', 'sql', 'filter_select_enabled', 'slices',
        'fetch_values_predicate', 'database', 'schema',
        'description', 'owner',
        'main_dttm_col', 'default_endpoint', 'offset', 'cache_timeout']
    show_columns = edit_columns + ['perm']
    related_views = [TableColumnInlineView, SqlMetricInlineView]
    base_order = ('changed_on', 'desc')
    search_columns = (
        'database', 'schema', 'table_name', 'owner',
    )
    description_columns = {
        'slices': _(
            "The list of slices associated with this table. By "
            "altering this datasource, you may change how these associated "
            "slices behave. "
            "Also note that slices need to point to a datasource, so "
            "this form will fail at saving if removing slices from a "
            "datasource. If you want to change the datasource for a slice, "
            "overwrite the slice from the 'explore view'"),
        'offset': _("Timezone offset (in hours) for this datasource"),
        'table_name': _(
            "Name of the table that exists in the source database"),
        'schema': _(
            "Schema, as used only in some databases like Postgres, Redshift "
            "and DB2"),
        'description': Markup(
            "Supports <a href='https://daringfireball.net/projects/markdown/'>"
            "markdown</a>"),
        'sql': _(
            "This field acts as a Superset view, meaning that Superset will "
            "run a query against this string as a subquery."
        ),
        'fetch_values_predicate': _(
            "Predicate applied when fetching distinct value to "
            "populate the filter control component. Supports "
            "jinja template syntax. Applies only when "
            "`Enable Filter Select` is on."
        ),
        'default_endpoint': _(
            "Redirects to this endpoint when clicking on the table "
            "from the table list"),
        'filter_select_enabled': _(
            "Whether to populate the filter's dropdown in the explore "
            "view's filter section with a list of distinct values fetched "
            "from the backend on the fly"),
    }
    base_filters = [['id', DatasourceFilter, lambda: []]]
    label_columns = {
        'slices': _("Associated Slices"),
        'link': _("Table"),
        'changed_by_': _("Changed By"),
        'database': _("Database"),
        'changed_on_': _("Last Changed"),
        'filter_select_enabled': _("Enable Filter Select"),
        'schema': _("Schema"),
        'default_endpoint': _('Default Endpoint'),
        'offset': _("Offset"),
        'cache_timeout': _("Cache Timeout"),
        'table_name': _("Table Name"),
        'fetch_values_predicate': _('Fetch Values Predicate'),
        'owner': _("Owner"),
        'main_dttm_col': _("Main Datetime Column"),
        'description': _('Description'),
    }

    def pre_add(self, table):
        with db.session.no_autoflush:
            table_query = db.session.query(models.SqlaTable).filter(
                models.SqlaTable.table_name == table.table_name,
                models.SqlaTable.schema == table.schema,
                models.SqlaTable.database_id == table.database.id)
            if db.session.query(table_query.exists()).scalar():
                raise Exception(
                    get_datasource_exist_error_mgs(table.full_name))

        # Fail before adding if the table can't be found
        if not table.database.has_table(table):
            raise Exception(_(
                "Table [{}] could not be found, "
                "please double check your "
                "database connection, schema, and "
                "table name").format(table.name))

    def post_add(self, table, flash_message=True):
        table.fetch_metadata()
        security.merge_perm(sm, 'datasource_access', table.get_perm())
        if table.schema:
            security.merge_perm(sm, 'schema_access', table.schema_perm)

        if flash_message:
            flash(_(
                "The table was created. "
                "As part of this two phase configuration "
                "process, you should now click the edit button by "
                "the new table to configure it."), "info")

    def post_update(self, table):
        self.post_add(table, flash_message=False)

    def _delete(self, pk):
        DeleteMixin._delete(self, pk)

    @expose('/edit/<pk>', methods=['GET', 'POST'])
    @has_access
    def edit(self, pk):
        """Simple hack to redirect to explore view after saving"""
        resp = super(TableModelView, self).edit(pk)
        if isinstance(resp, basestring):
            return resp
        return redirect('/superset/explore/table/{}/'.format(pk))
Code example #34
    def sql(self, gid, sid, did, scid, tid, exid=None):
        """
        This function generates sql to show in the sql pane for the selected
        Exclusion constraint.

        Args:
          gid: Server Group ID
          sid: Server ID
          did: Database ID
          scid: Schema ID
          tid: Table ID
          exid: Exclusion constraint ID

        Returns:

        """
        try:
            SQL = render_template(
                "/".join([self.template_path, 'properties.sql']),
                did=did, tid=tid, conn=self.conn, cid=exid)
            status, result = self.conn.execute_dict(SQL)
            if not status:
                return internal_server_error(errormsg=result)
            if len(result['rows']) == 0:
                return gone(_("Could not find the exclusion constraint."))

            data = result['rows'][0]
            data['schema'] = self.schema
            data['table'] = self.table

            sql = render_template(
                "/".join([self.template_path, 'get_constraint_cols.sql']),
                cid=exid,
                colcnt=data['indnatts'])
            status, res = self.conn.execute_dict(sql)

            if not status:
                return internal_server_error(errormsg=res)

            columns = []
            for row in res['rows']:
                # Bit 1 of the index options marks descending order, bit 2 marks
                # NULLS FIRST (as in pg_index.indoption).
                order = not (row['options'] & 1)
                nulls_order = bool(row['options'] & 2)

                columns.append({"column": row['coldef'].strip('"'),
                                "oper_class": row['opcname'],
                                "order": order,
                                "nulls_order": nulls_order,
                                "operator": row['oprname']
                                })

            data['columns'] = columns

            if not data['amname'] or data['amname'] == '':
                data['amname'] = 'btree'

            SQL = render_template(
                "/".join([self.template_path, 'create.sql']), data=data)

            sql_header = u"-- Constraint: {0}\n\n-- ".format(data['name'])

            sql_header += render_template(
                "/".join([self.template_path, 'delete.sql']),
                data=data)
            sql_header += "\n"

            SQL = sql_header + SQL

            return ajax_response(response=SQL)

        except Exception as e:
            return internal_server_error(errormsg=str(e))
Code example #35
    def delete(self, gid, sid, did, scid, tid, exid=None):
        """
        This function will delete an existing Exclusion constraint.

        Args:
          gid: Server Group ID
          sid: Server ID
          did: Database ID
          scid: Schema ID
          tid: Table ID
          exid: Exclusion constraint ID

        Returns:

        """
        # The code below decides whether this is a plain drop or a drop with cascade.
        if self.cmd == 'delete':
            # This is a cascade operation
            cascade = True
        else:
            cascade = False
        try:
            sql = render_template(
                "/".join([self.template_path, 'get_name.sql']),
                cid=exid
            )
            status, res = self.conn.execute_dict(sql)
            if not status:
                return internal_server_error(errormsg=res)

            if not res['rows']:
                return make_json_response(
                    success=0,
                    errormsg=_(
                        'Error: Object not found.'
                    ),
                    info=_(
                        'The specified exclusion constraint could not '
                        'be found.\n'
                    )
                )

            data = res['rows'][0]
            data['schema'] = self.schema
            data['table'] = self.table

            sql = render_template("/".join([self.template_path, 'delete.sql']),
                                  data=data,
                                  cascade=cascade)
            status, res = self.conn.execute_scalar(sql)
            if not status:
                return internal_server_error(errormsg=res)

            return make_json_response(
                success=1,
                info=_("Exclusion constraint dropped."),
                data={
                    'id': exid,
                    'sid': sid,
                    'gid': gid,
                    'did': did
                }
            )

        except Exception as e:
            return internal_server_error(errormsg=str(e))
Code example #36
    def create(self, gid, sid, did, scid, tid, exid=None):
        """
        This function will create an Exclusion constraint.

        Args:
          gid: Server Group ID
          sid: Server ID
          did: Database ID
          scid: Schema ID
          tid: Table ID
          exid: Exclusion constraint ID

        Returns:

        """
        required_args = ['columns']

        data = request.form if request.form else json.loads(
            request.data, encoding='utf-8'
        )

        for k, v in data.items():
            try:
                data[k] = json.loads(v, encoding='utf-8')
            except (ValueError, TypeError, KeyError):
                data[k] = v

        for arg in required_args:
            if arg not in data:
                return make_json_response(
                    status=400,
                    success=0,
                    errormsg=_(
                        "Could not find required parameter (%s)."
                    ) % arg
                )
            elif isinstance(data[arg], list) and len(data[arg]) < 1:
                return make_json_response(
                    status=400,
                    success=0,
                    errormsg=_(
                        "Could not find required parameter (%s)."
                    ) % arg
                )

        data['schema'] = self.schema
        data['table'] = self.table
        try:
            if 'name' not in data or data['name'] == "":
                SQL = render_template(
                    "/".join([self.template_path, 'begin.sql']))
                # Start transaction.
                status, res = self.conn.execute_scalar(SQL)
                if not status:
                    self.end_transaction()
                    return internal_server_error(errormsg=res)

            # The below SQL will execute CREATE DDL only
            SQL = render_template(
                "/".join([self.template_path, 'create.sql']),
                data=data, conn=self.conn
            )
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

            if 'name' not in data or data['name'] == "":
                sql = render_template(
                    "/".join([self.template_path,
                              'get_oid_with_transaction.sql']),
                    tid=tid)

                status, res = self.conn.execute_dict(sql)
                if not status:
                    self.end_transaction()
                    return internal_server_error(errormsg=res)

                self.end_transaction()

                data['name'] = res['rows'][0]['name']

            else:
                sql = render_template(
                    "/".join([self.template_path, 'get_oid.sql']),
                    name=data['name']
                )
                status, res = self.conn.execute_dict(sql)
                if not status:
                    self.end_transaction()
                    return internal_server_error(errormsg=res)

            return jsonify(
                node=self.blueprint.generate_browser_node(
                    res['rows'][0]['oid'],
                    tid,
                    data['name'],
                    icon="icon-exclusion_constraint"
                )
            )

        except Exception as e:
            self.end_transaction()

            return make_json_response(
                status=400,
                success=0,
                errormsg=str(e)
            )
Code Example #37
class ExclusionConstraintModule(ConstraintTypeModule):
    """
    class ExclusionConstraintModule(ConstraintTypeModule)

        A module class for Exclusion constraint node derived from
        ConstraintTypeModule.

    Methods:
    -------
    * __init__(*args, **kwargs)
      - Method is used to initialize the ExclusionConstraintModule and
      its base module.

    * get_nodes(gid, sid, did)
      - Method is used to generate the browser collection node.

    * node_inode()
      - Method is overridden from its base class to make the node as leaf node.

    * script_load()
      - Load the module script for exclusion_constraint, when any of the
        database nodes is initialized.
    """

    NODE_TYPE = 'exclusion_constraint'
    COLLECTION_LABEL = _("Exclusion Constraints")

    def __init__(self, *args, **kwargs):
        """
        Method is used to initialize the ExclusionConstraintModule and
        its base module.

        Args:
          *args:
          **kwargs:

        Returns:

        """
        self.min_ver = None
        self.max_ver = None
        super(ExclusionConstraintModule, self).__init__(*args, **kwargs)

    def get_nodes(self, gid, sid, did, scid, tid):
        """
        Generate the collection node
        """
        pass

    @property
    def node_inode(self):
        """
        Override this property to make the node a leaf node.

        Returns: False as this is the leaf node
        """
        return False

    @property
    def script_load(self):
        """
        Load the module script for exclusion_constraint, when any of the
        database nodes is initialized.

        Returns: node type of the database module.
        """
        return database.DatabaseModule.NODE_TYPE

    @property
    def module_use_template_javascript(self):
        """
        Returns whether Jinja2 template is used for generating the javascript
        module.
        """
        return False
Code Example #38
def execute_sql_statement(sql_statement, query, user_name, session, cursor):
    """Executes a single SQL statement"""
    query_id = query.id
    database = query.database
    db_engine_spec = database.db_engine_spec
    parsed_query = ParsedQuery(sql_statement)
    sql = parsed_query.stripped()
    SQL_MAX_ROWS = app.config.get("SQL_MAX_ROW")

    if not parsed_query.is_readonly() and not database.allow_dml:
        raise SqlLabSecurityException(
            _("Only `SELECT` statements are allowed against this database"))
    if query.select_as_cta:
        if not parsed_query.is_select():
            raise SqlLabException(
                _("Only `SELECT` statements can be used with the CREATE TABLE "
                  "feature."))
        if not query.tmp_table_name:
            start_dttm = datetime.fromtimestamp(query.start_time)
            query.tmp_table_name = "tmp_{}_table_{}".format(
                query.user_id, start_dttm.strftime("%Y_%m_%d_%H_%M_%S"))
        sql = parsed_query.as_create_table(query.tmp_table_name)
        query.select_as_cta_used = True
    if parsed_query.is_select():
        if SQL_MAX_ROWS and (not query.limit or query.limit > SQL_MAX_ROWS):
            query.limit = SQL_MAX_ROWS
        if query.limit:
            sql = database.apply_limit_to_sql(sql, query.limit)

    # Hook to allow environment-specific mutation (usually comments) to the SQL
    SQL_QUERY_MUTATOR = config.get("SQL_QUERY_MUTATOR")
    if SQL_QUERY_MUTATOR:
        sql = SQL_QUERY_MUTATOR(sql, user_name, security_manager, database)

    try:
        if log_query:
            log_query(
                query.database.sqlalchemy_uri,
                query.executed_sql,
                query.schema,
                user_name,
                __name__,
                security_manager,
            )
        query.executed_sql = sql
        with stats_timing("sqllab.query.time_executing_query", stats_logger):
            logging.info(f"Query {query_id}: Running query: \n{sql}")
            db_engine_spec.execute(cursor, sql, async_=True)
            logging.info(f"Query {query_id}: Handling cursor")
            db_engine_spec.handle_cursor(cursor, query, session)

        with stats_timing("sqllab.query.time_fetching_results", stats_logger):
            logging.debug(
                "Query {}: Fetching data for query object: {}".format(
                    query_id, query.to_dict()))
            data = db_engine_spec.fetch_data(cursor, query.limit)

    except SoftTimeLimitExceeded as e:
        logging.exception(f"Query {query_id}: {e}")
        raise SqlLabTimeoutException(
            "SQL Lab timeout. This environment's policy is to kill queries "
            "after {} seconds.".format(SQLLAB_TIMEOUT))
    except Exception as e:
        logging.exception(f"Query {query_id}: {e}")
        raise SqlLabException(db_engine_spec.extract_error_message(e))

    logging.debug(f"Query {query_id}: Fetching cursor description")
    cursor_description = cursor.description
    return SupersetDataFrame(data, cursor_description, db_engine_spec)
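
The comment in the function above refers to a SQL_QUERY_MUTATOR hook that is read from the config and invoked as SQL_QUERY_MUTATOR(sql, user_name, security_manager, database). Below is a minimal sketch of such a callable; the audit-comment mutation and the superset_config.py wiring are illustrative assumptions, not the project's shipped behavior.

def sql_query_mutator(sql, user_name, security_manager, database):
    # Matches the call signature used above; security_manager and database
    # are accepted to fit that signature but are unused in this simple sketch.
    return "-- submitted by {}\n{}".format(user_name, sql)

# Assumed wiring, e.g. in superset_config.py:
# SQL_QUERY_MUTATOR = sql_query_mutator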
Code Example #39
File: forms.py Project: crazyqiqi0639/Forum
class CommentForm(Form):
    body = PageDownField(label=_(u'评论'), validators=[DataRequired()])
    submit = SubmitField(_(u'发表'))
Code Example #40
def validate_username(self, username):
    if username.data != self.original_username:
        user = User.query.filter_by(username=self.username.data).first()
        if user is not None:
            raise ValidationError(_('Please use a different username.'))
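
The validator above compares against self.original_username, which implies the enclosing form records the user's current name when it is constructed. Below is a minimal sketch of one plausible surrounding form, assuming Flask-WTF, WTForms and Flask-Babel; apart from validate_username itself, the form name, fields and the app.models import path are illustrative assumptions.

from flask_babel import gettext as _, lazy_gettext as _l
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired, ValidationError

from app.models import User  # assumed location of the application's user model


class EditProfileForm(FlaskForm):  # illustrative form name
    username = StringField(_l('Username'), validators=[DataRequired()])
    submit = SubmitField(_l('Submit'))

    def __init__(self, original_username, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remember the current name so an unchanged username is not
        # reported as a conflict by validate_username below.
        self.original_username = original_username

    def validate_username(self, username):
        if username.data != self.original_username:
            user = User.query.filter_by(username=self.username.data).first()
            if user is not None:
                raise ValidationError(_('Please use a different username.'))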
Code Example #41
def as_problem_detail_document(self, debug=False):
    """Return a suitable problem detail document."""
    msg = _("Integration error communicating with %(service_name)s",
            service_name=self.service_name)
    return INTEGRATION_ERROR.detailed(msg)
Code Example #42
File: forms.py Project: crazyqiqi0639/Forum
class PostForm(Form):
    title = StringField(label=_(u'标题'), validators=[DataRequired()])
    body = PageDownField(label=_(u'正文'), validators=[DataRequired()])
    submit = SubmitField(_(u'发表'))
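
Note that the labels in CommentForm and PostForm above are produced by calling _ inside the class body, i.e. once at import time. If _ were a plain gettext, the labels would be translated before any request locale is known, so form labels are normally bound with a lazy translator. A minimal sketch assuming Flask-Babel follows; the form name and fields are illustrative, not the Forum project's code.

from flask_babel import lazy_gettext as _l
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired


class PostTitleForm(FlaskForm):
    # lazy_gettext defers translation until the label is rendered inside a
    # request, so each user sees it in their own locale.
    title = StringField(label=_l(u'标题'), validators=[DataRequired()])
    submit = SubmitField(_l(u'发表'))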
Code Example #43
            def wrapped(self, **kwargs):
                self.manager = get_driver(
                    PG_DEFAULT_DRIVER).connection_manager(kwargs['sid'])
                self.conn = self.manager.connection()

                driver = get_driver(PG_DEFAULT_DRIVER)
                self.qtIdent = driver.qtIdent

                if not self.conn.connected():
                    return precondition_required(
                        _("Connection to the server has been lost."))

                ver = self.manager.version

                self.sql_path = 'role/sql/{0}/'.format(
                    'post9_4' if ver >= 90500 else \
                        'post9_1' if ver >= 90200 else \
                            'post9_0' if ver >= 90100 else \
                                'post8_4'
                )

                self.alterKeys = [
                    u'rolcanlogin', u'rolsuper', u'rolcreatedb',
                    u'rolcreaterole', u'rolinherit', u'rolreplication',
                    u'rolconnlimit', u'rolvaliduntil', u'rolpassword'
                ] if ver >= 90200 else [
                    u'rolcanlogin', u'rolsuper', u'rolcreatedb',
                    u'rolcreaterole', u'rolinherit', u'rolconnlimit',
                    u'rolvaliduntil', u'rolpassword'
                ]

                check_permission = False
                fetch_name = False
                forbidden_msg = None

                if action in ['drop', 'update']:
                    check_permission = True
                    fetch_name = True
                    if action == 'drop':
                        forbidden_msg = _(
                            "The current user does not have permission to drop the role."
                        )
                    else:
                        forbidden_msg = _(
                            "The current user does not have permission to update the role."
                        )
                elif action == 'create':
                    check_permission = True
                    forbidden_msg = _(
                        "The current user does not have permission to create the role."
                    )
                elif (action == 'msql' and 'rid' in kwargs
                      and kwargs['rid'] != -1):
                    fetch_name = True

                if check_permission:
                    user = self.manager.user_info

                    if not user['is_superuser'] and \
                            not user['can_create_role']:
                        if (action != 'update'
                                or 'rid' in kwargs and kwargs['rid'] != -1
                                and user['id'] != kwargs['rid']):
                            return forbidden(forbidden_msg)

                if fetch_name:

                    status, res = self.conn.execute_dict(
                        render_template(self.sql_path + 'permission.sql',
                                        rid=kwargs['rid'],
                                        conn=self.conn))

                    if not status:
                        return internal_server_error(
                            _("Error retrieving the role information.\n{0}").
                            format(res))

                    if len(res['rows']) == 0:
                        return gone(
                            _("Could not find the role on the database server."
                              ))

                    row = res['rows'][0]

                    self.role = row['rolname']
                    self.rolCanLogin = row['rolcanlogin']
                    self.rolCatUpdate = row['rolcatupdate']
                    self.rolSuper = row['rolsuper']

                return f(self, **kwargs)
Code Example #44
File: views.py Project: wk-j/flask-validator
class AddressView(ModelView):  # class header inferred; it is truncated in the source snippet
    datamodel = SQLAInterface(Address)
    list_columns = ["id", "no"]


class GroupMasterView(MasterDetailView):
    datamodel = SQLAInterface(ContactGroup)
    related_views = [ContactGeneralView, AddressView]


class GroupGeneralView(ModelView):
    datamodel = SQLAInterface(ContactGroup)
    related_views = [ContactGeneralView]


fixed_translations_import = [
    _("List Groups"),
    _("Manage Groups"),
    _("List Contacts"),
    _("Contacts Chart"),
    _("Contacts Birth Chart"),
]

db.create_all()
fill_gender()
appbuilder.add_view(GroupMasterView,
                    "List Groups",
                    icon="fa-folder-open-o",
                    category="Contacts")
appbuilder.add_separator("Contacts")
appbuilder.add_view(GroupGeneralView,
                    "Manage Groups",
Code Example #45
    def fetch_metadata(self):
        """Fetches the metadata for the table and merges it in"""
        try:
            table = self.get_sqla_table_object()
        except Exception:
            raise Exception(
                _("Table [{}] doesn't seem to exist in the specified database, "
                  "couldn't fetch column information").format(self.table_name))

        M = SqlMetric  # noqa
        metrics = []
        any_date_col = None
        db_dialect = self.database.get_dialect()
        dbcols = (db.session.query(TableColumn).filter(
            TableColumn.table == self).filter(
                or_(TableColumn.column_name == col.name
                    for col in table.columns)))
        dbcols = {dbcol.column_name: dbcol for dbcol in dbcols}

        for col in table.columns:
            try:
                datatype = col.type.compile(dialect=db_dialect).upper()
            except Exception as e:
                datatype = 'UNKNOWN'
                logging.error('Unrecognized data type in {}.{}'.format(
                    table, col.name))
                logging.exception(e)
            dbcol = dbcols.get(col.name, None)
            if not dbcol:
                dbcol = TableColumn(column_name=col.name, type=datatype)
                dbcol.groupby = dbcol.is_string
                dbcol.filterable = dbcol.is_string
                dbcol.sum = dbcol.is_num
                dbcol.avg = dbcol.is_num
                dbcol.is_dttm = dbcol.is_time
            self.columns.append(dbcol)
            if not any_date_col and dbcol.is_time:
                any_date_col = col.name

            quoted = str(col.compile(dialect=db_dialect))
            if dbcol.sum:
                metrics.append(
                    M(
                        metric_name='sum__' + dbcol.column_name,
                        verbose_name='sum__' + dbcol.column_name,
                        metric_type='sum',
                        expression='SUM({})'.format(quoted),
                    ))
            if dbcol.avg:
                metrics.append(
                    M(
                        metric_name='avg__' + dbcol.column_name,
                        verbose_name='avg__' + dbcol.column_name,
                        metric_type='avg',
                        expression='AVG({})'.format(quoted),
                    ))
            if dbcol.max:
                metrics.append(
                    M(
                        metric_name='max__' + dbcol.column_name,
                        verbose_name='max__' + dbcol.column_name,
                        metric_type='max',
                        expression='MAX({})'.format(quoted),
                    ))
            if dbcol.min:
                metrics.append(
                    M(
                        metric_name='min__' + dbcol.column_name,
                        verbose_name='min__' + dbcol.column_name,
                        metric_type='min',
                        expression='MIN({})'.format(quoted),
                    ))
            if dbcol.count_distinct:
                metrics.append(
                    M(
                        metric_name='count_distinct__' + dbcol.column_name,
                        verbose_name='count_distinct__' + dbcol.column_name,
                        metric_type='count_distinct',
                        expression='COUNT(DISTINCT {})'.format(quoted),
                    ))
            dbcol.type = datatype

        metrics.append(
            M(
                metric_name='count',
                verbose_name='COUNT(*)',
                metric_type='count',
                expression='COUNT(*)',
            ))

        dbmetrics = db.session.query(M).filter(M.table_id == self.id).filter(
            or_(M.metric_name == metric.metric_name for metric in metrics))
        dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
        for metric in metrics:
            metric.table_id = self.id
            if not dbmetrics.get(metric.metric_name, None):
                db.session.add(metric)
        if not self.main_dttm_col:
            self.main_dttm_col = any_date_col
        db.session.merge(self)
        db.session.commit()
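
The loop above derives metric names and SQL expressions from each column's aggregate flags using a fixed <agg>__<column> naming convention. A small self-contained sketch of that convention for a hypothetical numeric column (in the real code the quoted form comes from the database dialect):

def expected_aggregate_metrics(column_name, quoted):
    # Mirrors the naming used above: metric_name '<agg>__<column>' and an
    # expression that applies the SQL aggregate to the dialect-quoted column.
    aggregates = {'sum': 'SUM', 'avg': 'AVG', 'max': 'MAX', 'min': 'MIN'}
    return {
        '{}__{}'.format(agg, column_name): '{}({})'.format(func, quoted)
        for agg, func in aggregates.items()
    }

# expected_aggregate_metrics('amount', '"amount"')
# -> {'sum__amount': 'SUM("amount")', 'avg__amount': 'AVG("amount")', ...}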
Code Example #46
class BapYetkilisiDashboardView(FlaskView):
    """
    Contains the Proje Yurutucu (project coordinator) dashboard view methods
    """
    @staticmethod
    @login_required
    @route("/bap-yetkilisi-kontrol-paneli", methods=["GET"])
    @auth.requires(Or(Role("BAP Yetkilisi"), Role("BAP Admin")),
                   menu_registry={
                       'path': '.bap_yetkilisi_dashboard',
                       'title': _("Kontrol Paneli")
                   })
    def index():
        """Bap yetkilisi dashboard genel bilgiler"""

        personel_sayi = DB.session.query(
            Personel.id).filter(Personel.personel_turu == "akademik").count()
        hakemler = DB.session.query(Hakem).all()
        projeler = DB.session.query(Proje).filter(
            or_(
                Proje.proje_basvuru_durumu == ProjeBasvuruDurumu.tamamlandi,
                Proje.proje_basvuru_durumu ==
                ProjeBasvuruDurumu.revizyon_bekleniyor)).all()
        devam_etmeyen_proje_sayi = 0
        toplam_butce = DB.session.query(
            func.sum(GelirKasasi.toplam_para).label("toplam_butce"),
            func.sum(GelirKasasi.harcanan_para).label("harcanan_para"),
            func.sum(GelirKasasi.rezerv_para).label("rezerv_para"),
            GelirKasasi.adi.label("kasa_adi"),
            GelirKasasi.toplam_para.label("toplam_para")).filter(
                GelirKasasi.mali_yil == datetime.date.today().year).group_by(
                    GelirKasasi.toplam_para, GelirKasasi.adi,
                    GelirKasasi.harcanan_para, GelirKasasi.rezerv_para).all()
        butce_toplami = 0
        butce_kasalari = {}
        harcanan_para = 0
        rezerv_para = 0
        for butce in toplam_butce:
            butce_toplami += butce.toplam_butce.quantize(Decimal(".01"))
            harcanan_para += butce.harcanan_para.quantize(Decimal(".01"))
            rezerv_para += butce.rezerv_para.quantize(Decimal(".01"))

        butce_harcamalari = {
            "Toplam Bütçe": butce_toplami,
            "Harcanan": harcanan_para,
            "Rezerv": rezerv_para
        }

        proje_butce = 0
        proje_degerlendirmeleri = {
            "Olumlu": 0,
            "Olumsuz": 0,
            "Revizyon gerekli": 0,
            "Değerlendirilmedi": 0,
        }
        hakem_sayi = {"Kurum içi": 0, "Kurum dışı": 0}
        for hakem in hakemler:
            if hakem.kurum_ici:
                hakem_sayi["Kurum içi"] += 1
            else:
                hakem_sayi["Kurum dışı"] += 1
        for proje in projeler:
            if proje.proje_durumu.current_app_state == AppStates.son:
                devam_etmeyen_proje_sayi += 1

            for rapor in proje.proje_raporlari:
                if rapor.rapor_degerlendirme_durumu:
                    if rapor.rapor_degerlendirme_durumu == ProjeDegerlendirmeSonuc.olumlu:
                        proje_degerlendirmeleri["Olumlu"] += 1
                    elif rapor.rapor_degerlendirme_durumu == ProjeDegerlendirmeSonuc.olumsuz:
                        proje_degerlendirmeleri["Olumsuz"] += 1
                    elif rapor.rapor_degerlendirme_durumu == ProjeDegerlendirmeSonuc.revizyon:
                        proje_degerlendirmeleri["Revizyon gerekli"] += 1
                    elif rapor.rapor_degerlendirme_durumu == ProjeDegerlendirmeSonuc.degerlendirilmedi:
                        proje_degerlendirmeleri["Değerlendirilmedi"] += 1

            proje_butce += proje.kabul_edilen_butce if proje.kabul_edilen_butce else 0

        proje_sayi = {
            'Devam Eden': len(projeler) - devam_etmeyen_proje_sayi,
            'Devam Etmeyen': devam_etmeyen_proje_sayi
        }

        for butce in toplam_butce:
            toplam_para = butce.toplam_para.quantize(Decimal(".01"))
            butce_kasalari.update({butce.kasa_adi: toplam_para})

        return render_template('bap_yetkilisi_dashboard.html',
                               hakem_sayi=hakem_sayi,
                               butce_toplami=butce_toplami,
                               proje_sayi=proje_sayi,
                               proje_degerlendirmeleri=proje_degerlendirmeleri,
                               proje_butce=proje_butce,
                               personel_sayi=personel_sayi,
                               butce_kasalari=butce_kasalari,
                               butce_harcamalari=butce_harcamalari)

    @staticmethod
    @login_required
    @route("/rektor-kokpiti", methods=["GET"])
    @auth.requires(Role("Rektör"))
    def rektor_kokpiti():

        return render_template('rektor_kokpiti.html')
Code Example #47
	def validate_email(self, email):
		user = User.query.filter_by(email=email.data).first()
		if user is not None:
			raise ValidationError(_("Please enter a different email."))
Code Example #48
        def wrap(self, **kwargs):

            data = None
            if request.data:
                data = json.loads(request.data, encoding='utf-8')
            else:
                data = dict()
                req = request.args or request.form

                for key in req:

                    val = req[key]
                    if key in [
                            u'rolcanlogin', u'rolsuper', u'rolcreatedb',
                            u'rolcreaterole', u'rolinherit', u'rolreplication',
                            u'rolcatupdate', u'variables', u'rolmembership',
                            u'seclabels'
                    ]:
                        data[key] = json.loads(val, encoding='utf-8')
                    else:
                        data[key] = val

            if u'rid' not in kwargs or kwargs['rid'] == -1:
                if u'rolname' not in data:
                    return precondition_required(_("Name must be specified."))

            if u'rolvaliduntil' in data:
                # Make date explicit so that it works with every
                # postgres database datestyle format
                try:
                    if data[u'rolvaliduntil'] is not None and \
                                    data[u'rolvaliduntil'] != '' and \
                                    len(data[u'rolvaliduntil']) > 0:
                        data[u'rolvaliduntil'] = dateutil_parser.parse(
                            data[u'rolvaliduntil']).isoformat()
                except Exception:
                    return precondition_required(_("Date format is invalid."))

            if u'rolconnlimit' in data:
                if data[u'rolconnlimit'] is not None:
                    data[u'rolconnlimit'] = int(data[u'rolconnlimit'])
                    if type(data[u'rolconnlimit']
                            ) != int or data[u'rolconnlimit'] < -1:
                        return precondition_required(
                            _("Connection limit must be an integer value or equal to -1."
                              ))

            if u'rolmembership' in data:
                if u'rid' not in kwargs or kwargs['rid'] == -1:
                    msg = _("""
Role membership information must be passed as an array of JSON objects in the
following format:

rolmembership:[{
    role: [rolename],
    admin: True/False
    },
    ...
]""")
                    if type(data[u'rolmembership']) != list:
                        return precondition_required(msg)

                    data[u'members'] = []
                    data[u'admins'] = []

                    for r in data[u'rolmembership']:
                        if type(
                                r
                        ) != dict or u'role' not in r or u'admin' not in r:
                            return precondition_required(msg)
                        else:
                            if r[u'admin']:
                                data[u'admins'].append(r[u'role'])
                            else:
                                data[u'members'].append(r[u'role'])
                else:
                    msg = _("""
Role membership information must be passed as a string representing an array of
JSON objects in the following format:
rolmembership:{
    'added': [{
        role: [rolename],
        admin: True/False
        },
        ...
        ],
    'deleted': [{
        role: [rolename],
        admin: True/False
        },
        ...
        ],
    'updated': [{
        role: [rolename],
        admin: True/False
        },
        ...
        ]
""")
                    if type(data[u'rolmembership']) != dict:
                        return precondition_required(msg)

                    data[u'members'] = []
                    data[u'admins'] = []
                    data[u'revoked_admins'] = []
                    data[u'revoked'] = []

                    if u'added' in data[u'rolmembership']:
                        roles = (data[u'rolmembership'])[u'added']

                        if type(roles) != list:
                            return precondition_required(msg)

                        for r in roles:
                            if (type(r) != dict or u'role' not in r
                                    or u'admin' not in r):
                                return precondition_required(msg)

                            if r[u'admin']:
                                data[u'admins'].append(r[u'role'])
                            else:
                                data[u'members'].append(r[u'role'])

                    if u'deleted' in data[u'rolmembership']:
                        roles = (data[u'rolmembership'])[u'deleted']

                        if type(roles) != list:
                            return precondition_required(msg)

                        for r in roles:
                            if type(r) != dict or u'role' not in r:
                                return precondition_required(msg)

                            data[u'revoked'].append(r[u'role'])

                    if u'changed' in data[u'rolmembership']:
                        roles = (data[u'rolmembership'])[u'changed']

                        if type(roles) != list:
                            return precondition_required(msg)

                        for r in roles:
                            if (type(r) != dict or u'role' not in r
                                    or u'admin' not in r):
                                return precondition_required(msg)

                            if not r[u'admin']:
                                data[u'revoked_admins'].append(r[u'role'])
                            else:
                                data[u'admins'].append(r[u'role'])

            if self.manager.version >= 90200:
                if u'seclabels' in data:
                    if u'rid' not in kwargs or kwargs['rid'] == -1:
                        msg = _("""
Security Label must be passed as an array of JSON objects in the following
format:
seclabels:[{
    provider: <provider>,
    label: <label>
    },
    ...
]""")
                        if type(data[u'seclabels']) != list:
                            return precondition_required(msg)

                        for s in data[u'seclabels']:
                            if (type(s) != dict or u'provider' not in s
                                    or u'label' not in s):
                                return precondition_required(msg)
                    else:
                        msg = _("""
Security Label must be passed as an array of JSON objects in the following
format:
seclabels:{
    'added': [{
        provider: <provider>,
        label: <label>
        },
        ...
        ],
    'deleted': [{
        provider: <provider>,
        label: <label>
        },
        ...
        ],
    'updated': [{
        provider: <provider>,
        label: <label>
        },
        ...
        ]
""")
                        seclabels = data[u'seclabels']
                        if type(seclabels) != dict:
                            return precondition_required(msg)

                        if u'added' in seclabels:
                            new_seclabels = seclabels[u'added']

                            if type(new_seclabels) != list:
                                return precondition_required(msg)

                            for s in new_seclabels:
                                if (type(s) != dict or u'provider' not in s
                                        or u'label' not in s):
                                    return precondition_required(msg)

                        if u'deleted' in seclabels:
                            removed_seclabels = seclabels[u'deleted']

                            if type(removed_seclabels) != list:
                                return precondition_required(msg)

                            for s in removed_seclabels:
                                if (type(s) != dict or u'provider' not in s):
                                    return precondition_required(msg)

                        if u'changed' in seclabels:
                            changed_seclabels = seclabels[u'changed']

                            if type(changed_seclabels) != list:
                                return precondition_required(msg)

                            for s in changed_seclabels:
                                if (type(s) != dict or u'provider' not in s
                                        and u'label' not in s):
                                    return precondition_required(msg)

            if u'variables' in data:
                if u'rid' not in kwargs or kwargs['rid'] == -1:
                    msg = _("""
Configuration parameters/variables must be passed as an array of JSON objects in
the following format in create mode:
variables:[{
    database: <database> or null,
    name: <configuration>,
    value: <value>
    },
    ...
]""")
                    if type(data[u'variables']) != list:
                        return precondition_required(msg)

                    for r in data[u'variables']:
                        if (type(r) != dict or u'name' not in r
                                or u'value' not in r):
                            return precondition_required(msg)
                else:
                    msg = _("""
Configuration parameters/variables must be passed as an array of JSON objects in
the following format in update mode:
rolmembership:{
    'added': [{
        database: <database> or null,
        name: <configuration>,
        value: <value>
        },
        ...
        ],
    'deleted': [{
        database: <database> or null,
        name: <configuration>,
        value: <value>
        },
        ...
        ],
    'updated': [{
        database: <database> or null,
        name: <configuration>,
        value: <value>
        },
        ...
        ]
""")
                    variables = data[u'variables']
                    if type(variables) != dict:
                        return precondition_required(msg)

                    if u'added' in variables:
                        new_vars = variables[u'added']

                        if type(new_vars) != list:
                            return precondition_required(msg)

                        for v in new_vars:
                            if (type(v) != dict or u'name' not in v
                                    or u'value' not in v):
                                return precondition_required(msg)

                    if u'deleted' in variables:
                        delete_vars = variables[u'deleted']

                        if type(delete_vars) != list:
                            return precondition_required(msg)

                        for v in delete_vars:
                            if type(v) != dict or u'name' not in v:
                                return precondition_required(msg)

                    if u'changed' in variables:
                        new_vars = variables[u'changed']

                        if type(new_vars) != list:
                            return precondition_required(msg)

                        for v in new_vars:
                            if (type(v) != dict or u'name' not in v
                                    or u'value' not in v):
                                return precondition_required(msg)

            self.request = data

            return f(self, **kwargs)
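
The long error messages above spell out the shapes that rolmembership must take. For reference, here is a minimal sketch of the parsed structures that would pass these checks; the role names are made up, and in update mode the code looks for 'added', 'deleted' and 'changed' keys (when submitted as form or query parameters these values arrive as JSON strings and are decoded per key at the top of wrap).

# Create mode ('rid' missing or -1): rolmembership is a list of objects.
create_role_data = {
    u'rolname': u'report_writer',
    u'rolmembership': [
        {u'role': u'readers', u'admin': False},
        {u'role': u'editors', u'admin': True},
    ],
}

# Update mode: rolmembership is a dict of added / deleted / changed lists.
update_role_data = {
    u'rolmembership': {
        u'added': [{u'role': u'readers', u'admin': False}],
        u'deleted': [{u'role': u'editors'}],
        u'changed': [{u'role': u'auditors', u'admin': True}],
    },
}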
Code Example #49
File: models.py Project: tong900801/beta_v1.0
    def get_sqla_query(  # sqla
        self,
        groupby,
        metrics,
        granularity,
        from_dttm,
        to_dttm,
        filter=None,
        is_timeseries=True,
        timeseries_limit=15,
        timeseries_limit_metric=None,
        row_limit=None,
        inner_from_dttm=None,
        inner_to_dttm=None,
        orderby=None,
        extras=None,
        columns=None,
        order_desc=True,
    ) -> SqlaQuery:
        """Querying any sqla table from this common interface"""
        template_kwargs = {
            "from_dttm": from_dttm,
            "groupby": groupby,
            "metrics": metrics,
            "row_limit": row_limit,
            "to_dttm": to_dttm,
            "filter": filter,
            "columns": {col.column_name: col
                        for col in self.columns},
        }
        template_kwargs.update(self.template_params_dict)
        extra_cache_keys: List[Any] = []
        template_kwargs["extra_cache_keys"] = extra_cache_keys
        template_processor = self.get_template_processor(**template_kwargs)
        db_engine_spec = self.database.db_engine_spec
        prequeries: List[str] = []

        orderby = orderby or []

        # For backward compatibility
        if granularity not in self.dttm_cols:
            granularity = self.main_dttm_col

        # Database spec supports join-free timeslot grouping
        time_groupby_inline = db_engine_spec.time_groupby_inline

        cols: Dict[str,
                   Column] = {col.column_name: col
                              for col in self.columns}
        metrics_dict: Dict[str, SqlMetric] = {
            m.metric_name: m
            for m in self.metrics
        }

        if not granularity and is_timeseries:
            raise Exception(
                _("Datetime column not provided as part table configuration "
                  "and is required by this type of chart"))
        if not groupby and not metrics and not columns:
            raise Exception(_("Empty query?"))
        metrics_exprs = []
        for m in metrics:
            if utils.is_adhoc_metric(m):
                metrics_exprs.append(self.adhoc_metric_to_sqla(m, cols))
            elif m in metrics_dict:
                metrics_exprs.append(metrics_dict[m].get_sqla_col())
            else:
                raise Exception(
                    _("Metric '%(metric)s' does not exist", metric=m))
        if metrics_exprs:
            main_metric_expr = metrics_exprs[0]
        else:
            main_metric_expr, label = literal_column("COUNT(*)"), "ccount"
            main_metric_expr = self.make_sqla_column_compatible(
                main_metric_expr, label)

        select_exprs: List[Column] = []
        groupby_exprs_sans_timestamp: OrderedDict = OrderedDict()

        if groupby:
            select_exprs = []
            for s in groupby:
                if s in cols:
                    outer = cols[s].get_sqla_col()
                else:
                    outer = literal_column(f"({s})")
                    outer = self.make_sqla_column_compatible(outer, s)

                groupby_exprs_sans_timestamp[outer.name] = outer
                select_exprs.append(outer)
        elif columns:
            for s in columns:
                select_exprs.append(
                    cols[s].get_sqla_col() if s in cols else self.
                    make_sqla_column_compatible(literal_column(s)))
            metrics_exprs = []

        time_range_endpoints = extras.get("time_range_endpoints")
        groupby_exprs_with_timestamp = OrderedDict(
            groupby_exprs_sans_timestamp.items())
        if granularity:
            dttm_col = cols[granularity]
            time_grain = extras.get("time_grain_sqla")
            time_filters = []

            if is_timeseries:
                timestamp = dttm_col.get_timestamp_expression(time_grain)
                select_exprs += [timestamp]
                groupby_exprs_with_timestamp[timestamp.name] = timestamp

            # Use main dttm column to support index with secondary dttm columns.
            if (db_engine_spec.time_secondary_columns
                    and self.main_dttm_col in self.dttm_cols
                    and self.main_dttm_col != dttm_col.column_name):
                time_filters.append(cols[self.main_dttm_col].get_time_filter(
                    from_dttm, to_dttm, time_range_endpoints))
            time_filters.append(
                dttm_col.get_time_filter(from_dttm, to_dttm,
                                         time_range_endpoints))

        select_exprs += metrics_exprs

        labels_expected = [c._df_label_expected for c in select_exprs]

        select_exprs = db_engine_spec.make_select_compatible(
            groupby_exprs_with_timestamp.values(), select_exprs)
        qry = sa.select(select_exprs)

        tbl = self.get_from_clause(template_processor)

        if not columns:
            qry = qry.group_by(*groupby_exprs_with_timestamp.values())

        where_clause_and = []
        having_clause_and: List = []
        for flt in filter:
            if not all([flt.get(s) for s in ["col", "op"]]):
                continue
            col = flt["col"]
            op = flt["op"]
            col_obj = cols.get(col)
            if col_obj:
                is_list_target = op in ("in", "not in")
                eq = self.filter_values_handler(
                    flt.get("val"),
                    target_column_is_numeric=col_obj.is_num,
                    is_list_target=is_list_target,
                )
                if op in ("in", "not in"):
                    cond = col_obj.get_sqla_col().in_(eq)
                    if "<NULL>" in eq:
                        cond = or_(cond, col_obj.get_sqla_col() == None)
                    if op == "not in":
                        cond = ~cond
                    where_clause_and.append(cond)
                else:
                    if col_obj.is_num:
                        eq = utils.string_to_num(flt["val"])
                    if op == "==":
                        where_clause_and.append(col_obj.get_sqla_col() == eq)
                    elif op == "!=":
                        where_clause_and.append(col_obj.get_sqla_col() != eq)
                    elif op == ">":
                        where_clause_and.append(col_obj.get_sqla_col() > eq)
                    elif op == "<":
                        where_clause_and.append(col_obj.get_sqla_col() < eq)
                    elif op == ">=":
                        where_clause_and.append(col_obj.get_sqla_col() >= eq)
                    elif op == "<=":
                        where_clause_and.append(col_obj.get_sqla_col() <= eq)
                    elif op == "LIKE":
                        where_clause_and.append(
                            col_obj.get_sqla_col().like(eq))
                    elif op == "IS NULL":
                        where_clause_and.append(col_obj.get_sqla_col() == None)
                    elif op == "IS NOT NULL":
                        where_clause_and.append(col_obj.get_sqla_col() != None)
        if extras:
            where = extras.get("where")
            if where:
                where = template_processor.process_template(where)
                where_clause_and += [sa.text("({})".format(where))]
            having = extras.get("having")
            if having:
                having = template_processor.process_template(having)
                having_clause_and += [sa.text("({})".format(having))]
        if granularity:
            qry = qry.where(and_(*(time_filters + where_clause_and)))
        else:
            qry = qry.where(and_(*where_clause_and))
        qry = qry.having(and_(*having_clause_and))

        if not orderby and not columns:
            orderby = [(main_metric_expr, not order_desc)]

        for col, ascending in orderby:
            direction = asc if ascending else desc
            if utils.is_adhoc_metric(col):
                col = self.adhoc_metric_to_sqla(col, cols)
            elif col in cols:
                col = cols[col].get_sqla_col()
            qry = qry.order_by(direction(col))

        if row_limit:
            qry = qry.limit(row_limit)

        if is_timeseries and timeseries_limit and groupby and not time_groupby_inline:
            if self.database.db_engine_spec.allows_joins:
                # some sql dialects require for order by expressions
                # to also be in the select clause -- others, e.g. vertica,
                # require a unique inner alias
                inner_main_metric_expr = self.make_sqla_column_compatible(
                    main_metric_expr, "mme_inner__")
                inner_groupby_exprs = []
                inner_select_exprs = []
                for gby_name, gby_obj in groupby_exprs_sans_timestamp.items():
                    inner = self.make_sqla_column_compatible(
                        gby_obj, gby_name + "__")
                    inner_groupby_exprs.append(inner)
                    inner_select_exprs.append(inner)

                inner_select_exprs += [inner_main_metric_expr]
                subq = select(inner_select_exprs).select_from(tbl)
                inner_time_filter = dttm_col.get_time_filter(
                    inner_from_dttm or from_dttm,
                    inner_to_dttm or to_dttm,
                    time_range_endpoints,
                )
                subq = subq.where(
                    and_(*(where_clause_and + [inner_time_filter])))
                subq = subq.group_by(*inner_groupby_exprs)

                ob = inner_main_metric_expr
                if timeseries_limit_metric:
                    ob = self._get_timeseries_orderby(timeseries_limit_metric,
                                                      metrics_dict, cols)
                direction = desc if order_desc else asc
                subq = subq.order_by(direction(ob))
                subq = subq.limit(timeseries_limit)

                on_clause = []
                for gby_name, gby_obj in groupby_exprs_sans_timestamp.items():
                    # in this case the column name, not the alias, needs to be
                    # conditionally mutated, as it refers to the column alias in
                    # the inner query
                    col_name = db_engine_spec.make_label_compatible(gby_name +
                                                                    "__")
                    on_clause.append(gby_obj == column(col_name))

                tbl = tbl.join(subq.alias(), and_(*on_clause))
            else:
                if timeseries_limit_metric:
                    orderby = [(
                        self._get_timeseries_orderby(timeseries_limit_metric,
                                                     metrics_dict, cols),
                        False,
                    )]

                # run prequery to get top groups
                prequery_obj = {
                    "is_timeseries": False,
                    "row_limit": timeseries_limit,
                    "groupby": groupby,
                    "metrics": metrics,
                    "granularity": granularity,
                    "from_dttm": inner_from_dttm or from_dttm,
                    "to_dttm": inner_to_dttm or to_dttm,
                    "filter": filter,
                    "orderby": orderby,
                    "extras": extras,
                    "columns": columns,
                    "order_desc": True,
                }
                result = self.query(prequery_obj)
                prequeries.append(result.query)
                dimensions = [
                    c for c in result.df.columns
                    if c not in metrics and c in groupby_exprs_sans_timestamp
                ]
                top_groups = self._get_top_groups(
                    result.df, dimensions, groupby_exprs_sans_timestamp)
                qry = qry.where(top_groups)

        return SqlaQuery(
            extra_cache_keys=extra_cache_keys,
            labels_expected=labels_expected,
            sqla_query=qry.select_from(tbl),
            prequeries=prequeries,
        )
Code Example #50
    def get_sqla_query(  # sqla
            self,
            groupby,
            metrics,
            granularity,
            from_dttm,
            to_dttm,
            filter=None,  # noqa
            is_timeseries=True,
            timeseries_limit=15,
            timeseries_limit_metric=None,
            row_limit=None,
            inner_from_dttm=None,
            inner_to_dttm=None,
            orderby=None,
            extras=None,
            columns=None,
            form_data=None,
            order_desc=True):
        """Querying any sqla table from this common interface"""
        template_kwargs = {
            'from_dttm': from_dttm,
            'groupby': groupby,
            'metrics': metrics,
            'row_limit': row_limit,
            'to_dttm': to_dttm,
            'form_data': form_data,
        }
        template_processor = self.get_template_processor(**template_kwargs)
        db_engine_spec = self.database.db_engine_spec

        orderby = orderby or []

        # For backward compatibility
        if granularity not in self.dttm_cols:
            granularity = self.main_dttm_col

        # Database spec supports join-free timeslot grouping
        time_groupby_inline = db_engine_spec.time_groupby_inline

        cols = {col.column_name: col for col in self.columns}
        metrics_dict = {m.metric_name: m for m in self.metrics}

        if not granularity and is_timeseries:
            raise Exception(
                _('Datetime column not provided as part table configuration '
                  'and is required by this type of chart'))
        if not groupby and not metrics and not columns:
            raise Exception(_('Empty query?'))
        for m in metrics:
            if m not in metrics_dict:
                raise Exception(_("Metric '{}' is not valid".format(m)))
        metrics_exprs = [metrics_dict.get(m).sqla_col for m in metrics]
        if metrics_exprs:
            main_metric_expr = metrics_exprs[0]
        else:
            main_metric_expr = literal_column('COUNT(*)').label('ccount')

        select_exprs = []
        groupby_exprs = []

        if groupby:
            select_exprs = []
            inner_select_exprs = []
            inner_groupby_exprs = []
            for s in groupby:
                col = cols[s]
                outer = col.sqla_col
                inner = col.sqla_col.label(col.column_name + '__')

                groupby_exprs.append(outer)
                select_exprs.append(outer)
                inner_groupby_exprs.append(inner)
                inner_select_exprs.append(inner)
        elif columns:
            for s in columns:
                select_exprs.append(cols[s].sqla_col)
            metrics_exprs = []

        if granularity:
            dttm_col = cols[granularity]
            time_grain = extras.get('time_grain_sqla')
            time_filters = []

            if is_timeseries:
                timestamp = dttm_col.get_timestamp_expression(time_grain)
                select_exprs += [timestamp]
                groupby_exprs += [timestamp]

            # Use main dttm column to support index with secondary dttm columns
            if db_engine_spec.time_secondary_columns and \
                    self.main_dttm_col in self.dttm_cols and \
                    self.main_dttm_col != dttm_col.column_name:
                time_filters.append(cols[self.main_dttm_col].get_time_filter(
                    from_dttm, to_dttm))
            time_filters.append(dttm_col.get_time_filter(from_dttm, to_dttm))

        select_exprs += metrics_exprs
        qry = sa.select(select_exprs)

        tbl = self.get_from_clause(template_processor, db_engine_spec)

        if not columns:
            qry = qry.group_by(*groupby_exprs)

        where_clause_and = []
        having_clause_and = []
        for flt in filter:
            if not all([flt.get(s) for s in ['col', 'op', 'val']]):
                continue
            col = flt['col']
            op = flt['op']
            eq = flt['val']
            col_obj = cols.get(col)
            if col_obj:
                if op in ('in', 'not in'):
                    values = []
                    for v in eq:
                        # For backwards compatibility and edge cases
                        # where a column data type might have changed
                        if isinstance(v, basestring):
                            v = v.strip("'").strip('"')
                            if col_obj.is_num:
                                v = utils.string_to_num(v)

                        # Removing empty strings and non numeric values
                        # targeting numeric columns
                        if v is not None:
                            values.append(v)
                    cond = col_obj.sqla_col.in_(values)
                    if op == 'not in':
                        cond = ~cond
                    where_clause_and.append(cond)
                else:
                    if col_obj.is_num:
                        eq = utils.string_to_num(flt['val'])
                    if op == '==':
                        where_clause_and.append(col_obj.sqla_col == eq)
                    elif op == '!=':
                        where_clause_and.append(col_obj.sqla_col != eq)
                    elif op == '>':
                        where_clause_and.append(col_obj.sqla_col > eq)
                    elif op == '<':
                        where_clause_and.append(col_obj.sqla_col < eq)
                    elif op == '>=':
                        where_clause_and.append(col_obj.sqla_col >= eq)
                    elif op == '<=':
                        where_clause_and.append(col_obj.sqla_col <= eq)
                    elif op == 'LIKE':
                        where_clause_and.append(col_obj.sqla_col.like(eq))
        if extras:
            where = extras.get('where')
            if where:
                where = template_processor.process_template(where)
                where_clause_and += [sa.text('({})'.format(where))]
            having = extras.get('having')
            if having:
                having = template_processor.process_template(having)
                having_clause_and += [sa.text('({})'.format(having))]
        if granularity:
            qry = qry.where(and_(*(time_filters + where_clause_and)))
        else:
            qry = qry.where(and_(*where_clause_and))
        qry = qry.having(and_(*having_clause_and))

        if not orderby and not columns:
            orderby = [(main_metric_expr, not order_desc)]

        for col, ascending in orderby:
            direction = asc if ascending else desc
            qry = qry.order_by(direction(col))

        if row_limit:
            qry = qry.limit(row_limit)

        if is_timeseries and \
                timeseries_limit and groupby and not time_groupby_inline:
            # some sql dialects require for order by expressions
            # to also be in the select clause -- others, e.g. vertica,
            # require a unique inner alias
            inner_main_metric_expr = main_metric_expr.label('mme_inner__')
            inner_select_exprs += [inner_main_metric_expr]
            subq = select(inner_select_exprs)
            subq = subq.select_from(tbl)
            inner_time_filter = dttm_col.get_time_filter(
                inner_from_dttm or from_dttm,
                inner_to_dttm or to_dttm,
            )
            subq = subq.where(and_(*(where_clause_and + [inner_time_filter])))
            subq = subq.group_by(*inner_groupby_exprs)

            ob = inner_main_metric_expr
            if timeseries_limit_metric:
                timeseries_limit_metric = metrics_dict.get(
                    timeseries_limit_metric)
                ob = timeseries_limit_metric.sqla_col
            direction = desc if order_desc else asc
            subq = subq.order_by(direction(ob))
            subq = subq.limit(timeseries_limit)

            on_clause = []
            for i, gb in enumerate(groupby):
                on_clause.append(groupby_exprs[i] == column(gb + '__'))

            tbl = tbl.join(subq.alias(), and_(*on_clause))

        return qry.select_from(tbl)
Code Example #51
File: sql_lab.py Project: prakritisamanta/datadecade
class SavedQueryView(SupersetModelView, DeleteMixin):  # pylint: disable=too-many-ancestors
    datamodel = SQLAInterface(SavedQuery)
    include_route_methods = RouteMethod.CRUD_SET

    class_permission_name = "SavedQuery"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    list_title = _("List Saved Query")
    show_title = _("Show Saved Query")
    add_title = _("Add Saved Query")
    edit_title = _("Edit Saved Query")

    list_columns = [
        "label",
        "user",
        "database",
        "schema",
        "description",
        "modified",
        "pop_tab_link",
    ]
    order_columns = ["label", "schema", "description", "modified"]
    show_columns = [
        "id",
        "label",
        "user",
        "database",
        "description",
        "sql",
        "pop_tab_link",
    ]
    search_columns = ("label", "user", "database", "schema", "changed_on")
    add_columns = ["label", "database", "description", "sql"]
    edit_columns = add_columns
    base_order = ("changed_on", "desc")
    label_columns = {
        "label": _("Label"),
        "user": _("User"),
        "database": _("Database"),
        "description": _("Description"),
        "modified": _("Modified"),
        "end_time": _("End Time"),
        "pop_tab_link": _("Pop Tab Link"),
        "changed_on": _("Changed on"),
    }

    @expose("/list/")
    @has_access
    def list(self) -> FlaskResponse:
        if not is_feature_enabled("ENABLE_REACT_CRUD_VIEWS"):
            return super().list()

        return super().render_app_template()

    def pre_add(self, item: "SavedQueryView") -> None:
        item.user = g.user

    def pre_update(self, item: "SavedQueryView") -> None:
        self.pre_add(item)
Code Example #52
	def validate_username(self, username):
		user = User.query.filter_by(username=username.data).first()
		if user is not None:
			raise ValidationError(_("Please enter a different username."))
Code Example #53
File: views.py Project: guhongyeying/flask-maple
def post(self):
    if current_user.is_confirmed:
        return HTTP.BAD_REQUEST(message=_("user has been confirmed."))
    self.send_email(current_user)
    return HTTP.OK(
        message=_('An email has been sent to you. Please check it.'))
Code Example #54
File: models.py Project: zhshw/incubator-superset
    def get_sqla_query(  # sqla
            self,
            groupby, metrics,
            granularity,
            from_dttm, to_dttm,
            filter=None,  # noqa
            is_timeseries=True,
            timeseries_limit=15,
            timeseries_limit_metric=None,
            row_limit=None,
            inner_from_dttm=None,
            inner_to_dttm=None,
            orderby=None,
            extras=None,
            columns=None,
            order_desc=True,
            prequeries=None,
            is_prequery=False,
        ):
        """Querying any sqla table from this common interface"""
        template_kwargs = {
            'from_dttm': from_dttm,
            'groupby': groupby,
            'metrics': metrics,
            'row_limit': row_limit,
            'to_dttm': to_dttm,
            'filter': filter,
            'columns': {col.column_name: col for col in self.columns},
        }
        template_kwargs.update(self.template_params_dict)
        template_processor = self.get_template_processor(**template_kwargs)
        db_engine_spec = self.database.db_engine_spec

        orderby = orderby or []

        # For backward compatibility
        if granularity not in self.dttm_cols:
            granularity = self.main_dttm_col

        # Database spec supports join-free timeslot grouping
        time_groupby_inline = db_engine_spec.time_groupby_inline

        cols = {col.column_name: col for col in self.columns}
        metrics_dict = {m.metric_name: m for m in self.metrics}

        if not granularity and is_timeseries:
            raise Exception(_(
                'Datetime column not provided as part table configuration '
                'and is required by this type of chart'))
        if not groupby and not metrics and not columns:
            raise Exception(_('Empty query?'))
        metrics_exprs = []
        for m in metrics:
            if utils.is_adhoc_metric(m):
                metrics_exprs.append(self.adhoc_metric_to_sa(m, cols))
            elif m in metrics_dict:
                metrics_exprs.append(metrics_dict.get(m).sqla_col)
            else:
                raise Exception(_("Metric '{}' is not valid".format(m)))
        if metrics_exprs:
            main_metric_expr = metrics_exprs[0]
        else:
            main_metric_expr = literal_column('COUNT(*)').label('ccount')

        select_exprs = []
        groupby_exprs = []

        if groupby:
            select_exprs = []
            inner_select_exprs = []
            inner_groupby_exprs = []
            for s in groupby:
                col = cols[s]
                outer = col.sqla_col
                inner = col.sqla_col.label(col.column_name + '__')

                groupby_exprs.append(outer)
                select_exprs.append(outer)
                inner_groupby_exprs.append(inner)
                inner_select_exprs.append(inner)
        elif columns:
            for s in columns:
                select_exprs.append(cols[s].sqla_col)
            metrics_exprs = []

        if granularity:
            dttm_col = cols[granularity]
            time_grain = extras.get('time_grain_sqla')
            time_filters = []

            if is_timeseries:
                timestamp = dttm_col.get_timestamp_expression(time_grain)
                select_exprs += [timestamp]
                groupby_exprs += [timestamp]

            # Use main dttm column to support index with secondary dttm columns
            if db_engine_spec.time_secondary_columns and \
                    self.main_dttm_col in self.dttm_cols and \
                    self.main_dttm_col != dttm_col.column_name:
                time_filters.append(cols[self.main_dttm_col].
                                    get_time_filter(from_dttm, to_dttm))
            time_filters.append(dttm_col.get_time_filter(from_dttm, to_dttm))

        select_exprs += metrics_exprs
        qry = sa.select(select_exprs)

        tbl = self.get_from_clause(template_processor, db_engine_spec)

        if not columns:
            qry = qry.group_by(*groupby_exprs)

        where_clause_and = []
        having_clause_and = []
        for flt in filter or []:  # guard against the None default
            if not all([flt.get(s) for s in ['col', 'op']]):
                continue
            col = flt['col']
            op = flt['op']
            col_obj = cols.get(col)
            if col_obj:
                is_list_target = op in ('in', 'not in')
                eq = self.filter_values_handler(
                    flt.get('val'),
                    target_column_is_numeric=col_obj.is_num,
                    is_list_target=is_list_target)
                if op in ('in', 'not in'):
                    cond = col_obj.sqla_col.in_(eq)
                    if '<NULL>' in eq:
                        cond = or_(cond, col_obj.sqla_col == None)  # noqa
                    if op == 'not in':
                        cond = ~cond
                    where_clause_and.append(cond)
                else:
                    if col_obj.is_num:
                        eq = utils.string_to_num(flt['val'])
                    if op == '==':
                        where_clause_and.append(col_obj.sqla_col == eq)
                    elif op == '!=':
                        where_clause_and.append(col_obj.sqla_col != eq)
                    elif op == '>':
                        where_clause_and.append(col_obj.sqla_col > eq)
                    elif op == '<':
                        where_clause_and.append(col_obj.sqla_col < eq)
                    elif op == '>=':
                        where_clause_and.append(col_obj.sqla_col >= eq)
                    elif op == '<=':
                        where_clause_and.append(col_obj.sqla_col <= eq)
                    elif op == 'LIKE':
                        where_clause_and.append(col_obj.sqla_col.like(eq))
                    elif op == 'IS NULL':
                        where_clause_and.append(col_obj.sqla_col == None)  # noqa
                    elif op == 'IS NOT NULL':
                        where_clause_and.append(col_obj.sqla_col != None)  # noqa
        if extras:
            where = extras.get('where')
            if where:
                where = template_processor.process_template(where)
                where_clause_and += [sa.text('({})'.format(where))]
            having = extras.get('having')
            if having:
                having = template_processor.process_template(having)
                having_clause_and += [sa.text('({})'.format(having))]
        if granularity:
            qry = qry.where(and_(*(time_filters + where_clause_and)))
        else:
            qry = qry.where(and_(*where_clause_and))
        qry = qry.having(and_(*having_clause_and))

        if not orderby and not columns:
            orderby = [(main_metric_expr, not order_desc)]

        for col, ascending in orderby:
            direction = asc if ascending else desc
            if utils.is_adhoc_metric(col):
                col = self.adhoc_metric_to_sa(col, cols)
            qry = qry.order_by(direction(col))

        if row_limit:
            qry = qry.limit(row_limit)

        if is_timeseries and \
                timeseries_limit and groupby and not time_groupby_inline:
            if self.database.db_engine_spec.inner_joins:
                # Some SQL dialects require ORDER BY expressions to also
                # appear in the SELECT clause -- others, e.g. Vertica,
                # require a unique inner alias.
                inner_main_metric_expr = main_metric_expr.label('mme_inner__')
                inner_select_exprs += [inner_main_metric_expr]
                subq = select(inner_select_exprs)
                subq = subq.select_from(tbl)
                inner_time_filter = dttm_col.get_time_filter(
                    inner_from_dttm or from_dttm,
                    inner_to_dttm or to_dttm,
                )
                subq = subq.where(and_(*(where_clause_and + [inner_time_filter])))
                subq = subq.group_by(*inner_groupby_exprs)

                ob = inner_main_metric_expr
                if timeseries_limit_metric:
                    if utils.is_adhoc_metric(timeseries_limit_metric):
                        ob = self.adhoc_metric_to_sa(timeseries_limit_metric, cols)
                    elif timeseries_limit_metric in metrics_dict:
                        timeseries_limit_metric = metrics_dict.get(
                            timeseries_limit_metric,
                        )
                        ob = timeseries_limit_metric.sqla_col
                    else:
                        raise Exception(_("Metric '{}' is not valid".format(m)))
                direction = desc if order_desc else asc
                subq = subq.order_by(direction(ob))
                subq = subq.limit(timeseries_limit)

                on_clause = []
                for i, gb in enumerate(groupby):
                    on_clause.append(
                        groupby_exprs[i] == column(gb + '__'))

                tbl = tbl.join(subq.alias(), and_(*on_clause))
            else:
                # run subquery to get top groups
                subquery_obj = {
                    'prequeries': prequeries,
                    'is_prequery': True,
                    'is_timeseries': False,
                    'row_limit': timeseries_limit,
                    'groupby': groupby,
                    'metrics': metrics,
                    'granularity': granularity,
                    'from_dttm': inner_from_dttm or from_dttm,
                    'to_dttm': inner_to_dttm or to_dttm,
                    'filter': filter,
                    'orderby': orderby,
                    'extras': extras,
                    'columns': columns,
                    'order_desc': True,
                }
                result = self.query(subquery_obj)
                dimensions = [c for c in result.df.columns if c not in metrics]
                top_groups = self._get_top_groups(result.df, dimensions)
                qry = qry.where(top_groups)

        return qry.select_from(tbl)
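For orientation, a hedged sketch of how this method might be invoked; `table`, the column and metric names, and the time grain are all hypothetical, and in Superset the call normally goes through the higher-level query machinery rather than being made directly:

from datetime import datetime, timedelta

# `table` is assumed to be an existing SqlaTable instance.
qry = table.get_sqla_query(
    groupby=['country'],                     # dimension columns
    metrics=['sum__sales'],                  # saved metric names or ad-hoc metric dicts
    granularity='order_date',                # temporal column used for time filtering
    from_dttm=datetime.utcnow() - timedelta(days=7),
    to_dttm=datetime.utcnow(),
    filter=[{'col': 'region', 'op': 'in', 'val': ['EMEA', 'APAC']}],
    is_timeseries=True,
    timeseries_limit=10,
    row_limit=5000,
    extras={'time_grain_sqla': 'P1D'},
)
# The result is a SQLAlchemy Select; it can be rendered to SQL, e.g.:
# print(qry.compile(compile_kwargs={'literal_binds': True}))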
Code example #55
def change_owner():
    """
    Reassign shared servers (and their server groups) from one user to another.

    Returns:
        A JSON response indicating success, or an internal server error.
    """

    data = request.form if request.form else json.loads(request.data)
    try:
        new_user = User.query.get(data['new_owner'])
        old_user_servers = Server.query.filter_by(shared=True, user_id=data[
            'old_owner']).all()
        server_group_ids = [server.servergroup_id for server in
                            old_user_servers]
        server_groups = ServerGroup.query.filter(
            ServerGroup.id.in_(server_group_ids)).all()

        new_owner_sg = ServerGroup.query.filter_by(
            user_id=data['new_owner']).all()
        old_owner_sg = ServerGroup.query.filter_by(
            user_id=data['old_owner']).all()
        sg_data = {sg.name: sg.id for sg in new_owner_sg}
        old_sg_data = {sg.id: sg.name for sg in old_owner_sg}

        deleted_sg = []
        # Change server user.
        for server in old_user_servers:
            sh_servers = SharedServer.query.filter_by(
                servergroup_id=server.servergroup_id).all()

            if old_sg_data[server.servergroup_id] in sg_data:

                for sh in sh_servers:
                    sh.servergroup_id = sg_data[
                        old_sg_data[server.servergroup_id]]
                    sh.server_owner = new_user.username
                # Update Server user and server group to prevent deleting
                # shared server associated with deleting user.
                Server.query.filter_by(
                    servergroup_id=server.servergroup_id, shared=True,
                    user_id=data['old_owner']
                ).update(
                    {
                        'servergroup_id': sg_data[old_sg_data[
                            server.servergroup_id]],
                        'user_id': data['new_owner']
                    }
                )
                ServerGroup.query.filter_by(id=server.servergroup_id).delete()
                deleted_sg.append(server.servergroup_id)
            else:
                server.user_id = data['new_owner']
                for sh in sh_servers:
                    sh.server_owner = new_user.username

        # Change server group user.
        for server_group in server_groups:
            if server_group.id not in deleted_sg:
                server_group.user_id = data['new_owner']

        db.session.commit()
        return make_json_response(
            success=1,
            info=_("Owner changed successfully."),
            data={}
        )
    except Exception as e:
        msg = _('Unable to update shared server owner.') + ' ' + str(e)
        return internal_server_error(
            errormsg=msg)
Code example #56
File: views.py  Project: guhongyeying/flask-maple
    def decorator(*args, **kwargs):
        if current_user.is_authenticated:
            flash(_("You are already logged in; no need to log in again."))
            return redirect('/')
        return func(*args, **kwargs)
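A hedged sketch (decorator and view names are assumptions) of the complete guest-only decorator this fragment appears to belong to, together with a possible usage:

from functools import wraps

from flask import flash, redirect
from flask_babel import gettext as _
from flask_login import current_user


def guest_required(func):
    @wraps(func)
    def decorator(*args, **kwargs):
        if current_user.is_authenticated:
            flash(_("You are already logged in; no need to log in again."))
            return redirect('/')
        return func(*args, **kwargs)
    return decorator


# Possible usage (view name is hypothetical):
# @app.route('/login', methods=['GET', 'POST'])
# @guest_required
# def login():
#     ...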
Code example #57
def create_maintenance_job(sid, did):
    """
    Args:
        sid: Server ID
        did: Database ID

        Creates a new job for maintenance vacuum operation

    Returns:
        None
    """
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    index_name = get_index_name(data)

    # Fetch the server details like hostname, port, roles etc

    server = get_server(sid)

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Create the command for the vacuum operation
    query = render_template('maintenance/sql/command.sql',
                            conn=conn,
                            data=data,
                            index_name=index_name)

    args = [
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel else str(server.port),
        '--username', server.username,
        '--dbname', data['database'],
        '--command', query,
    ]

    try:
        p = BatchProcess(desc=Message(server.id, data, query),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'status': True,
        'info': _('Maintenance job created.')
    })
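To illustrate the expected input, a hedged example of the kind of payload such an endpoint might receive; only `database` is read explicitly in the handler above, and the remaining field names and the URL pattern are assumptions:

import json

# Hypothetical request body; only 'database' is referenced directly above.
payload = {
    'database': 'sales_db',     # used for the --dbname argument
    'op': 'VACUUM',             # assumed field consumed by the SQL template
    'verbose': True,            # assumed option flag
}
# e.g. with requests (URL pattern is an assumption):
# requests.post(f'{base_url}/maintenance/job/{sid}/{did}',
#               data={'data': json.dumps(payload)})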
Code example #58
def index():
    return bad_request(errormsg=_("This URL cannot be called directly."))
Code example #59
    def type_desc(self):
        return _("Maintenance")
Code example #60
class ChartDataQueryObjectSchema(Schema):
    filters = fields.List(fields.Nested(ChartDataFilterSchema), required=False)
    granularity = fields.String(
        description="Name of temporal column used for time filtering. For legacy Druid "
        "datasources this defines the time grain.",
    )
    granularity_sqla = fields.String(
        description="Name of temporal column used for time filtering for SQL "
        "datasources. This field is deprecated, use `granularity` "
        "instead.",
        deprecated=True,
    )
    groupby = fields.List(
        fields.String(description="Columns by which to group the query.",),
    )
    metrics = fields.List(
        fields.Raw(),
        description="Aggregate expressions. Metrics can be passed as both "
        "references to datasource metrics (strings), or ad-hoc metrics"
        "which are defined only within the query object. See "
        "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
    )
    post_processing = fields.List(
        fields.Nested(ChartDataPostProcessingOperationSchema, allow_none=True),
        description="Post processing operations to be applied to the result set. "
        "Operations are applied to the result set in sequential order.",
    )
    time_range = fields.String(
        description="A time rage, either expressed as a colon separated string "
        "`since : until` or human readable freeform. Valid formats for "
        "`since` and `until` are: \n"
        "- ISO 8601\n"
        "- X days/years/hours/day/year/weeks\n"
        "- X days/years/hours/day/year/weeks ago\n"
        "- X days/years/hours/day/year/weeks from now\n"
        "\n"
        "Additionally, the following freeform can be used:\n"
        "\n"
        "- Last day\n"
        "- Last week\n"
        "- Last month\n"
        "- Last quarter\n"
        "- Last year\n"
        "- No filter\n"
        "- Last X seconds/minutes/hours/days/weeks/months/years\n"
        "- Next X seconds/minutes/hours/days/weeks/months/years\n",
        example="Last week",
    )
    time_shift = fields.String(
        description="A human-readable date/time string. "
        "Please refer to [parsdatetime](https://github.com/bear/parsedatetime) "
        "documentation for details on valid values.",
    )
    is_timeseries = fields.Boolean(
        description="Is the `query_object` a timeseries.", required=False
    )
    timeseries_limit = fields.Integer(
        description="Maximum row count for timeseries queries. Default: `0`",
    )
    timeseries_limit_metric = fields.Raw(
        description="Metric used to limit timeseries queries by.", allow_none=True,
    )
    row_limit = fields.Integer(
        description='Maximum row count. Default: `config["ROW_LIMIT"]`',
        validate=[
            Range(min=1, error=_("`row_limit` must be greater than or equal to 1"))
        ],
    )
    row_offset = fields.Integer(
        description="Number of rows to skip. Default: `0`",
        validate=[
            Range(min=0, error=_("`row_offset` must be greater than or equal to 0"))
        ],
    )
    order_desc = fields.Boolean(
        description="Reverse order. Default: `false`", required=False
    )
    extras = fields.Nested(ChartDataExtrasSchema, required=False)
    columns = fields.List(
        fields.String(),
        description="Columns to select; typically used for non-aggregate queries.",
    )
    orderby = fields.List(
        fields.List(fields.Raw()),
        description="Expects a list of lists where the first element is the column "
        "name which to sort by, and the second element is a boolean ",
        example=[["my_col_1", False], ["my_col_2", True]],
    )
    where = fields.String(
        description="WHERE clause to be added to queries using AND operator."
        "This field is deprecated and should be passed to `extras`.",
        deprecated=True,
    )
    having = fields.String(
        description="HAVING clause to be added to aggregate queries using "
        "AND operator. This field is deprecated and should be passed "
        "to `extras`.",
        deprecated=True,
    )
    having_filters = fields.List(
        fields.Dict(),
        description="HAVING filters to be added to legacy Druid datasource queries. "
        "This field is deprecated and should be passed to `extras` "
        "as `filters_druid`.",
        deprecated=True,
    )
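Finally, a hedged usage sketch (the payload values are made up) showing how a query object can be checked against this marshmallow schema:

query_object = {
    'granularity': 'order_date',
    'groupby': ['country'],
    'metrics': ['count'],
    'time_range': 'Last week',
    'row_limit': 1000,
    'order_desc': True,
    'filters': [{'col': 'region', 'op': 'IN', 'val': ['EMEA']}],
}

# Schema().validate() returns a dict of validation errors (empty if valid).
errors = ChartDataQueryObjectSchema().validate(query_object)
if errors:
    raise ValueError('Invalid query object: {}'.format(errors))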