Example #1
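A Pyramid view that handles a file upload: it validates the POST parameters, guesses the content type, creates or updates a File record, and writes the uploaded data into the storage directory. Names such as DBSession, File, h, get_storage_dirs and route_url come from the surrounding pyrone/Pyramid project and are not shown here.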
def upload_file(request):
    """
    Process file upload request
    """
    # extract and validate the file upload parameters
    req = ('filedata', 'filename', 'dltype')

    for x in req:
        if x not in request.POST:
            return HTTPBadRequest()

    hfile = request.POST['filedata']

    # guess content type
    content_type = guess_type(hfile.filename)[0] or 'application/octet-stream'

    dbsession = DBSession()

    now = datetime.utcnow()

    # basename() guards against path traversal via the user-supplied file name
    filename = os.path.basename(request.POST['filename'])

    file = dbsession.query(File).filter(File.name==filename).first()
    if file is None:
        file = File()
    file.name = filename
    file.size = len(hfile.value)
    file.dltype = 'download' if request.POST['dltype'] == 'download' else 'auto'
    file.content_type = content_type
    file.updated = h.dt_to_timestamp(now)

    # save the uploaded data into the storage directory
    storage_dirs = get_storage_dirs()
    orig_filename = os.path.join(storage_dirs['orig'], file.name)
    with open(orig_filename, 'wb') as fp:
        shutil.copyfileobj(hfile.file, fp)
    hfile.file.close()

    dbsession.add(file)
    dbsession.flush()
    dbsession.expunge(file)

    try:
        transaction.commit()
    except IntegrityError:
        # e.g. a duplicate record: roll back and redirect back to the file list
        transaction.abort()
        return HTTPFound(location=route_url('admin_list_files', request))

    return HTTPFound(location=route_url('admin_list_files', request))
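The content-type detection above relies on the standard library's mimetypes.guess_type(), which returns (None, None) for unknown names, hence the 'application/octet-stream' fallback. A minimal illustration:

from mimetypes import guess_type

guess_type('report.pdf')[0]       # 'application/pdf'
guess_type('archive.unknown')[0]  # None -> falls back to 'application/octet-stream'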
Example #2
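The second leg of a Twitter OAuth 1.0a login: it exchanges the request token stored in the session for an access token, looks the user up by screen name, creates the account on first login, and remembers the user in the session.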
def login_twitter_finish(request):
    """
    Finish twitter authentication
    """
    consumer_key = str(get_config('tw_consumer_key'))
    consumer_secret = str(get_config('tw_consumer_secret'))
    token = request.session.get('twitter_request_token')
    if token is None:
        # the session has expired or the auth flow was never started
        return HTTPBadRequest()
    twitter = Twitter(auth=OAuth(token[0], token[1], consumer_key, consumer_secret),
                      format='', api_version=None)

    verifier = request.GET.get('oauth_verifier')
    try:
        oauth_resp = twitter.oauth.access_token(oauth_verifier=verifier)
    except TwitterHTTPError as e:
        log.error('Invalid "access_token" request: {0}'.format(str(e)))
        return HTTPNotFound()

    oauth_resp_data = dict(urllib.parse.parse_qsl(oauth_resp))
    # typical response:
    # {'user_id': '128607225', 'oauth_token_secret': 'NaGQrWyNRtHHHbvm3tNI0tcr2KTBUEY0J3ng8d7KFXg', 'screen_name': 'otmenych', 'oauth_token': '128607225-NWzT8YL1Wt6qNzMLzmaCEWOxqFtrEI1pjlA8c5FK'}
    tw_username = oauth_resp_data['screen_name']
    user = find_twitter_user(tw_username)

    if user is None:
        dbsession = DBSession()
        # create user
        user = User()
        user.kind = 'twitter'
        user.login = tw_username
        dbsession.add(user)

        # re-query so that roles are correctly loaded from the database
        user = find_twitter_user(tw_username)
        if user is None:
            log.error('Unable to create twitter user')
            return HTTPServerError()

    # save user to the session
    user.detach()
    remember(request, None, user=user)

    return HTTPFound(location=request.GET['pyrone_url'])
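For context, a minimal sketch of the first leg that would have put twitter_request_token into the session. The view name is a hypothetical counterpart, not part of the original code; only the Twitter/OAuth calls follow the twitter package's documented OAuth dance.

def login_twitter_begin(request):  # hypothetical counterpart, for illustration only
    consumer_key = str(get_config('tw_consumer_key'))
    consumer_secret = str(get_config('tw_consumer_secret'))
    twitter = Twitter(auth=OAuth('', '', consumer_key, consumer_secret),
                      format='', api_version=None)
    # request_token() returns a urlencoded string: 'oauth_token=...&oauth_token_secret=...'
    # a real app would also pass oauth_callback=... here
    resp = dict(urllib.parse.parse_qsl(twitter.oauth.request_token()))
    request.session['twitter_request_token'] = (resp['oauth_token'], resp['oauth_token_secret'])
    return HTTPFound(location='https://api.twitter.com/oauth/authorize?oauth_token=' + resp['oauth_token'])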
Example #3
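A large restore view: it validates a base64-encoded backup id, opens the zip archive, parses the index.xml catalog, wipes the database, and then restores settings, users, roles, verified emails, articles with their tags and comments, and files.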
def restore_backup(request):
    """
    Restore blog content from an uploaded backup archive.
    """
    _ = request.translate
    backup_id = request.matchdict['backup_id']

    backups_dir = get_backups_dir()
    filename = b64decode(backup_id).decode('utf-8')
    all_backups = [x for x in os.listdir(backups_dir) if os.path.isfile(os.path.join(backups_dir, x))]

    if filename not in all_backups:
        return {'error': _('Backup file not found')}

    full_filename = os.path.join(backups_dir, filename)

    try:
        z = zipfile.ZipFile(full_filename)
    except zipfile.BadZipfile:
        return {'error': _('Backup file is broken!')}

    # now check zip file contents, first extract file "index.xml"
    try:
        xml_f = z.open('index.xml')
    except KeyError:
        return {'error': _('Backup file is broken, no catalog file inside!')}

    try:
        xmldoc = etree.parse(xml_f)
    except etree.XMLSyntaxError:
        return {'error': _('Backup file is broken, XML catalog is broken!')}

    root = xmldoc.getroot()
    NS = 'http://regolit.com/ns/pyrone/backup/1.0'

    def t(name):
        """
        Convert tag name "name" to a fully qualified name like "{http://regolit.com/ns/pyrone/backup/1.0}name"
        """
        return '{{{0}}}{1}'.format(NS, name)

    def unt(name):
        """
        Remove the namespace prefix from a fully qualified tag name
        """
        return name.replace('{{{0}}}'.format(NS), '')

    # now check whether the backup version is supported
    if root.tag != t('backup'):
        return {'error': _('Unknown XML format of catalog file.')}

    backup_version = root.get('version')

    if backup_version not in ('1.0', '1.1'):
        return {'error': _('Unsupported backup version: “{0}”!'.format(root.get('version')))}

    dbsession = DBSession()
    dialect_name = dbsession.bind.name
    # now start to extract all data and fill DB
    # first delete everything from the database
    dbsession.query(Comment).delete()
    dbsession.query(Tag).delete()
    dbsession.query(Article).delete()
    dbsession.query(VerifiedEmail).delete()
    dbsession.query(Role).delete()
    dbsession.query(File).delete()  # also remove files from the storage dir
    dbsession.query(Config).delete()
    dbsession.query(User).delete()
    namespaces = {'b': NS}

    # first restore config
    nodes = xmldoc.xpath('//b:backup/b:settings', namespaces=namespaces)

    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: settings block not found')})

    node = nodes[0]
    nodes = node.xpath('//b:config', namespaces=namespaces)

    def recursively_restore_comments(tree, root):
        """
        Insert comments level by level so that parent rows exist before their children.
        """
        if root not in tree:
            return
        for comment in tree[root]:
            dbsession.add(comment)
        dbsession.flush()
        for comment in tree[root]:
            recursively_restore_comments(tree, comment.id)

    for node in nodes:
        c = dbsession.query(Config).get(node.get('id'))
        if c is None:
            c = Config(node.get('id'), node.text)
            dbsession.add(c)
        else:
            c.value = node.text

    # now restore users
    nodes = xmldoc.xpath('//b:backup/b:users', namespaces=namespaces)
    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: users block not found')})

    node = nodes[0]
    nodes = node.xpath('./b:user', namespaces=namespaces)

    for node in nodes:
        u = User()
        u.id = int(node.get('id'))

        subnodes = node.xpath('./*', namespaces=namespaces)
        m = {}
        for sn in subnodes:
            m[unt(sn.tag)] = sn.text

        props = {'login': '******', 'password': '******', 'display-name': 'display_name',
                 'email': 'email', 'kind': 'kind'}
        for k, v in props.items():
            if k in m:
                setattr(u, v, m[k])

        dbsession.add(u)

        if backup_version == '1.0':
            # restore permissions now
            permissions_roles_map = {
                'write_article': 'writer',
                'edit_article': 'editor',
                'admin': 'admin',
                'files': 'filemanager'
                }
            subnodes = node.xpath('./b:permissions/b:permission', namespaces=namespaces)
            for sn in subnodes:
                permission_name = sn.text
                if permission_name not in permissions_roles_map:
                    continue

                role_name = permissions_roles_map[permission_name]
                r = Role(None, u.id, role_name)
                dbsession.add(r)
        elif backup_version == '1.1':
            # restore roles directly
            subnodes = node.xpath('./b:roles/b:role', namespaces=namespaces)
            for sn in subnodes:
                r = Role(None, u.id, sn.text)
                dbsession.add(r)

    # restore verified emails
    nodes = xmldoc.xpath('//b:backup/b:verified-emails', namespaces=namespaces)
    if len(nodes) != 0:
        # block is optional
        node = nodes[0]
        nodes = node.xpath('./b:email', namespaces=namespaces)
        for node in nodes:
            vf = VerifiedEmail(node.text)
            vf.last_verify_date = int(node.get('last-verification-date'))
            vf.is_verified = node.get('verified') == 'true'
            vf.verification_code = node.get('verification-code')
            dbsession.add(vf)

    # now restore articles
    nodes = xmldoc.xpath('//b:backup/b:articles', namespaces=namespaces)
    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: articles block not found')})

    node = nodes[0]
    nodes = node.xpath('./b:article', namespaces=namespaces)

    for node in nodes:
        article = Article()
        article.id = int(node.get('id'))
        article.user_id = int(node.get('user-id'))

        subnodes = node.xpath('./*', namespaces=namespaces)
        m = {}
        for sn in subnodes:
            m[unt(sn.tag)] = sn.text

        props = {'title': 'title', 'body': 'body', 'shortcut': 'shortcut', 'shortcut-date': 'shortcut_date'}
        for k, v in props.items():
            if k in m:
                setattr(article, v, m[k])

        article.set_body(m['body'])

        props = {'published': 'published', 'updated': 'updated'}
        for k, v in props.items():
            if k in m:
                setattr(article, v, int(m[k]))

        props = {'is-commentable': 'is_commentable', 'is-draft': 'is_draft'}

        for k, v in props.items():
            if k in m:
                res = False
                if m[k].lower() == 'true':
                    res = True
                setattr(article, v, res)

        article.comments_total = 0
        article.comments_approved = 0

        # now restore tags
        subnodes = node.xpath('./b:tags/b:tag', namespaces=namespaces)
        tags_set = set()
        for sn in subnodes:
            tags_set.add(sn.text.strip())

        for tag_str in tags_set:
            log.debug('tag: '+tag_str)
            tag = Tag(tag_str, article)
            dbsession.add(tag)

        # now process comments; we need to preserve the comments hierarchy
        local_parents = {}  # key is a parent id, value is a list of child comments

        subnodes = node.xpath('./b:comments/b:comment', namespaces=namespaces)
        for sn in subnodes:
            comment = Comment()
            comment.article_id = article.id
            comment.id = int(sn.get('id'))
            # the "parent-id" and "user-id" attributes are optional
            try:
                comment.parent_id = int(sn.get('parent-id'))
            except (TypeError, ValueError):
                pass

            try:
                comment.user_id = int(sn.get('user-id'))
            except (TypeError, ValueError):
                pass

            subsubnodes = sn.xpath('./*', namespaces=namespaces)
            m = {}
            for ssn in subsubnodes:
                m[unt(ssn.tag)] = ssn.text

            props = {'display-name': 'display_name', 'email': 'email', 'website': 'website',
                     'ip-address': 'ip_address', 'xff-ip-address': 'xff_ip_address'}
            for k, v in props.items():
                if k in m:
                    setattr(comment, v, m[k])

            comment.set_body(m['body'])
            comment.published = int(m['published'])

            props = {'is-approved': 'is_approved', 'is-subscribed': 'is_subscribed'}
            for k, v in props.items():
                if k in m:
                    res = False
                    if m[k].lower() == 'true':
                        res = True
                    setattr(comment, v, res)

            article.comments_total += 1
            if comment.is_approved:
                article.comments_approved += 1

            parent_id = comment.parent_id
            if parent_id not in local_parents:
                local_parents[parent_id] = []
            local_parents[parent_id].append(comment)

        dbsession.add(article)
        dbsession.flush()
        
        recursively_restore_comments(local_parents, None)

    # now process files
    nodes = xmldoc.xpath('//b:backup/b:files', namespaces=namespaces)
    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: files block not found')})

    node = nodes[0]
    nodes = node.xpath('./b:file', namespaces=namespaces)

    storage_dirs = get_storage_dirs()
    for node in nodes:
        file = File()
        src = node.get('src')
        # read "name", "dltype", "updated", "content_type"

        subnodes = node.xpath('./*', namespaces=namespaces)
        m = {}
        for sn in subnodes:
            m[unt(sn.tag)] = sn.text

        props = {'name': 'name', 'dltype': 'dltype', 'content-type': 'content_type'}
        for k, v in props.items():
            if k in m:
                setattr(file, v, m[k])

        # check "file.name"
        if file.name == '.' or file.name == '..':
            continue
        if file.name.find('/') != -1 or file.name.find('\\') != -1:
            continue

        if file.dltype not in allowed_dltypes:
            file.dltype = 'auto'

        # extract the file from the archive into the storage dir, then fill attribute "size"
        file_full_path = os.path.join(storage_dirs['orig'], file.name)
        with z.open(src) as file_f, open(file_full_path, 'wb') as file_out_f:
            shutil.copyfileobj(file_f, file_out_f)
        file.size = os.path.getsize(file_full_path)

        dbsession.add(file)

    # the commit may fail (e.g. with IntegrityError) if the backup data is inconsistent
    try:
        transaction.commit()

        # reset sequences
        if dialect_name == 'postgresql':
            dbsession.bind.execute(text("SELECT setval('pbarticle_id_seq', (SELECT MAX(id) FROM pbarticle));"))
            dbsession.bind.execute(text("SELECT setval('pbarticlecomment_id_seq', (SELECT MAX(id) FROM pbarticlecomment));"))

    except Exception:
        # IntegrityError or anything else: report a generic restore failure
        return JSONResponse(httpcode.BadRequest, {'error': _('Unable to restore backup: database error, maybe your backup file is corrupted')})

    # we should also destroy current session (logout)
    forget(request)

    # clear config cache
    cache.clear_cache()

    return {'success': True}
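The backup_id decoded at the top of this view is just the base64-encoded backup file name, so the listing side presumably builds it like this (a sketch; the surrounding view and the file name are assumed):

from base64 import b64encode

filename = 'pyrone-backup-2014-01-01.zip'  # hypothetical backup file name
backup_id = b64encode(filename.encode('utf-8')).decode('ascii')
# restore_backup() reverses this: b64decode(backup_id).decode('utf-8') == filename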
Example #4
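An AJAX view that adds a comment to an article: form field names are derived as substrings of a key echoed back in the "s" parameter (apparently a light anti-spam obfuscation), anonymous commenters' details are remembered in cookies, subscription emails get verification messages, and reply notifications are sent up the comment thread.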
def add_article_comment_ajax(request):
    _ = request.translate
    article_id = int(request.matchdict['article_id'])

    dbsession = DBSession()

    q = dbsession.query(Article).filter(Article.id == article_id)
    user = request.user
    # drafts are visible only to users who have the "editor" or "admin" role
    if not (user.has_role('editor') or user.has_role('admin')):
        q = q.filter(Article.is_draft==False)
    article = q.first()

    if article is None or not article.is_commentable:
        return HTTPNotFound()

    if 's' not in request.POST:
        return HTTPBadRequest()

    key = request.POST['s']

    # all data elements are constructed from the string "key" as substrings
    body_ind = key[3:14]
    parent_ind = key[4:12]
    display_name_ind = key[0:5]
    email_ind = key[13:25]
    website_ind = key[15:21]
    is_subscribed_ind = key[19:27]

    for ind in (body_ind, parent_ind, display_name_ind, email_ind, website_ind):
        if ind not in request.POST:
            return HTTPBadRequest()

    body = request.POST[body_ind]

    if len(body) == 0:
        return {'error': _('Empty comment body is not allowed.')}

    comment = Comment()
    comment.set_body(body)

    user = request.user

    if user.kind != 'anonymous':
        comment.user_id = user.id
    else:
        # get "email", "display_name" and "website" arguments
        comment.display_name = request.POST[display_name_ind]
        comment.email = request.POST[email_ind]
        comment.website = request.POST[website_ind]

        # remember email, display_name and website in browser cookies
        request.response.set_cookie('comment_display_name', comment.display_name, max_age=31536000)
        request.response.set_cookie('comment_email', comment.email, max_age=31536000)
        request.response.set_cookie('comment_website', comment.website, max_age=31536000)

    # set parent comment
    parent_id = request.POST[parent_ind]
    try:
        parent_id = int(parent_id)
    except ValueError:
        parent_id = None

    if parent_id:
        parent = dbsession.query(Comment).filter(Comment.id == parent_id)\
            .filter(Comment.article_id == article_id).first()
        if parent is not None and not parent.is_approved:
            # replying to a comment that has not been approved yet is not allowed
            return {'error': _('Answering to an unapproved comment is not allowed.')}

    comment.parent_id = parent_id
    comment.article_id = article_id

    if is_subscribed_ind in request.POST:
        comment.is_subscribed = True

    # this list contains notifications
    ns = []

    # if the user has subscribed to replies then check whether his/her email is verified,
    # and if it is not, send a verification message to that email
    if is_subscribed_ind in request.POST:
        vrf_email = ''
        if user.kind != 'anonymous':
            vrf_email = user.email
        elif request.POST[email_ind]:
            vrf_email = request.POST[email_ind]

        vrf_email = normalize_email(vrf_email)
        if vrf_email:
            # email looks ok so proceed

            send_evn = False

            vf = dbsession.query(VerifiedEmail).filter(VerifiedEmail.email == vrf_email).first()
            vf_token = ''
            if vf is not None:
                if not vf.is_verified:
                    diff = time() - vf.last_verify_date
                    if diff > 86400:
                        # the delay between verification requests must be more than 24 hours
                        send_evn = True
                    vf.last_verify_date = time()
                    vf_token = vf.verification_code

            else:
                send_evn = True
                vf = VerifiedEmail(vrf_email)
                vf_token = vf.verification_code
                dbsession.add(vf)

            if send_evn:
                ns.append(notifications.gen_email_verification_notification(request, vrf_email, vf_token))

    request.response.set_cookie('is_subscribed', 'true' if comment.is_subscribed else 'false', max_age=31536000)

    # automatically approve comment if user has role "admin", "writer" or "editor"
    if user.has_role('admin') or user.has_role('writer') \
            or user.has_role('editor'):
        comment.is_approved = True

    # TODO: also automatically approve comment if it's considered as safe:
    # i.e. without hyperlinks, spam etc

    # count hyperlinks in the body: auto-approve if there is at most one
    if len(re.findall('https?://', body, flags=re.IGNORECASE)) <= 1:
        comment.is_approved = True

    # record the commenter's IP address
    comment.ip_address = request.environ.get('REMOTE_ADDR', 'unknown')
    comment.xff_ip_address = request.environ.get('HTTP_X_FORWARDED_FOR', None)

    dbsession.add(comment)
    _update_comments_counters(dbsession, article)
    dbsession.flush()
    dbsession.expunge(comment)  # remove object from the session, object state is preserved
    dbsession.expunge(article)
    transaction.commit()  # to delete, probably

    # comment added, now send notifications
    loop_limit = 100
    comment = dbsession.query(Comment).get(comment.id)
    parent = comment.parent
    admin_email = get_config('admin_notifications_email')
    vf_q = dbsession.query(VerifiedEmail)
    notifications_emails = []

    while parent is not None and loop_limit > 0:
        loop_limit -= 1
        c = parent
        parent = c.parent
        # walk up the tree
        if not c.is_subscribed:
            continue
        # find email
        email = None
        if c.user is None:
            email = c.email
        else:
            email = c.user.email

        if email is None or email == admin_email:
            continue

        email = normalize_email(email)

        if email in notifications_emails:
            continue

        vf = vf_q.filter(VerifiedEmail.email == email).first()
        if vf is not None and vf.is_verified:
            # send notification to "email"
            ns.append(notifications.gen_comment_response_notification(request, article, comment, c, email))

    admin_notifications_email = normalize_email(get_config('admin_notifications_email'))

    for nfn in ns:
        if nfn is None:
            continue

        if nfn.to == admin_notifications_email:
            continue
        nfn.send()

    # create special notification for the administrator
    nfn = notifications.gen_new_comment_admin_notification(request, article, comment)
    if nfn is not None:
        nfn.send()

    # construct the comment URL
    # we're not using route_url() for the article because Pyramid urlencodes fragments
    comment_url = h.article_url(request, article) + '?commentid=' + str(comment.id)

    # return rendered comment
    data = {
        'body': comment.rendered_body,
        'approved': comment.is_approved,
        'id': comment.id,
        'url': comment_url
        }

    return data
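The field names checked at the top of this view are fixed substrings of the key "s", so the form-rendering side presumably generates a random key of at least 27 characters and derives the same names; a sketch under that assumption:

import secrets
import string

key = ''.join(secrets.choice(string.ascii_lowercase) for _ in range(27))
fields = {
    'body': key[3:14],
    'parent': key[4:12],
    'display_name': key[0:5],
    'email': key[13:25],
    'website': key[15:21],
    'is_subscribed': key[19:27],
}
# the template would render inputs with these names and post the key back as "s"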
Example #5
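A combined GET/POST view for writing a new article: GET renders an empty form, POST validates the fields, derives shortcut_date from the published timestamp, rejects duplicate shortcuts, saves the article with its tags, and redirects to it.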
def write_article(request):
    _ = request.translate
    c = {
        'new_article': True,
        'submit_url': route_url('blog_write_article', request),
        'errors': {},
        'tags': []
        }

    if request.method == 'GET':
        a = Article('new-article-shortcut', 'New article title')
        c['tags'] = []
        c['article'] = a
        c['article_published_str'] = h.timestamp_to_str(a.published)

    elif request.method == 'POST':
        article = Article()
        e = _check_article_fields(article, request)
        c['errors'].update(e)
        c['article_published_str'] = request.POST.get('published', '')

        if 'published' not in request.POST:
            c['errors']['published'] = _('invalid date and time format')
        else:
            # parse value to check structure
            date_re = re.compile('^([0-9]{4})-([0-9]{2})-([0-9]{2}) ([0-9]{2}):([0-9]{2})$')
            mo = date_re.match(request.POST['published'])
            if mo is None:
                c['errors']['published'] = _('invalid date and time format')
            else:
                # we need to convert LOCAL date and time to UTC seconds
                article.published = h.str_to_timestamp(request.POST['published'])
                v = [int(x) for x in mo.groups()[0:3]]
                article.shortcut_date = '{0:04d}/{1:02d}/{2:02d}'.format(*v)

            dbsession = DBSession()
            q = dbsession.query(Article).filter(Article.shortcut_date == article.shortcut_date)\
                .filter(Article.shortcut == article.shortcut)
            res = q.first()

            if res is not None:
                c['errors']['shortcut'] = _('duplicated shortcut')

        # tags
        c['tags'] = []
        if 'tags' in request.POST:
            tags_str = request.POST['tags']
            tags = set([s.strip() for s in tags_str.split(',')])

            for tag_str in tags:
                if tag_str == '':
                    continue
                c['tags'].append(tag_str)

        if len(c['errors']) == 0:
            dbsession = DBSession()

            # save and redirect
            user = request.user
            article.user_id = user.id
            dbsession.add(article)
            dbsession.flush()  # required as we need to obtain article_id

            article_id = article.id

            for tag_str in c['tags']:
                tag = Tag(tag_str, article)
                dbsession.add(tag)

            # force update of tags cloud
            h.get_public_tags_cloud(force_reload=True)

            return HTTPFound(location=route_url('blog_go_article', request, article_id=article_id))

        c['article'] = article

    else:
        return HTTPBadRequest()

    return c
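The shortcut_date derivation used in both this view and the next one is just a reformatting of the first three regex groups of the published field; a standalone illustration:

import re

date_re = re.compile('^([0-9]{4})-([0-9]{2})-([0-9]{2}) ([0-9]{2}):([0-9]{2})$')
mo = date_re.match('2014-01-31 12:45')
v = [int(x) for x in mo.groups()[0:3]]
'{0:04d}/{1:02d}/{2:02d}'.format(*v)  # -> '2014/01/31'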
Example #6
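The update counterpart of the previous view: it loads an existing article, runs the same field and duplicate-shortcut checks (this time excluding the article itself), replaces its tags, and aborts the transaction when validation fails.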
def _update_article(article_id, request):
    _ = request.translate

    dbsession = DBSession()

    article = dbsession.query(Article).get(article_id)
    if article is None:
        return HTTPNotFound()

    # check fields etc
    e = _check_article_fields(article, request)
    c = {'errors': {}}
    c['article'] = article
    c['errors'].update(e)
    c['article_published_str'] = request.POST.get('published', '')

    if 'published' not in request.POST:
        c['errors']['published'] = _('invalid date and time format')
    else:
        # parse value to check structure
        date_re = re.compile('^([0-9]{4})-([0-9]{2})-([0-9]{2}) ([0-9]{2}):([0-9]{2})$')
        mo = date_re.match(request.POST['published'])
        if mo is None:
            c['errors']['published'] = _('invalid date and time format')
        else:
            # we need to convert LOCAL date and time to UTC seconds
            article.published = h.str_to_timestamp(request.POST['published'])
            v = [int(x) for x in mo.groups()[0:3]]
            article.shortcut_date = '{0:04d}/{1:02d}/{2:02d}'.format(*v)

        q = dbsession.query(Article).filter(Article.shortcut_date == article.shortcut_date)\
            .filter(Article.id != article_id)\
            .filter(Article.shortcut == article.shortcut)
        res = q.first()

        if res is not None:
            c['errors']['shortcut'] = _('duplicated shortcut')

    # tags
    c['tags'] = []  # these are new tags
    if 'tags' in request.POST:
        tags_str = request.POST['tags']
        tags = set([s.strip() for s in tags_str.split(',')])

        for tag_str in tags:
            if tag_str == '':
                continue
            c['tags'].append(tag_str)

    if len(c['errors']) == 0:
        for tag in article.tags:
            dbsession.delete(tag)

        for tag_str in c['tags']:
            tag = Tag(tag_str, article)
            dbsession.add(tag)

        # force update of tags cloud
        h.get_public_tags_cloud(force_reload=True)

        return HTTPFound(location=route_url('blog_go_article', request, article_id=article_id))
    else:
        transaction.abort()

    return c