def login_twitter_finish(request):
    """
    Finish the twitter OAuth authentication flow.

    Exchanges the request token previously stored in the session for an
    access token, looks up the local user record by twitter screen name
    (creating it on first login), and remembers the user in the session.

    Returns HTTPFound redirecting to the "pyrone_url" GET parameter on
    success, HTTPNotFound when the OAuth handshake cannot be completed,
    HTTPServerError when the user record could not be created.
    """
    consumer_key = str(get_config('tw_consumer_key'))
    consumer_secret = str(get_config('tw_consumer_secret'))

    token = request.session.get('twitter_request_token')
    if token is None:
        # Session expired or the "start" step was skipped: without the
        # request token the handshake cannot be finished.  Previously this
        # crashed with a TypeError on "token[0]".
        log.error('Twitter request token is missing from the session')
        return HTTPNotFound()

    twitter = Twitter(auth=OAuth(token[0], token[1], consumer_key, consumer_secret),
                      format='', api_version=None)
    verifier = request.GET.get('oauth_verifier')
    try:
        oauth_resp = twitter.oauth.access_token(oauth_verifier=verifier)
    except TwitterHTTPError as e:
        log.error('Invalid "access_token" request: {0}'.format(str(e)))
        return HTTPNotFound()

    # typical response (query-string encoded):
    # user_id=...&oauth_token_secret=...&screen_name=...&oauth_token=...
    oauth_resp_data = dict(urllib.parse.parse_qsl(oauth_resp))
    tw_username = oauth_resp_data['screen_name']

    user = find_twitter_user(tw_username)
    if user is None:
        dbsession = DBSession()
        # create user
        user = User()
        user.kind = 'twitter'
        user.login = tw_username
        dbsession.add(user)
        # re-request again to correctly read roles
        user = find_twitter_user(tw_username)
        if user is None:
            log.error('Unable to create twitter user')
            return HTTPServerError()

    # save user to the session
    user.detach()
    remember(request, None, user=user)
    return HTTPFound(location=request.GET['pyrone_url'])
def restore_backup(request):
    """
    Restore blog content from a backup zip archive.

    The archive is identified by the base64-encoded file name in the
    "backup_id" matchdict parameter and must contain an "index.xml" catalog
    (backup format versions "1.0" and "1.1" are supported).  Restores
    settings, users (with roles/permissions), verified emails, articles with
    tags and comments, and uploaded files.

    WARNING: destructive — all existing database content is deleted before
    the restore starts.

    Returns {'success': True} on success, or a dict / JSONResponse with an
    'error' message describing the failure.
    """
    _ = request.translate
    backup_id = request.matchdict['backup_id']
    backups_dir = get_backups_dir()
    filename = b64decode(backup_id).decode('utf-8')
    all_backups = [x for x in os.listdir(backups_dir) if os.path.isfile(os.path.join(backups_dir, x))]
    if filename not in all_backups:
        return {'error': _('Backup file not found')}

    full_filename = os.path.join(backups_dir, filename)
    # NOTE(review): "z" is never explicitly closed (many early returns below);
    # closing relies on garbage collection.
    try:
        z = zipfile.ZipFile(full_filename)
    except zipfile.BadZipfile:
        return {'error': _('Backup file is broken!')}

    # now check zip file contents, first extract file "index.xml"
    try:
        xml_f = z.open('index.xml')
    except KeyError:
        return {'error': _('Backup file is broken, no catalog file inside!')}

    try:
        xmldoc = etree.parse(xml_f)
    except etree.XMLSyntaxError:
        return {'error': _('Backup file is broken, XML catalog is broken!')}

    root = xmldoc.getroot()
    NS = 'http://regolit.com/ns/pyrone/backup/1.0'

    def t(name):
        """
        Convert tag name "name" to full qualified name like
        "{http://regolit.com/ns/pyrone/backup/1.0}name"
        """
        return '{{{0}}}{1}'.format(NS, name)

    def unt(name):
        """
        Remove namespace
        """
        return name.replace('{{{0}}}'.format(NS), '')

    # now check is backup version supported
    if root.tag != t('backup'):
        return {'error': _('Unknown XML format of catalog file.')}
    backup_version = root.get('version')
    if backup_version not in ('1.0', '1.1'):
        return {'error': _('Unsupported backup version: “{0}”!'.format(root.get('version')))}

    dbsession = DBSession()
    dialect_name = dbsession.bind.name

    # now start to extract all data and fill DB
    # first delete everything from the database
    dbsession.query(Comment).delete()
    dbsession.query(Tag).delete()
    dbsession.query(Article).delete()
    dbsession.query(VerifiedEmail).delete()
    dbsession.query(Role).delete()
    dbsession.query(File).delete()
    # also remove files from the storage dir
    dbsession.query(Config).delete()
    dbsession.query(User).delete()

    namespaces = {'b': NS}

    # first restore config
    nodes = xmldoc.xpath('//b:backup/b:settings', namespaces=namespaces)
    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: settings block not found')})
    node = nodes[0]
    nodes = node.xpath('//b:config', namespaces=namespaces)

    def recursively_restore_comments(tree, parent_id):
        """Insert comments parent-first so parent ids always exist in the DB."""
        if parent_id not in tree:
            return
        for comment in tree[parent_id]:
            dbsession.add(comment)
        dbsession.flush()
        for comment in tree[parent_id]:
            recursively_restore_comments(tree, comment.id)

    for node in nodes:
        c = dbsession.query(Config).get(node.get('id'))
        if c is None:
            c = Config(node.get('id'), node.text)
            dbsession.add(c)
        else:
            c.value = node.text

    # now restore users
    nodes = xmldoc.xpath('//b:backup/b:users', namespaces=namespaces)
    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: users block not found')})
    node = nodes[0]
    nodes = node.xpath('./b:user', namespaces=namespaces)
    for node in nodes:
        u = User()
        u.id = int(node.get('id'))
        subnodes = node.xpath('./*', namespaces=namespaces)
        m = {}
        for sn in subnodes:
            m[unt(sn.tag)] = sn.text
        # mapping: XML element name -> User attribute name
        # FIX: the 'login' and 'password' values were garbled ('******'),
        # which made setattr() write to a bogus '******' attribute and lose
        # the user's credentials on restore
        props = {'login': 'login', 'password': 'password', 'display-name': 'display_name',
                 'email': 'email', 'kind': 'kind'}
        for k, v in props.items():
            if k in m:
                setattr(u, v, m[k])
        dbsession.add(u)

        if backup_version == '1.0':
            # restore permissions now (v1.0 stored permissions, map them to roles)
            permissions_roles_map = {
                'write_article': 'writer',
                'edit_article': 'editor',
                'admin': 'admin',
                'files': 'filemanager'
            }
            subnodes = node.xpath('./b:permissions/b:permission', namespaces=namespaces)
            for sn in subnodes:
                permission_name = sn.text
                if permission_name not in permissions_roles_map:
                    continue
                role_name = permissions_roles_map[permission_name]
                r = Role(None, u.id, role_name)
                dbsession.add(r)
        elif backup_version == '1.1':
            # restore roles directly
            subnodes = node.xpath('./b:roles/b:role', namespaces=namespaces)
            for sn in subnodes:
                r = Role(None, u.id, sn.text)
                dbsession.add(r)

    # restore verified emails
    nodes = xmldoc.xpath('//b:backup/b:verified-emails', namespaces=namespaces)
    if len(nodes) != 0:
        # block is optional
        node = nodes[0]
        nodes = node.xpath('./b:email', namespaces=namespaces)
        for node in nodes:
            vf = VerifiedEmail(node.text)
            vf.last_verify_date = int(node.get('last-verification-date'))
            vf.is_verified = node.get('verified') == 'true'
            vf.verification_code = node.get('verification-code')
            dbsession.add(vf)

    # now restore articles
    nodes = xmldoc.xpath('//b:backup/b:articles', namespaces=namespaces)
    if len(nodes) == 0:
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: articles block not found')})
    node = nodes[0]
    nodes = node.xpath('./b:article', namespaces=namespaces)
    for node in nodes:
        article = Article()
        article.id = int(node.get('id'))
        article.user_id = int(node.get('user-id'))
        subnodes = node.xpath('./*', namespaces=namespaces)
        m = {}
        for sn in subnodes:
            m[unt(sn.tag)] = sn.text
        # plain string attributes
        props = {'title': 'title', 'body': 'body', 'shortcut': 'shortcut',
                 'shortcut-date': 'shortcut_date'}
        for k, v in props.items():
            if k in m:
                setattr(article, v, m[k])
        article.set_body(m['body'])
        # integer (timestamp) attributes
        props = {'published': 'published', 'updated': 'updated'}
        for k, v in props.items():
            if k in m:
                setattr(article, v, int(m[k]))
        # boolean attributes
        props = {'is-commentable': 'is_commentable', 'is-draft': 'is_draft'}
        for k, v in props.items():
            if k in m:
                res = False
                if m[k].lower() == 'true':
                    res = True
                setattr(article, v, res)
        # counters are recomputed below while restoring comments
        article.comments_total = 0
        article.comments_approved = 0

        # now restore tags
        subnodes = node.xpath('./b:tags/b:tag', namespaces=namespaces)
        tags_set = set()
        for sn in subnodes:
            tags_set.add(sn.text.strip())
        for tag_str in tags_set:
            log.debug('tag: ' + tag_str)
            tag = Tag(tag_str, article)
            dbsession.add(tag)

        # now process comments; we need to preserve comments hierarchy
        local_parents = {}  # key is a parent-id, value is a list of child comments
        subnodes = node.xpath('./b:comments/b:comment', namespaces=namespaces)
        for sn in subnodes:
            comment = Comment()
            comment.article_id = article.id
            comment.id = int(sn.get('id'))
            # attributes are optional: .get() returns None for a missing one,
            # then int(None) raises TypeError
            try:
                comment.parent_id = int(sn.get('parent-id'))
            except (KeyError, TypeError):
                pass
            try:
                comment.user_id = int(sn.get('user-id'))
            except (KeyError, TypeError):
                pass
            subsubnodes = sn.xpath('./*', namespaces=namespaces)
            m = {}
            for sn in subsubnodes:
                m[unt(sn.tag)] = sn.text
            props = {'display-name': 'display_name', 'email': 'email', 'website': 'website',
                     'ip-address': 'ip_address', 'xff-ip-address': 'xff_ip_address'}
            for k, v in props.items():
                if k in m:
                    setattr(comment, v, m[k])
            comment.set_body(m['body'])
            comment.published = int(m['published'])
            props = {'is-approved': 'is_approved', 'is-subscribed': 'is_subscribed'}
            for k, v in props.items():
                if k in m:
                    res = False
                    if m[k].lower() == 'true':
                        res = True
                    setattr(comment, v, res)
            article.comments_total += 1
            if comment.is_approved:
                article.comments_approved += 1
            parent_id = comment.parent_id
            if parent_id not in local_parents:
                local_parents[parent_id] = []
            local_parents[parent_id].append(comment)

        dbsession.add(article)
        dbsession.flush()
        # top-level comments have parent_id None
        recursively_restore_comments(local_parents, None)

    # now process files
    nodes = xmldoc.xpath('//b:backup/b:files', namespaces=namespaces)
    if len(nodes) == 0:
        # FIX: the message previously said "articles block not found" (copy-paste)
        return JSONResponse(httpcode.NotFound, {'error': _('Backup file is broken: files block not found')})
    node = nodes[0]
    nodes = node.xpath('./b:file', namespaces=namespaces)
    storage_dirs = get_storage_dirs()
    for node in nodes:
        file_obj = File()
        src = node.get('src')
        # read "name", "dltype", "updated", "content_type"
        subnodes = node.xpath('./*', namespaces=namespaces)
        m = {}
        for sn in subnodes:
            m[unt(sn.tag)] = sn.text
        props = {'name': 'name', 'dltype': 'dltype', 'content-type': 'content_type'}
        for k, v in props.items():
            if k in m:
                setattr(file_obj, v, m[k])
        # check "file_obj.name": reject path traversal attempts
        if file_obj.name == '.' or file_obj.name == '..':
            continue
        if file_obj.name.find('/') != -1 or file_obj.name.find('\\') != -1:
            continue
        if file_obj.dltype not in allowed_dltypes:
            file_obj.dltype = 'auto'
        # extract file from the archive, put to the storage dir, fill attribute "size"
        file_full_path = os.path.join(storage_dirs['orig'], file_obj.name)
        with z.open(src) as file_f, open(file_full_path, 'wb') as file_out_f:
            shutil.copyfileobj(file_f, file_out_f)
        file_obj.size = os.path.getsize(file_full_path)
        dbsession.add(file_obj)

    # catch IntegrityError here!
    try:
        transaction.commit()
        # reset sequences so new records don't collide with restored ids
        if dialect_name == 'postgresql':
            dbsession.bind.execute(text("SELECT setval('pbarticle_id_seq', (SELECT MAX(id) FROM pbarticle));"))
            dbsession.bind.execute(text("SELECT setval('pbarticlecomment_id_seq', (SELECT MAX(id) FROM pbarticlecomment));"))
    except Exception:
        # IntegrityError and any other DB failure produced the exact same
        # response in two duplicated branches — merged into one
        return JSONResponse(httpcode.BadRequest, {'error': _('Unable to restore backup: database error, maybe your backup file is corrupted')})

    # we should also destroy current session (logout)
    forget(request)
    # clear config cache
    cache.clear_cache()
    return {'success': True}