def lookup():
    """Render the source's lookup page with decrypted journalist replies.

    Skips replies that cannot be decoded as UTF-8 instead of crashing the
    whole page (decrypt output is raw bytes and may not be valid text).
    """
    msgs = []
    flagged = False
    for fn in os.listdir(g.loc):
        if fn == '_FLAG':
            flagged = True
            continue
        if not fn.startswith('reply-'):
            continue
        try:
            # Decryption can yield bytes that are not valid UTF-8 (e.g. a
            # corrupted file); log and skip rather than erroring the page.
            msg = crypto_util.decrypt(
                g.sid, g.codename,
                open(store.path(g.sid, fn)).read()).decode('utf-8')
        except UnicodeDecodeError:
            app.logger.error("Could not decode reply %s" % fn)
            continue
        msgs.append(dict(
            id=fn,
            date=str(datetime.fromtimestamp(
                os.stat(store.path(g.sid, fn)).st_mtime)),
            msg=msg))
    if flagged:
        session['flagged'] = True

    def async_genkey(sid, codename):
        # The background worker needs an app context of its own.
        with app.app_context():
            background.execute(lambda: crypto_util.genkeypair(sid, codename))

    # Generate a keypair to encrypt replies from the journalist
    # Only do this if the journalist has flagged the source as one
    # that they would like to reply to. (Issue #140.)
    if not crypto_util.getkey(g.sid) and flagged:
        async_genkey(g.sid, g.codename)
    return render_template(
        'lookup.html', codename=g.codename, msgs=msgs, flagged=flagged,
        haskey=crypto_util.getkey(g.sid))
def lookup():
    """Display the source lookup page, listing all decodable replies."""
    replies = []
    for filename in os.listdir(g.loc):
        if not filename.startswith('reply-'):
            continue
        ciphertext = file(store.path(g.sid, filename)).read()
        try:
            msg = crypto_util.decrypt(
                g.sid, g.codename, ciphertext).decode("utf-8")
        except UnicodeDecodeError:
            app.logger.error("Could not decode reply %s" % filename)
        else:
            mtime = os.stat(store.path(g.sid, filename)).st_mtime
            replies.append(dict(id=filename,
                                date=str(datetime.fromtimestamp(mtime)),
                                msg=msg))

    def async_genkey(sid, codename):
        # Key generation runs in a worker thread, which needs app context.
        with app.app_context():
            background.execute(lambda: crypto_util.genkeypair(sid, codename))

    # Generate a keypair to encrypt replies from the journalist
    # Only do this if the journalist has flagged the source as one
    # that they would like to reply to. (Issue #140.)
    if not crypto_util.getkey(g.sid) and g.source.flagged:
        async_genkey(g.sid, g.codename)
    return render_template('lookup.html',
                           codename=g.codename,
                           msgs=replies,
                           flagged=g.source.flagged,
                           haskey=crypto_util.getkey(g.sid))
def create():
    """Create the source's store directory and mark them logged in."""
    sid = crypto_util.hash_codename(session['codename'])
    if not os.path.exists(store.path(sid)):
        os.mkdir(store.path(sid))
    else:
        # if this happens, we're not using very secure crypto
        app.logger.warning("Got a duplicate ID '%s'" % sid)
    session['logged_in'] = True
    session['flagged'] = False
    return redirect(url_for('lookup'))
def POST(self):
    """Ensure the source's store directory exists, then store the upload."""
    i = web.input('id', fh={}, msg=None, mid=None, action=None)
    sid = crypto.shash(i.id)
    if not os.path.exists(store.path(sid)):
        os.mkdir(store.path(sid))
    else:
        # if this happens, we're not using very secure crypto
        store.log('Got a duplicate ID.')
    return store_endpoint(i)
def create():
    """Create the source's store directory and start their session."""
    sid = crypto_util.hash_codename(session['codename'])
    if not os.path.exists(store.path(sid)):
        os.mkdir(store.path(sid))
    else:
        # if this happens, we're not using very secure crypto
        log.warning("Got a duplicate ID '%s'" % sid)
    session['logged_in'] = True
    session['flagged'] = False
    return redirect(url_for('lookup'))
def create():
    """Create the source's store directory and start their session."""
    sid = crypto_util.shash(session["codename"])
    if not os.path.exists(store.path(sid)):
        os.mkdir(store.path(sid))
    else:
        # if this happens, we're not using very secure crypto
        store.log("Got a duplicate ID '%s'" % sid)
    session["logged_in"] = True
    session["flagged"] = False
    return redirect(url_for("lookup"))
def POST(self):
    """Generate a fresh codename, create its store dir, and render it."""
    iid = crypto.genrandomid()
    hashed = crypto.shash(iid)
    if os.path.exists(store.path(hashed)):
        # if this happens, we're not using very secure crypto
        store.log('Got a duplicate ID.')
    else:
        os.mkdir(store.path(hashed))
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.generate(iid)
def get_docs(sid):
    """Get docs associated with source id `sid`, sorted by submission date"""
    entries = []
    for name in os.listdir(store.path(sid)):
        stat_info = os.stat(store.path(sid, name))
        entries.append(dict(
            name=name,
            date=str(datetime.fromtimestamp(stat_info.st_mtime)),
            size=stat_info.st_size,
        ))
    # sort by date since ordering by filename is meaningless
    entries.sort(key=lambda doc: doc['date'])
    return entries
def get_docs(sid):
    """Get docs associated with source id `sid` sorted by submission date"""
    docs = []
    flagged = False
    for name in os.listdir(store.path(sid)):
        if name == "_FLAG":
            # the flag marker is metadata, not a submission
            flagged = True
            continue
        info = os.stat(store.path(sid, name))
        docs.append(dict(name=name,
                         date=str(datetime.fromtimestamp(info.st_mtime)),
                         size=info.st_size))
    # sort by date since ordering by filename is meaningless
    docs.sort(key=lambda d: d["date"])
    return docs, flagged
def create():
    """Create a Source record plus its store directory and log them in."""
    sid = crypto_util.hash_codename(session['codename'])

    source = Source(sid, crypto_util.display_id())
    db_session.add(source)
    db_session.commit()

    if not os.path.exists(store.path(sid)):
        os.mkdir(store.path(sid))
    else:
        # if this happens, we're not using very secure crypto
        log.warning("Got a duplicate ID '%s'" % sid)

    session['logged_in'] = True
    return redirect(url_for('lookup'))
def POST(self):
    """Handle a source's upload/delete action and re-render their lookup."""
    i = web.input('id', fh={}, msg=None, mid=None, action=None)
    sid = crypto.shash(i.id)
    loc = store.path(sid)
    if not os.path.exists(loc):
        raise web.notfound()

    received = False
    if i.action == 'upload':
        if i.msg:
            msg_loc = store.path(sid, '%s_msg.gpg' % time.time())
            crypto.encrypt(config.JOURNALIST_KEY, i.msg, msg_loc)
            received = 2
        if i.fh.value:
            # distinct "_doc" suffix so a document never collides with a
            # message saved during the same request
            doc_loc = store.path(sid, '%s_doc.gpg' % time.time())
            crypto.encrypt(config.JOURNALIST_KEY, i.fh.file, doc_loc,
                           fn=i.fh.filename)
            received = i.fh.filename or '[unnamed]'
        if not crypto.getkey(sid):
            background.execute(lambda: crypto.genkeypair(sid, i.id))
    elif i.action == 'delete':
        if i.mid not in os.listdir(loc):
            raise web.notfound()
        assert '/' not in i.mid
        crypto.secureunlink(store.path(sid, i.mid))

    msgs = []
    for fn in os.listdir(loc):
        if fn.startswith('reply-'):
            msgs.append(web.storage(
                id=fn,
                date=datetime.datetime.fromtimestamp(
                    float(store.cleanname(fn))),
                msg=crypto.decrypt(sid, i.id,
                                   file(store.path(sid, fn)).read())))

    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.lookup(i.id, msgs, received=received)
def test_delete_collections(self, async_genkey):
    """Test the "delete selected" checkboxes on the index page that can be
    used to delete multiple collections"""
    # first, add some sources
    num_sources = 2
    for i in range(num_sources):
        self.source_app.get('/generate')
        self.source_app.post('/create')
        self.source_app.post('/submit', data=dict(
            msg="This is a test " + str(i) + ".",
            fh=(StringIO(''), ''),
        ), follow_redirects=True)
        self.source_app.get('/logout')

    resp = self.journalist_app.get('/')
    # get all the checkbox values
    soup = BeautifulSoup(resp.data, 'html.parser')
    checkbox_values = [checkbox['value'] for checkbox in
                       soup.select('input[name="cols_selected"]')]

    resp = self.journalist_app.post('/col/process', data=dict(
        action='delete',
        cols_selected=checkbox_values,
    ), follow_redirects=True)
    self.assertEqual(resp.status_code, 200)
    self.assertIn("%s collections deleted" % (num_sources,), resp.data)
    self.assertTrue(async_genkey.called)

    # Make sure the collections are deleted from the filesystem.
    # `async` is a reserved word in modern Python, so fetch the submodule
    # with getattr to keep this file parseable.
    getattr(utils, 'async').wait_for_assertion(lambda: self.assertFalse(
        any([os.path.exists(store.path(filesystem_id))
             for filesystem_id in checkbox_values])))
def GET(self):
    """Render the journalist index of non-empty source collections."""
    cols = []
    for d in os.listdir(config.STORE_DIR):
        if not os.listdir(store.path(d)):
            # skip sources that have no submissions
            continue
        mtime = os.stat(store.path(d)).st_mtime
        # drop the fractional-seconds part of the timestamp string
        when = str(datetime.datetime.fromtimestamp(mtime)).split('.')[0]
        cols.append(web.storage(name=d,
                                codename=crypto.displayid(d),
                                date=when))
    cols.sort(lambda x, y: cmp(x.date, y.date), reverse=True)
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.index(cols)
def test_delete_collections(self):
    """Test the "delete selected" checkboxes on the index page that can be
    used to delete multiple collections"""
    # first, add some sources
    num_sources = 2
    for i in range(num_sources):
        self.source_app.get('/generate')
        self.source_app.post('/create')
        self.source_app.post('/submit', data=dict(
            msg="This is a test " + str(i) + ".",
            fh=(StringIO(''), ''),
        ), follow_redirects=True)
        self.source_app.get('/logout')

    resp = self.journalist_app.get('/')
    # get all the checkbox values
    soup = BeautifulSoup(resp.data)
    checkbox_values = [checkbox['value'] for checkbox in
                       soup.select('input[name="cols_selected"]')]

    resp = self.journalist_app.post('/col/process', data=dict(
        action='delete',
        cols_selected=checkbox_values), follow_redirects=True)
    self.assertEqual(resp.status_code, 200)
    self.assertIn("%s collections deleted" % (num_sources, ), resp.data)

    # Make sure the collections are deleted from the filesystem.
    # `async` is a reserved word in modern Python, so fetch the submodule
    # with getattr to keep this file parseable.
    getattr(utils, 'async').wait_for_assertion(lambda: self.assertFalse(
        any([os.path.exists(store.path(sid)) for sid in checkbox_values])))
def helper_filenames_delete(self, soup, i):
    """Delete the i-th selected document via /bulk and verify removal."""
    filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
    checkbox_values = [
        soup.select('input[name="doc_names_selected"]')[i]['value']]

    # delete
    resp = self.journalist_app.post('/bulk', data=dict(
        filesystem_id=filesystem_id,
        action='confirm_delete',
        doc_names_selected=checkbox_values
    ), follow_redirects=True)
    self.assertEqual(resp.status_code, 200)
    self.assertIn("The following file has been selected for"
                  " <strong>permanent deletion</strong>", resp.data)

    # confirm delete
    resp = self.journalist_app.post('/bulk', data=dict(
        filesystem_id=filesystem_id,
        action='delete',
        doc_names_selected=checkbox_values
    ), follow_redirects=True)
    self.assertEqual(resp.status_code, 200)
    self.assertIn("Submission deleted.", resp.data)

    # Make sure the files were deleted from the filesystem.
    # `async` is a reserved word in modern Python; use getattr so this
    # file still parses.
    getattr(utils, 'async').wait_for_assertion(lambda: self.assertFalse(
        any([os.path.exists(store.path(filesystem_id, doc_name))
             for doc_name in checkbox_values])))
def reply(journalist, source, num_replies):
    """Generates and submits *num_replies* replies to *source*
    from *journalist*. Returns reply objects as a list.

    :param db.Journalist journalist: The journalist to write the reply from.
    :param db.Source source: The source to send the reply to.
    :param int num_replies: Number of random-data replies to make.
    :returns: A list of the :class:`db.Reply`s submitted.
    """
    assert num_replies >= 1
    replies = []
    for _ in range(num_replies):
        source.interaction_count += 1
        fname = "{}-{}-reply.gpg".format(source.interaction_count,
                                         source.journalist_filename)
        crypto_util.encrypt(
            str(os.urandom(1)),
            [crypto_util.getkey(source.filesystem_id),
             config.JOURNALIST_KEY],
            store.path(source.filesystem_id, fname))
        reply = db.Reply(journalist, source, fname)
        replies.append(reply)
        db.db_session.add(reply)
        db.db_session.commit()
    return replies
def test_delete_collection(self):
    """Test the "delete collection" button on each collection page"""
    # first, add a source
    self.source_app.get('/generate')
    self.source_app.post('/create')
    self.source_app.post('/submit', data=dict(
        msg="This is a test.",
        fh=(StringIO(''), ''),
    ), follow_redirects=True)

    resp = self.journalist_app.get('/')
    # navigate to the collection page
    soup = BeautifulSoup(resp.data)
    first_col_url = soup.select('ul#cols > li a')[0]['href']
    resp = self.journalist_app.get(first_col_url)
    self.assertEqual(resp.status_code, 200)

    # find the delete form and extract the post parameters
    soup = BeautifulSoup(resp.data)
    delete_form_inputs = soup.select('form#delete_collection')[0]('input')
    sid = delete_form_inputs[1]['value']
    col_name = delete_form_inputs[2]['value']

    resp = self.journalist_app.post('/col/delete/' + sid,
                                    follow_redirects=True)
    self.assertEquals(resp.status_code, 200)
    self.assertIn(escape("%s's collection deleted" % (col_name, )),
                  resp.data)
    self.assertIn("No documents have been submitted!", resp.data)

    # Make sure the collection is deleted from the filesystem.
    # `async` is a reserved word in modern Python; use getattr so this
    # file still parses.
    getattr(utils, 'async').wait_for_assertion(
        lambda: self.assertFalse(os.path.exists(store.path(sid))))
def lookup():
    """Show the source's lookup page with decodable replies, newest first."""
    replies = []
    for reply in g.source.replies:
        reply_path = store.path(g.filesystem_id, reply.filename)
        try:
            reply.decrypted = crypto_util.decrypt(
                g.codename, open(reply_path).read()).decode('utf-8')
        except UnicodeDecodeError:
            app.logger.error("Could not decode reply %s" % reply.filename)
        else:
            reply.date = datetime.utcfromtimestamp(
                os.stat(reply_path).st_mtime)
            replies.append(reply)

    # Sort the replies by date
    replies.sort(key=operator.attrgetter('date'), reverse=True)

    # Generate a keypair to encrypt replies from the journalist
    # Only do this if the journalist has flagged the source as one
    # that they would like to reply to. (Issue #140.)
    if not crypto_util.getkey(g.filesystem_id) and g.source.flagged:
        async_genkey(g.filesystem_id, g.codename)

    return render_template('lookup.html',
                           codename=g.codename,
                           replies=replies,
                           flagged=g.source.flagged,
                           haskey=crypto_util.getkey(g.filesystem_id))
def GET(self, sid):
    """Render a single source's collection page for the journalist."""
    docs = []
    for name in os.listdir(store.path(sid)):
        mtime = os.stat(store.path(sid, name)).st_mtime
        docs.append(web.storage(
            name=name,
            date=str(datetime.datetime.fromtimestamp(mtime))))
    docs.sort(lambda x, y: cmp(x.date, y.date))
    haskey = bool(crypto.getkey(sid))
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.col(docs, sid, haskey, codename=crypto.displayid(sid))
def helper_filenames_delete(self, soup, i):
    """Delete the i-th selected document via /bulk and verify removal."""
    sid = soup.select('input[name="sid"]')[0]['value']
    checkbox_values = [
        soup.select('input[name="doc_names_selected"]')[i]['value']
    ]

    # delete
    resp = self.journalist_app.post(
        '/bulk',
        data=dict(sid=sid,
                  action='confirm_delete',
                  doc_names_selected=checkbox_values),
        follow_redirects=True)
    self.assertEqual(resp.status_code, 200)
    self.assertIn(
        "The following file has been selected for <strong>permanent deletion</strong>",
        resp.data)

    # confirm delete
    resp = self.journalist_app.post(
        '/bulk',
        data=dict(sid=sid,
                  action='delete',
                  doc_names_selected=checkbox_values),
        follow_redirects=True)
    self.assertEqual(resp.status_code, 200)
    self.assertIn("Submission deleted.", resp.data)

    # Make sure the files were deleted from the filesystem.
    # `async` is a reserved word in modern Python; use getattr so this
    # file still parses.
    getattr(utils, 'async').wait_for_assertion(lambda: self.assertFalse(
        any([os.path.exists(store.path(sid, doc_name))
             for doc_name in checkbox_values])))
def bulk_download(sid, docs_selected):
    """Send the selected documents for source `sid` as a single zip file.

    The archive is named after the source's journalist designation and is
    streamed as an attachment.
    """
    source = get_source(sid)
    filenames = [store.path(sid, doc['name']) for doc in docs_selected]
    # Renamed from `zip`, which shadowed the builtin of the same name.
    zip_file = store.get_bulk_archive(filenames)
    return send_file(zip_file.name, mimetype="application/zip",
                     attachment_filename=source.journalist_designation + ".zip",
                     as_attachment=True)
def reply(journalist, source, num_replies):
    """Generates and submits *num_replies* replies to *source*
    from *journalist*. Returns reply objects as a list.

    :param db.Journalist journalist: The journalist to write the reply from.
    :param db.Source source: The source to send the reply to.
    :param int num_replies: Number of random-data replies to make.
    :returns: A list of the :class:`db.Reply`s submitted.
    """
    assert num_replies >= 1
    replies = []
    for _ in range(num_replies):
        source.interaction_count += 1
        fname = "{}-{}-reply.gpg".format(source.interaction_count,
                                         source.journalist_filename)
        recipients = [crypto_util.getkey(source.filesystem_id),
                      config.JOURNALIST_KEY]
        crypto_util.encrypt(str(os.urandom(1)), recipients,
                            store.path(source.filesystem_id, fname))
        reply = db.Reply(journalist, source, fname)
        replies.append(reply)
        db.db_session.add(reply)
        db.db_session.commit()
    return replies
def test_delete_collection(self):
    """Test the "delete collection" button on each collection page"""
    # first, add a source
    self.source_app.get('/generate')
    self.source_app.post('/create')
    self.source_app.post('/submit', data=dict(
        msg="This is a test.",
        fh=(StringIO(''), ''),
    ), follow_redirects=True)

    rv = self.journalist_app.get('/')
    # navigate to the collection page
    soup = BeautifulSoup(rv.data)
    first_col_url = soup.select('ul#cols > li a')[0]['href']
    rv = self.journalist_app.get(first_col_url)
    self.assertEqual(rv.status_code, 200)

    # find the delete form and extract the post parameters
    soup = BeautifulSoup(rv.data)
    delete_form_inputs = soup.select('form#delete_collection')[0]('input')
    sid = delete_form_inputs[1]['value']
    col_name = delete_form_inputs[2]['value']

    rv = self.journalist_app.post('/col/delete/' + sid,
                                  follow_redirects=True)
    self.assertEquals(rv.status_code, 200)
    self.assertIn(escape("%s's collection deleted" % (col_name,)), rv.data)
    self.assertIn("No documents have been submitted!", rv.data)

    # Make sure the collection is deleted from the filesystem
    self._wait_for(
        lambda: self.assertFalse(os.path.exists(store.path(sid))))
def GET(self, sid, fn):
    """Serve a stored document as a download named after the codename."""
    download_name = crypto.displayid(sid).replace(' ', '_') + '_' + fn
    web.header('Content-Disposition',
               'attachment; filename="' + download_name + '"')
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return file(store.path(sid, fn)).read()
def test_encrypt_failure(self):
    """Encrypting with an empty recipient list must raise CryptoException."""
    source, _ = utils.db_helper.init_source()
    with self.assertRaisesRegexp(crypto_util.CryptoException,
                                 'no terminal at all requested'):
        crypto_util.encrypt(
            str(os.urandom(1)),
            [],
            store.path(source.filesystem_id, 'other.gpg'))
def test_path_returns_filename_of_folder(self):
    """store.path is called in this way in journalist.delete_collection"""
    filesystem_id = 'example'
    generated = store.path(filesystem_id)
    expected = os.path.join(config.STORE_DIR, filesystem_id)
    self.assertEquals(generated, expected)
def bulk_delete(sid, items_selected):
    """Queue secure deletion of selected items and remove their db rows."""
    for item in items_selected:
        worker.enqueue(store.secure_unlink, store.path(sid, item.filename))
        db_session.delete(item)
    db_session.commit()

    flash("Submission{} deleted.".format(
        "s" if len(items_selected) > 1 else ""), "notification")
    return redirect(url_for('col', sid=sid))
def GET(self, sid, fn):
    """Serve a stored document as a download named after the codename."""
    download_name = crypto.displayid(sid).replace(' ', '_') + '_' + fn
    web.header('Content-Disposition',
               'attachment; filename="' + download_name + '"')
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return file(store.path(sid, fn)).read()
def doc(sid, fn):
    """Serve an encrypted submission, marking it downloaded in the db."""
    # reject path-traversal attempts in the requested filename
    if '..' in fn or fn.startswith('/'):
        abort(404)
    try:
        Submission.query.filter(
            Submission.filename == fn).one().downloaded = True
    except NoResultFound as e:
        app.logger.error("Could not mark " + fn + " as downloaded: %s" % (e,))
    db_session.commit()
    return send_file(store.path(sid, fn),
                     mimetype="application/pgp-encrypted")
def delete():
    """Securely remove a single reply and delete its database row."""
    query = Reply.query.filter(
        Reply.filename == request.form['reply_filename'])
    reply = get_one_or_else(query, app.logger, abort)
    store.secure_unlink(store.path(g.sid, reply.filename))
    db_session.delete(reply)
    db_session.commit()
    flash("Reply deleted", "notification")
    return redirect(url_for('lookup'))
def doc(sid, fn): if ".." in fn or fn.startswith("/"): abort(404) try: Submission.query.filter(Submission.filename == fn).one().downloaded = True except NoResultFound as e: app.logger.error("Could not mark " + fn + " as downloaded: %s" % (e,)) db_session.commit() return send_file(store.path(sid, fn), mimetype="application/pgp-encrypted")
def reply():
    """Encrypt a journalist reply to a source after validating the text."""
    sid, msg_candidate = request.form["sid"], request.form["msg"]
    try:
        msg = msg_candidate.decode()
    except (UnicodeDecodeError, UnicodeEncodeError):
        # unparseable text: re-render the collection page with a notice
        flash("You have entered text that we could not parse. Please try again.",
              "notification")
        return render_template(
            "col.html", sid=sid,
            codename=db.display_id(sid, db.sqlalchemy_handle()))
    crypto_util.encrypt(
        crypto_util.getkey(sid), msg,
        output=store.path(sid, "reply-%s.gpg" % uuid.uuid4()))
    return render_template(
        "reply.html", sid=sid,
        codename=db.display_id(sid, db.sqlalchemy_handle()))
def bulk_download(sid, docs_selected):
    """Send the selected submissions for `sid` as one zip attachment."""
    filenames = [store.path(sid, doc["name"]) for doc in docs_selected]
    # Renamed from `zip`, which shadowed the builtin of the same name.
    archive = store.get_bulk_archive(filenames)
    return send_file(
        archive.name,
        mimetype="application/zip",
        attachment_filename=db.display_id(sid, db.sqlalchemy_handle()) + ".zip",
        as_attachment=True,
    )
def get_docs(sid):
    """Get docs associated with source id `sid` sorted by submission date"""
    docs = []
    flagged = False
    for name in os.listdir(store.path(sid)):
        if name == '_FLAG':
            # the flag marker is metadata, not a submission
            flagged = True
            continue
        info = os.stat(store.path(sid, name))
        docs.append(dict(
            name=name,
            date=str(datetime.fromtimestamp(info.st_mtime)),
            size=info.st_size,
        ))
    # sort by date since ordering by filename is meaningless
    docs.sort(key=lambda d: d['date'])
    return docs, flagged
def bulk_download(sid, docs_selected):
    """Send the selected submissions for `sid` as one zip attachment."""
    filenames = [store.path(sid, doc['name']) for doc in docs_selected]
    # Renamed from `zip`, which shadowed the builtin of the same name.
    archive = store.get_bulk_archive(filenames)
    return send_file(
        archive.name,
        mimetype="application/zip",
        attachment_filename=db.display_id(sid, db.sqlalchemy_handle()) + ".zip",
        as_attachment=True)
def bulk_delete(sid, items_selected):
    """Queue secure deletion of selected items and remove their db rows."""
    for item in items_selected:
        worker.enqueue(store.secure_unlink, store.path(sid, item.filename))
        db_session.delete(item)
    db_session.commit()

    flash("Submission{} deleted.".format(
        "s" if len(items_selected) > 1 else ""), "notification")
    return redirect(url_for("col", sid=sid))
def bulk_delete(sid, docs_selected):
    """Render the delete-confirmation page; unlink docs once confirmed."""
    confirm_delete = bool(request.form.get('confirm_delete', False))
    if confirm_delete:
        for doc in docs_selected:
            store.secure_unlink(store.path(sid, doc['name']))
    return render_template(
        'delete.html',
        sid=sid,
        codename=db.display_id(sid, db.sqlalchemy_handle()),
        docs_selected=docs_selected,
        confirm_delete=confirm_delete)
def test_path_returns_filename_of_items_within_folder(self):
    """store.path is called in this way in journalist.bulk_delete"""
    filesystem_id = 'example'
    item_filename = '1-quintuple_cant-msg.gpg'
    generated = store.path(filesystem_id, item_filename)
    expected = os.path.join(config.STORE_DIR, filesystem_id, item_filename)
    self.assertEquals(generated, expected)
def POST(self):
    """Handle a source's upload/delete action and re-render their lookup."""
    i = web.input('id', fh={}, msg=None, mid=None, action=None)
    sid = crypto.shash(i.id)
    loc = store.path(sid)
    if not os.path.exists(loc):
        raise web.notfound()

    received = False
    if i.action == 'upload':
        if i.msg:
            msg_loc = store.path(sid, '%s_msg.gpg' % time.time())
            crypto.encrypt(config.JOURNALIST_KEY, i.msg, msg_loc)
            received = 2
        if i.fh.value:
            # distinct "_doc" suffix so a document never collides with a
            # message saved during the same request
            doc_loc = store.path(sid, '%s_doc.gpg' % time.time())
            crypto.encrypt(config.JOURNALIST_KEY, i.fh.file, doc_loc,
                           fn=i.fh.filename)
            received = i.fh.filename or '[unnamed]'
        if not crypto.getkey(sid):
            background.execute(lambda: crypto.genkeypair(sid, i.id))
    elif i.action == 'delete':
        if i.mid not in os.listdir(loc):
            raise web.notfound()
        assert '/' not in i.mid
        crypto.secureunlink(store.path(sid, i.mid))

    msgs = []
    for fn in os.listdir(loc):
        if fn.startswith('reply-'):
            msgs.append(web.storage(
                id=fn,
                date=datetime.datetime.fromtimestamp(
                    float(store.cleanname(fn))),
                msg=crypto.decrypt(sid, i.id,
                                   file(store.path(sid, fn)).read())))

    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.lookup(i.id, msgs, received=received)
def POST(self):
    """Encrypt a journalist reply to the source's key and store it."""
    i = web.input('sid', 'msg')
    crypto.encrypt(crypto.getkey(i.sid), i.msg,
                   output=store.path(i.sid, 'reply-%s.gpg' % time.time()))
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.reply(i.sid)
def POST(self):
    """Encrypt a journalist reply to the source's key and store it."""
    i = web.input('sid', 'msg')
    destination = store.path(i.sid, 'reply-%s.gpg' % time.time())
    crypto.encrypt(crypto.getkey(i.sid), i.msg, output=destination)
    web.header('Cache-Control', 'no-cache, no-store, must-revalidate')
    web.header('Pragma', 'no-cache')
    web.header('Expires', '-1')
    return render.reply(i.sid)
def delete():
    """Delete a single reply file belonging to the logged-in source."""
    msgid = request.form['msgid']
    # `assert` is stripped under `python -O`, so this path-safety check must
    # not rely on it; reject any filename containing a path separator.
    if '/' in msgid:
        abort(404)
    potential_files = os.listdir(g.loc)
    if msgid not in potential_files:
        abort(404)  # TODO are the checks necessary?
    store.secure_unlink(store.path(g.sid, msgid))
    flash("Reply deleted.", "notification")
    return redirect(url_for('lookup'))
def setup_g():
    """Store commonly used values in Flask's special g object"""
    # ignore_static here because `crypto_util.shash` is bcrypt (very time
    # consuming), and we don't need to waste time running if we're just
    # serving a static resource that won't need to access these common
    # values.
    if logged_in():
        g.flagged = session['flagged']
        g.codename = session['codename']
        g.sid = crypto_util.shash(g.codename)
        g.loc = store.path(g.sid)
def delete():
    """Securely remove one reply file and delete its database row."""
    query = Reply.query.filter(
        Reply.filename == request.form['reply_filename'])
    reply = get_one_or_else(query, current_app.logger, abort)
    srm(store.path(g.filesystem_id, reply.filename))
    db_session.delete(reply)
    db_session.commit()
    flash(gettext("Reply deleted"), "notification")
    return redirect(url_for('.lookup'))
def delete(): msgid = request.form["msgid"] assert "/" not in msgid potential_files = os.listdir(g.loc) if msgid not in potential_files: abort(404) # TODO are the checks necessary? crypto_util.secureunlink(store.path(g.sid, msgid)) flash("Reply deleted.", "notification") return redirect(url_for("lookup"))
def bulk_delete(sid, docs_selected):
    """Render the delete-confirmation page; unlink docs once confirmed."""
    source = get_source(sid)
    confirm_delete = bool(request.form.get('confirm_delete', False))
    if confirm_delete:
        for doc in docs_selected:
            store.secure_unlink(store.path(sid, doc['name']))
    return render_template('delete.html',
                           sid=sid,
                           codename=source.journalist_designation,
                           docs_selected=docs_selected,
                           confirm_delete=confirm_delete)
def bulk_delete(sid, docs_selected):
    """Render the delete-confirmation page; unlink docs once confirmed."""
    confirm_delete = bool(request.form.get('confirm_delete', False))
    if confirm_delete:
        for doc in docs_selected:
            store.secure_unlink(store.path(sid, doc['name']))
    return render_template('delete.html',
                           sid=sid,
                           codename=db.display_id(sid,
                                                  db.sqlalchemy_handle()),
                           docs_selected=docs_selected,
                           confirm_delete=confirm_delete)
def test_encrypt_success(self):
    """Encrypting to a source key plus the journalist key yields ciphertext."""
    source, _ = utils.db_helper.init_source()
    message = str(os.urandom(1))
    recipients = [crypto_util.getkey(source.filesystem_id),
                  config.JOURNALIST_KEY]
    ciphertext = crypto_util.encrypt(
        message, recipients,
        store.path(source.filesystem_id, 'somefile.gpg'))
    self.assertIsInstance(ciphertext, str)
    self.assertNotEqual(ciphertext, message)
    self.assertGreater(len(ciphertext), 0)
def bulk_delete(filesystem_id, items_selected):
    """Queue secure deletion of selected items and remove their db rows."""
    for item in items_selected:
        worker.enqueue(srm, store.path(filesystem_id, item.filename))
        db_session.delete(item)
    db_session.commit()

    flash(ngettext("Submission deleted.",
                   "Submissions deleted.",
                   len(items_selected)),
          "notification")
    return redirect(url_for('col.col', filesystem_id=filesystem_id))
def lookup():
    """Render the source lookup page, skipping undecodable replies."""
    msgs = []
    flagged = False
    for fn in os.listdir(g.loc):
        # TODO: make 'flag' a db column, so we can replace this with a db
        # lookup in the future
        if fn == '_FLAG':
            flagged = True
            continue
        if not fn.startswith('reply-'):
            continue
        msg_candidate = crypto_util.decrypt(
            g.sid, g.codename, file(store.path(g.sid, fn)).read())
        try:
            decoded = msg_candidate.decode()
        except UnicodeDecodeError:
            # todo: we should have logging here!
            continue
        msgs.append(dict(
            id=fn,
            date=str(datetime.fromtimestamp(
                os.stat(store.path(g.sid, fn)).st_mtime)),
            msg=decoded))
    if flagged:
        session['flagged'] = True

    def async_genkey(sid, codename):
        # Key generation runs in a worker thread, which needs app context.
        with app.app_context():
            background.execute(lambda: crypto_util.genkeypair(sid, codename))

    # Generate a keypair to encrypt replies from the journalist
    # Only do this if the journalist has flagged the source as one
    # that they would like to reply to. (Issue #140.)
    if not crypto_util.getkey(g.sid) and flagged:
        async_genkey(g.sid, g.codename)
    return render_template('lookup.html',
                           codename=g.codename,
                           msgs=msgs,
                           flagged=flagged,
                           haskey=crypto_util.getkey(g.sid))
def batch_delete():
    """Securely delete every reply belonging to the current source."""
    replies = g.source.replies
    if len(replies) == 0:
        # the UI should only offer this action when replies exist
        app.logger.error("Found no replies when at least one was expected")
        return redirect(url_for('lookup'))
    for reply in replies:
        store.secure_unlink(store.path(g.filesystem_id, reply.filename))
        db_session.delete(reply)
    db_session.commit()

    flash("All replies have been deleted", "notification")
    return redirect(url_for('lookup'))
def setup_g():
    """Store commonly used values in Flask's special g object"""
    # ignore_static here because `crypto_util.hash_codename` is scrypt (very
    # time consuming), and we don't need to waste time running if we're just
    # serving a static resource that won't need to access these common
    # values.
    if logged_in():
        # We use session.get (which defaults to None if 'flagged' is not in
        # the session) to avoid a KeyError on the redirect from login/ to
        # lookup/
        g.flagged = session.get('flagged')
        g.codename = session['codename']
        g.sid = crypto_util.hash_codename(g.codename)
        g.loc = store.path(g.sid)
def delete_collection(filesystem_id):
    """Remove a source entirely: files, reply keypair, and database row.

    Returns the worker job handling the filesystem deletion.
    """
    # Delete the source's collection of submissions
    job = worker.enqueue(srm, store.path(filesystem_id))

    # Delete the source's reply keypair
    crypto_util.delete_reply_keypair(filesystem_id)

    # Delete their entry in the db
    source = get_source(filesystem_id)
    db_session.delete(source)
    db_session.commit()
    return job
def bulk_delete(filesystem_id, items_selected):
    """Queue secure deletion of selected items and remove their db rows."""
    for item in items_selected:
        item_path = store.path(filesystem_id, item.filename)
        worker.enqueue(srm, item_path)
        db_session.delete(item)
    db_session.commit()

    # Pass the *untranslated* plural template to ngettext and interpolate the
    # count after translation. Pre-formatting the string (as before) meant the
    # msgid never matched the translation catalog, breaking pluralized i18n.
    flash(ngettext("Submission deleted.",
                   "{num} submissions deleted.",
                   len(items_selected)).format(num=len(items_selected)),
          "notification")
    return redirect(url_for('col.col', filesystem_id=filesystem_id))