def __init__(self, **config):
    """Set up Drive access, load block-device geometry, start the worker pool.

    Keyword overrides are overlaid on a copy of the module-level Config.
    Side effects: performs Drive API calls and starts daemon worker threads.
    (Python 2 code: uses xrange.)
    """
    self.config = Config.copy()
    self.config.update(config)
    self.auth_mgr = AuthManager(self.config['appname'],
        self.config['oauth_client_id'],
        self.config['oauth_client_secret'],
        self.config['oauth_scope'],
        self.config['oauth_redirect_uri'])
    self.drive = self.build_service()
    # Locate (or create) the Drive folder that holds the block files.
    self.data_dir = self.get_data_dir()
    # Stable identifier derived from the folder's Drive id.
    self.uuid = hashlib.sha1(self.data_dir).hexdigest()
    self.load_data_dir()  # populates self.bd_attr
    self.block_size = self.bd_attr['block_size']
    self.block_count = self.bd_attr['block_count']
    self.total_size = self.block_size * self.block_count
    # mapping[idx] caches the Drive file id for block idx (None = unknown).
    self.mapping = [None] * self.block_count
    self.que = TimedPriorityQueue()
    self.lock = Lock()
    self.running = True
    self.workers = []
    # Each worker thread gets its own Drive service handle; default pool is 8.
    for i in xrange(self.config.get('workers', 8)):
        worker = GBDWorker(self, self.build_service())
        worker.daemon = True
        worker.start()
        self.workers.append(worker)
def login_page():
    """Render the login form; on POST, validate credentials and sign in."""
    if request.method != "POST":
        return render_template("login.html")

    email = request.form["email"]
    secret = request.form["password"]
    if not email:
        flash("No username was provided.")
        return redirect(url_for("login_page"))
    if not secret:
        flash("No password was provided")
        return redirect(url_for("login_page"))

    with AuthManager() as auth:
        try:
            stored_hash = auth.get(email)
        except KeyError:
            # Unknown account: same message as a bad password on purpose.
            flash("Your username or password was incorrect.")
            return redirect(url_for("login_page"))
        if not auth.check_password(stored_hash, secret):
            flash("Your username or password was incorrect.")
            return redirect(url_for("login_page"))
        login_user(User(email, secret, True), remember=True)
        return redirect(url_for("home"))
def __init__(self, **config):
    """Set up Drive access, load block-device geometry, start the worker pool.

    Keyword overrides are overlaid on a copy of the module-level Config.
    Side effects: performs Drive API calls and starts daemon worker threads.
    (Python 2 code: uses xrange.)
    """
    self.config = Config.copy()
    self.config.update(config)
    self.auth_mgr = AuthManager(
        self.config['appname'], self.config['oauth_client_id'],
        self.config['oauth_client_secret'], self.config['oauth_scope'],
        self.config['oauth_redirect_uri'])
    self.drive = self.build_service()
    # Locate (or create) the Drive folder that holds the block files.
    self.data_dir = self.get_data_dir()
    # Stable identifier derived from the folder's Drive id.
    self.uuid = hashlib.sha1(self.data_dir).hexdigest()
    self.load_data_dir()  # populates self.bd_attr
    self.block_size = self.bd_attr['block_size']
    self.block_count = self.bd_attr['block_count']
    self.total_size = self.block_size * self.block_count
    # mapping[idx] caches the Drive file id for block idx (None = unknown).
    self.mapping = [None] * self.block_count
    self.que = TimedPriorityQueue()
    self.lock = Lock()
    self.running = True
    self.workers = []
    # Each worker thread gets its own Drive service handle; default pool is 8.
    for i in xrange(self.config.get('workers', 8)):
        worker = GBDWorker(self, self.build_service())
        worker.daemon = True
        worker.start()
        self.workers.append(worker)
def test_exec(self): auth = AuthManager("test/etc/users.auth") authkey = AuthKey(None, "my.pass") commander = Commander(auth) print commander.execute(self.command, self.data, authkey)
def __init__(self):
    """Wire up routes and settings, reset game tables, and init auth/pubsub."""
    routes = [
        (r"/", MainHandler),
        (r'/login', LoginHandler),
        (r'/register', RegisterHandler),
        (r'/createGame', CreateGameHandler),
        (r'/game', GameHandler),
        (r'/joinGame/([0-9]+)', JoinGameHandler),
        (r'/quitGame', QuitGameHandler),
        (r'/roomAction', RoomActionHandler),
        (r'/setQuestion', SetQuestionHandler),
        (r'/subscribe/(\w+)', SubscribeHandler),
        (r'/inTurn/(\w+)', InTurnActionHandler),
        (r'/chat', ChatHandler),
    ]
    app_settings = {
        "cookie_secret": "43oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
        "login_url": "/login",
        "template_path": os.path.join(os.path.dirname(__file__), "templates"),
        "static_path": os.path.join(os.path.dirname(__file__), "static"),
        "xsrf_cookies": True,
        "debug": True,
    }
    # Start from a clean slate: drop any stale in-progress game state.
    self.db = scoped_session(sessionmaker(bind=engine))
    self.db.query(Game).delete()
    self.db.query(GamePlayer).delete()
    self.db.commit()
    create_all()
    self.auth = AuthManager(self.db)
    publishers['hall'] = Publisher()
    tornado.web.Application.__init__(self, routes, **app_settings)
def main():
    """Entry point: read a command email from stdin, execute it, and notify.

    On AuthException, emails an authorization-failure notification to the
    command's authorized users.
    """
    try:
        # Load system configuration
        conf = parse_arguments()
        # Send logging output to a file if specified
        configure_log(conf["log.file"])
        # Look for available commands
        commands = find_commands("cmds/", conf["home.dir"])
        # Core components
        auth = AuthManager(conf["home.dir"] + "/users.auth")
        commander = Commander(auth)
        scanner = MailScanner(Parsers(commands))
        notifier = EmailNotifier(conf["smtp.host"], conf["smtp.port"])
        # Read the email from stdin
        email = UnicodeParser().parse(sys.stdin)
        cmd_id, data, sprops, authkey = scanner.scan(email)
        # Users with a named key for this command get the notifications.
        auth_users = [key.user for key in auth.keys[cmd_id] if key.user]
        command = commands[cmd_id]
        output = commander.execute(command, data, authkey)
        if output:
            notifier.send(SuccessNotification(conf["notification.sender"], \
                auth_users, command, sprops))
    except AuthException, err:
        # NOTE(review): this handler relies on notifier, cmd_id, data and
        # auth_users having been bound before the exception was raised; an
        # AuthException thrown earlier in the try body would surface as a
        # NameError here — confirm where AuthException can originate.
        notifier.send(AuthNotification(cmd_id, conf["commander.address"], \
            auth_users, email, data))
def test_user_registration(self):
    """POSTing to /register must create the account in the auth store."""
    with self.client:
        payload = {"email": "test", "password": "******"}
        self.client.post("register", data=payload)
        with AuthManager() as auth:
            self.assertIn("test", auth.keys())
class TestAuth(TestCase):
    """Checks AuthManager authorization decisions against the fixture keys."""

    def __init__(self, name):
        TestCase.__init__(self, name)
        self.auth = AuthManager("test/etc/users.auth")

    def test_manager(self):
        """Only the matching user/pass key may run the 'test' command."""
        cmd = "test"
        accepted = AuthKey("testuser", "my.pass")
        bad_pass = AuthKey(None, "invalid")
        unknown = AuthKey("garbage", "you")
        self.assertTrue(self.auth.authorized(cmd, accepted))
        # Public, wrong-password and unknown-user keys are all refused.
        for key in (PUBLIC_KEY, bad_pass, unknown):
            self.assertFalse(self.auth.authorized(cmd, key))
def __init__(self, name):
    """Build the whole command-mail pipeline against test fixtures."""
    TestCase.__init__(self, name)
    self.auth = AuthManager("test/etc/users.auth")
    self.commands = find_commands("test/cmds/")
    self.commander = Commander(self.auth)
    self.scanner = MailScanner(Parsers(self.commands))
    # A canned command email plus a notifier pointed at the test SMTP host.
    self.email = UnicodeParser().parse(open("test/data/cmd.email"))
    self.notifier = EmailNotifier("10.2.1.2", 25)
def register():
    """Render the signup form; on POST, validate input and create the account."""
    if request.method != "POST":
        return render_template("signup.html")

    email = request.form["email"]
    secret = request.form["password"]
    if not email:
        flash("No username was provided")
        return redirect(url_for("register"))
    if not secret:
        flash("No password was provided")
        return redirect(url_for("register"))

    with AuthManager() as auth:
        try:
            auth.add(email, secret)
        except AssertionError:
            # The auth store asserts on duplicate usernames.
            flash("This username is already taken.")
            return redirect(url_for("register"))
        # Give the new user a private data directory.
        os.makedirs(os.path.join(DATA_DIR, email), exist_ok=True)
        flash("Successfully registered. Please login below.")
        return redirect(url_for("login_page"))
def load_user(user_id):
    """Flask-Login user loader: rebuild a User from the auth store.

    Returns the User for a known id, or None for an unknown/stale id as the
    Flask-Login user_loader contract requires. The original let auth.get's
    KeyError escape, so any request carrying a stale session cookie (e.g. a
    deleted account) crashed with a 500 instead of being logged out.
    """
    with AuthManager() as auth:
        try:
            hashed = auth.get(user_id)
        except KeyError:
            # Unknown id: signal "no such user" so the session is cleared.
            return None
    return User(user_id, hashed, False)
from werkzeug.urls import url_unquote from werkzeug.utils import escape, redirect from werkzeug.wrappers import Response from auth import AuthManager, COOKIE_NAME from bakery import render_path from config import config from tantilla import create_app, HTMLResponse, static_redirect, status MOUNT_POINT = config["mount_point"] stamp = randrange(0, 1 << 31) stamp_mask = (1 << 32) - 1 prev_stamp = 0 auth_mgr = AuthManager(MOUNT_POINT) # This is obviously not thread-safe. def commit_file(name): ret = run(( 'git', '--git-dir=repo/.git/', '--work-tree=repo/', 'add', '--', name, ), stdin=DEVNULL, stdout=DEVNULL).returncode if ret != 0:
import tinydb import poll as polllib from config import Configuration from collections import namedtuple from auth import AuthManager config = Configuration("config.json") db = tinydb.TinyDB("testdb.json") auth = AuthManager(config) db.drop_tables() import pollManager pm = pollManager.PollManager(db,auth) Message = namedtuple('Message', ['author', 'content', 'channel']) User = namedtuple('User', ['id','permissions_in']) Channel = namedtuple('Channel', ['send']) Perms = namedtuple('Perms', ['administrator']) async def sss(text): print(text) def t(text): return Perms(True) def f(text): return Perms(False) async def runA(st): print("----##runA##----") await pm.processMessage(Message(User(123,t),st,Channel(sss))) async def runB(st):
def __init__(self, name):
    """Set up the test case with an AuthManager over the fixture key file."""
    TestCase.__init__(self, name)
    self.auth = AuthManager("test/etc/users.auth")
class GBD:
    """Block device backed by files in a Google Drive folder.

    Each block is a Drive file named "gbd_b<idx>" inside a data folder; a
    JSON "config" file in that folder records version/block_size/block_count.
    I/O requests go through a TimedPriorityQueue serviced by GBDWorker
    threads. (Python 2 code: xrange, raw_input, list-returning filter.)
    """

    FOLDER_MIMETYPE = 'application/vnd.google-apps.folder'
    BLOCK_MIMETYPE = 'application/octet-stream'

    def __init__(self, **config):
        """Set up Drive access, load geometry, start the worker pool."""
        # Overlay caller-supplied settings on a copy of the module defaults.
        self.config = Config.copy()
        self.config.update(config)
        self.auth_mgr = AuthManager(
            self.config['appname'],
            self.config['oauth_client_id'],
            self.config['oauth_client_secret'],
            self.config['oauth_scope'],
            self.config['oauth_redirect_uri'])
        self.drive = self.build_service()
        self.data_dir = self.get_data_dir()
        # Stable identifier derived from the folder's Drive id.
        self.uuid = hashlib.sha1(self.data_dir).hexdigest()
        self.load_data_dir()  # populates self.bd_attr
        self.block_size = self.bd_attr['block_size']
        self.block_count = self.bd_attr['block_count']
        self.total_size = self.block_size * self.block_count
        # mapping[idx] caches the Drive file id of block idx (None = unknown).
        self.mapping = [None] * self.block_count
        self.que = TimedPriorityQueue()
        self.lock = Lock()
        self.running = True
        self.workers = []
        # Each worker gets its own Drive service handle; default pool is 8.
        for i in xrange(self.config.get('workers', 8)):
            worker = GBDWorker(self, self.build_service())
            worker.daemon = True
            worker.start()
            self.workers.append(worker)

    ## init

    def build_service(self):
        # Calls the module-level build_service (shadowed by this method name).
        return build_service('drive', 'v2',
                             http=self.auth_mgr.get_auth_http())

    def get_data_dir(self):
        """Return the Drive id of the data folder, creating it if allowed.

        Raises RuntimeError when the folder is missing (and 'create' is not
        configured) or read-only; AssertionError on ambiguous or non-folder
        matches.
        """
        folder = self.config['gbd_data_folder']
        query_str = "title='{0}'".format(folder)
        results = self.drive.files().list(q=query_str).execute()
        # Ignore entries that are in the trash.
        items = filter(lambda x: not x['labels']['trashed'], results['items'])
        if len(items) == 0:
            if not self.config.get('create', False):
                raise RuntimeError("Can't locate `{0}'".format(folder))
            else:
                return self.create_data_dir()
        if len(items) > 1:
            raise AssertionError(
                "{0} results found for `{1}', don't know which to use".format(
                    len(items), folder))
        item = items[0]
        if item['mimeType'] != self.FOLDER_MIMETYPE:
            # NOTE(review): message reconstructed from a wrapped source line;
            # confirm the embedded newline matches the original literal.
            raise AssertionError(
                "`{0}' is not a folder!! \n(mimeType={1})".format(
                    folder, item['mimeType']))
        if not item['editable']:
            raise RuntimeError("folder `{0}' is readonly!".format(folder))
        return item['id']

    def create_data_dir(self):
        """Create the data folder under the Drive root and return its id."""
        folder = self.config['gbd_data_folder']
        body = {
            'title': folder,
            'parents': ['root'],
            'mimeType': self.FOLDER_MIMETYPE,
        }
        result = self.drive.files().insert(body=body).execute()
        if not result:
            raise RuntimeError("Can't create folder `{0}'".format(folder))
        return result['id']

    def load_data_dir(self):
        """Load geometry from the folder's 'config' file into self.bd_attr.

        Initializes the folder (writing a fresh config) on first use.
        """
        query_str = "title='config'"
        results = self.drive.children().list(folderId=self.data_dir,
                                             q=query_str).execute()
        if len(results['items']) == 0:
            self.init_data_dir()
            return
        if len(results['items']) > 1:
            raise AssertionError("config file should be unique")
        fileId = results['items'][0]['id']
        results = self.drive.files().get_media(fileId=fileId).execute()
        assert results
        self.bd_attr = json.loads(results)
        if self.bd_attr['version'] != Metadata['version']:
            # NOTE(review): the format args are passed as extra AssertionError
            # arguments instead of through .format(), so the {0}/{1}
            # placeholders are never substituted in the message.
            raise AssertionError("Version mismatch: {0} vs {1}",
                                 Metadata['version'], self.bd_attr['version'])

    def init_data_dir(self):
        """Choose geometry (from config or interactive prompt) and store it."""
        logger.info("Initializing data dir")
        if 'default_block_size' in self.config:
            block_size = int(self.config['default_block_size'])
        else:
            block_size = int(raw_input("Desired block size: "))
        if 'default_total_size' in self.config:
            total_size = int(self.config['default_total_size'])
        else:
            total_size = int(raw_input("Total size: "))
        if total_size < block_size:
            raise ValueError("block_size should not be bigger than total_size.")
        # Round the total down to a whole number of blocks.
        used_size = total_size // block_size * block_size
        if used_size != total_size:
            logger.info("Only using {0} bytes instead of {1}".format(
                used_size, total_size))
        self.bd_attr = {
            'version': Metadata['version'],
            'block_size': block_size,
            'block_count': used_size // block_size,
        }
        body = {
            'title': 'config',
            'description': 'config file for gbd',
            'mimeType': 'application/json',
            'parents': [{'id': self.data_dir}],
        }
        media_body = MediaInMemoryUpload(json.dumps(self.bd_attr),
                                         mimetype='application/json',
                                         resumable=False)
        self.drive.files().insert(body=body, media_body=media_body).execute()

    ## function

    def read_block(self, idx, cb=None, pri=TimedPriorityQueue.PRI_NORMAL):
        """Read block idx: async via cb if given, else block until done."""
        assert 0 <= idx < self.block_count
        if cb:
            self.que.put((idx, None, cb), pri)
        else:
            return self.sync_io(idx, None, pri)

    def write_block(self, idx, data, cb=None, pri=TimedPriorityQueue.PRI_NORMAL):
        """Write one full block: async via cb if given, else block until done."""
        assert 0 <= idx < self.block_count
        assert data and len(data) == self.block_size
        if cb:
            self.que.put((idx, data, cb), pri)
        else:
            return self.sync_io(idx, data, pri)

    def sync(self):
        """Block until every queued I/O request has been processed."""
        logger.info("Syncing...")
        self.que.join()

    def end(self, force):
        """Shut down; drains the queue first unless force is true."""
        if not force:
            self.sync()
        logger.info("End GBD")

    ## helper

    @classmethod
    def idx_to_name(cls, idx):
        """Map a block index to its Drive file title."""
        return "gbd_b" + str(idx)

    def block_id(self, idx):
        """Return the Drive file id for block idx, or None if it doesn't exist.

        Lookups are cached in self.mapping under self.lock.
        """
        with self.lock:
            if idx >= self.block_count or idx < 0:
                raise IndexError("Can't map idx {0}".format(idx))
            if self.mapping[idx] is None:
                query_str = "title='{0}'".format(self.idx_to_name(idx))
                results = self.drive.children().list(folderId=self.data_dir,
                                                     q=query_str).execute()
                if len(results['items']) == 1:
                    self.mapping[idx] = results['items'][0]['id']
                else:
                    # No file yet for this block index.
                    assert len(results['items']) == 0
            return self.mapping[idx]

    def new_block(self, idx, data=None):
        """Create the Drive file for block idx (zero-filled when data is None)."""
        with self.lock:
            if idx >= self.block_count or idx < 0:
                raise ValueError("Index out of bound")
            if self.mapping[idx] is not None:
                raise ValueError("None empty mapping @ {0}".format(idx))
            if data is not None:
                assert len(data) == self.block_size
            else:
                data = "\0" * self.block_size
            body = {
                'title': self.idx_to_name(idx),
                'mimeType': self.BLOCK_MIMETYPE,
                'parents': [{'id': self.data_dir}],
            }
            media_body = MediaInMemoryUpload(data,
                                             mimetype=self.BLOCK_MIMETYPE,
                                             resumable=False)
            result = self.drive.files().insert(body=body,
                                               media_body=media_body).execute()
            self.mapping[idx] = result['id']
            return result

    def sync_io(self, idx, data, pri):
        """Queue one request and wait for its callback; return data or raise err."""
        ret = []
        sem = Semaphore(0)

        def mycb(*param):
            ret.append(param)
            sem.release()

        self.que.put((idx, data, mycb), pri)
        sem.acquire()
        err, data = ret.pop()
        if err:
            raise err
        else:
            return data
class GBD:
    """Block device backed by files in a Google Drive folder.

    Each block is a Drive file named "gbd_b<idx>" inside a data folder; a
    JSON "config" file in that folder records version/block_size/block_count.
    I/O requests go through a TimedPriorityQueue serviced by GBDWorker
    threads. (Python 2 code: xrange, raw_input, list-returning filter.)
    """

    FOLDER_MIMETYPE = 'application/vnd.google-apps.folder'
    BLOCK_MIMETYPE = 'application/octet-stream'

    def __init__(self, **config):
        """Set up Drive access, load geometry, start the worker pool."""
        # Overlay caller-supplied settings on a copy of the module defaults.
        self.config = Config.copy()
        self.config.update(config)
        self.auth_mgr = AuthManager(self.config['appname'],
                                    self.config['oauth_client_id'],
                                    self.config['oauth_client_secret'],
                                    self.config['oauth_scope'],
                                    self.config['oauth_redirect_uri'])
        self.drive = self.build_service()
        self.data_dir = self.get_data_dir()
        # Stable identifier derived from the folder's Drive id.
        self.uuid = hashlib.sha1(self.data_dir).hexdigest()
        self.load_data_dir()  # populates self.bd_attr
        self.block_size = self.bd_attr['block_size']
        self.block_count = self.bd_attr['block_count']
        self.total_size = self.block_size * self.block_count
        # mapping[idx] caches the Drive file id of block idx (None = unknown).
        self.mapping = [None] * self.block_count
        self.que = TimedPriorityQueue()
        self.lock = Lock()
        self.running = True
        self.workers = []
        # Each worker gets its own Drive service handle; default pool is 8.
        for i in xrange(self.config.get('workers', 8)):
            worker = GBDWorker(self, self.build_service())
            worker.daemon = True
            worker.start()
            self.workers.append(worker)

    ## init

    def build_service(self):
        # Calls the module-level build_service (shadowed by this method name).
        return build_service('drive', 'v2',
                             http=self.auth_mgr.get_auth_http())

    def get_data_dir(self):
        """Return the Drive id of the data folder, creating it if allowed.

        Raises RuntimeError when the folder is missing (and 'create' is not
        configured) or read-only; AssertionError on ambiguous or non-folder
        matches.
        """
        folder = self.config['gbd_data_folder']
        query_str = "title='{0}'".format(folder)
        results = self.drive.files().list(q=query_str).execute()
        # Ignore entries that are in the trash.
        items = filter(lambda x: not x['labels']['trashed'], results['items'])
        if len(items) == 0:
            if not self.config.get('create', False):
                raise RuntimeError("Can't locate `{0}'".format(folder))
            else:
                return self.create_data_dir()
        if len(items) > 1:
            raise AssertionError(
                "{0} results found for `{1}', don't know which to use".format(
                    len(items), folder))
        item = items[0]
        if item['mimeType'] != self.FOLDER_MIMETYPE:
            # NOTE(review): message reconstructed from a wrapped source line;
            # confirm the embedded newline matches the original literal.
            raise AssertionError(
                "`{0}' is not a folder!! \n(mimeType={1})".format(
                    folder, item['mimeType']))
        if not item['editable']:
            raise RuntimeError("folder `{0}' is readonly!".format(folder))
        return item['id']

    def create_data_dir(self):
        """Create the data folder under the Drive root and return its id."""
        folder = self.config['gbd_data_folder']
        body = {
            'title': folder,
            'parents': ['root'],
            'mimeType': self.FOLDER_MIMETYPE,
        }
        result = self.drive.files().insert(body=body).execute()
        if not result:
            raise RuntimeError("Can't create folder `{0}'".format(folder))
        return result['id']

    def load_data_dir(self):
        """Load geometry from the folder's 'config' file into self.bd_attr.

        Initializes the folder (writing a fresh config) on first use.
        """
        query_str = "title='config'"
        results = self.drive.children().list(folderId=self.data_dir,
                                             q=query_str).execute()
        if len(results['items']) == 0:
            self.init_data_dir()
            return
        if len(results['items']) > 1:
            raise AssertionError("config file should be unique")
        fileId = results['items'][0]['id']
        results = self.drive.files().get_media(fileId=fileId).execute()
        assert results
        self.bd_attr = json.loads(results)
        if self.bd_attr['version'] != Metadata['version']:
            # NOTE(review): the format args are passed as extra AssertionError
            # arguments instead of through .format(), so the {0}/{1}
            # placeholders are never substituted in the message.
            raise AssertionError("Version mismatch: {0} vs {1}",
                                 Metadata['version'], self.bd_attr['version'])

    def init_data_dir(self):
        """Choose geometry (from config or interactive prompt) and store it."""
        logger.info("Initializing data dir")
        if 'default_block_size' in self.config:
            block_size = int(self.config['default_block_size'])
        else:
            block_size = int(raw_input("Desired block size: "))
        if 'default_total_size' in self.config:
            total_size = int(self.config['default_total_size'])
        else:
            total_size = int(raw_input("Total size: "))
        if total_size < block_size:
            raise ValueError(
                "block_size should not be bigger than total_size.")
        # Round the total down to a whole number of blocks.
        used_size = total_size // block_size * block_size
        if used_size != total_size:
            logger.info("Only using {0} bytes instead of {1}".format(
                used_size, total_size))
        self.bd_attr = {
            'version': Metadata['version'],
            'block_size': block_size,
            'block_count': used_size // block_size,
        }
        body = {
            'title': 'config',
            'description': 'config file for gbd',
            'mimeType': 'application/json',
            'parents': [{
                'id': self.data_dir
            }],
        }
        media_body = MediaInMemoryUpload(json.dumps(self.bd_attr),
                                         mimetype='application/json',
                                         resumable=False)
        self.drive.files().insert(body=body, media_body=media_body).execute()

    ## function

    def read_block(self, idx, cb=None, pri=TimedPriorityQueue.PRI_NORMAL):
        """Read block idx: async via cb if given, else block until done."""
        assert 0 <= idx < self.block_count
        if cb:
            self.que.put((idx, None, cb), pri)
        else:
            return self.sync_io(idx, None, pri)

    def write_block(self, idx, data, cb=None, pri=TimedPriorityQueue.PRI_NORMAL):
        """Write one full block: async via cb if given, else block until done."""
        assert 0 <= idx < self.block_count
        assert data and len(data) == self.block_size
        if cb:
            self.que.put((idx, data, cb), pri)
        else:
            return self.sync_io(idx, data, pri)

    def sync(self):
        """Block until every queued I/O request has been processed."""
        logger.info("Syncing...")
        self.que.join()

    def end(self, force):
        """Shut down; drains the queue first unless force is true."""
        if not force:
            self.sync()
        logger.info("End GBD")

    ## helper

    @classmethod
    def idx_to_name(cls, idx):
        """Map a block index to its Drive file title."""
        return "gbd_b" + str(idx)

    def block_id(self, idx):
        """Return the Drive file id for block idx, or None if it doesn't exist.

        Lookups are cached in self.mapping under self.lock.
        """
        with self.lock:
            if idx >= self.block_count or idx < 0:
                raise IndexError("Can't map idx {0}".format(idx))
            if self.mapping[idx] is None:
                query_str = "title='{0}'".format(self.idx_to_name(idx))
                results = self.drive.children().list(folderId=self.data_dir,
                                                     q=query_str).execute()
                if len(results['items']) == 1:
                    self.mapping[idx] = results['items'][0]['id']
                else:
                    # No file yet for this block index.
                    assert len(results['items']) == 0
            return self.mapping[idx]

    def new_block(self, idx, data=None):
        """Create the Drive file for block idx (zero-filled when data is None)."""
        with self.lock:
            if idx >= self.block_count or idx < 0:
                raise ValueError("Index out of bound")
            if self.mapping[idx] is not None:
                raise ValueError("None empty mapping @ {0}".format(idx))
            if data is not None:
                assert len(data) == self.block_size
            else:
                data = "\0" * self.block_size
            body = {
                'title': self.idx_to_name(idx),
                'mimeType': self.BLOCK_MIMETYPE,
                'parents': [{
                    'id': self.data_dir
                }],
            }
            media_body = MediaInMemoryUpload(data,
                                             mimetype=self.BLOCK_MIMETYPE,
                                             resumable=False)
            result = self.drive.files().insert(
                body=body, media_body=media_body).execute()
            self.mapping[idx] = result['id']
            return result

    def sync_io(self, idx, data, pri):
        """Queue one request and wait for its callback; return data or raise err."""
        ret = []
        sem = Semaphore(0)

        def mycb(*param):
            ret.append(param)
            sem.release()

        self.que.put((idx, data, mycb), pri)
        sem.acquire()
        err, data = ret.pop()
        if err:
            raise err
        else:
            return data