def setUp(self):
    """Build a UserManager wired to an in-memory fake redis and a mock logger."""
    self.fake_logger = MagicMock()
    self.fake_redis = fakeredis.FakeStrictRedis()
    self.user_manager = UserManager(
        self.fake_redis,
        self.fake_logger,
    )
def __init__(self):
    """Wire up the Telegram bot: databases, handlers, then start polling.

    Blocks in ``idle()`` until the process is interrupted.
    """
    logger.info("The app has started")
    databases = Databases()
    logger.info("DB initialized")
    self.telegram_updater = Updater(token=config.token)
    self.user_manager = UserManager(databases, self.telegram_updater.bot)
    self.telegram_updater.dispatcher.add_handler(
        MessageHandler(filters=[], callback=self.message_handler))
    self.telegram_updater.dispatcher.add_handler(
        CallbackQueryHandler(self.callback_handler))
    # One-shot handler that initializes the asyncio loop on the dispatcher
    # thread; it removes itself after the first update (see
    # initialize_asyncio_loop).
    self.asyncio_handler = AsyncioFriendlyHandler(
        self.initialize_asyncio_loop)
    self.telegram_updater.dispatcher.add_handler(self.asyncio_handler)
    self.telegram_updater.dispatcher.add_error_handler(self.telegram_error)
    logger.debug('Adding asyncio update to queue')
    self.telegram_updater.update_queue.put(AsyncioUpdate())
    # Fixed log-message typo: was "Staring polling".
    logger.info("Starting polling")
    self.telegram_updater.start_polling(
        timeout=config.timeout_for_pooling,
        bootstrap_retries=config.retries_on_error)
    logger.info("Idle")
    self.telegram_updater.idle()
def __init__(self,
             name='Repository',
             location=path.join('Repositories', 'repo_1'),
             roles_file_type='txt'):
    """
    Initialisation of a new :py:class:Repository object.

    :param name: The name of the :py:class:Repository object, the default
        value is 'Repository'.
    :param location: The path of the :py:class:Repository object, the
        default value is 'Repositories/repo_1'.
    :param roles_file_type: The type of roles metadata file, it can be:
        TXT, XML, JSON.
    """
    self._name = name
    self._location = location
    self._metadata_file = path.join(
        self._location, '{}_metadata.edd'.format(path.basename(name)))
    self._paths_file = path.join(self._location, PATHS_FILE)
    # Only the three supported metadata formats are accepted.
    normalized_type = roles_file_type.lower()
    if normalized_type in ['txt', 'xml', 'json']:
        self._roles_file_type = normalized_type
    else:
        raise ValueError(
            "The roles_file_type must be txt, xml or json, not {}!".format(
                roles_file_type))
    self.load()
    self._user_manager = UserManager(self._location, self._paths_file)
    self._document_manager = DocumentManager(self._location,
                                             self._paths_file)
    # Periodic backup, every BACKUP_FREQUENCY days at 04:00.
    schedule.every(BACKUP_FREQUENCY).days.at('4:00').do(self.create_backup)
    self.initialize_logger(self.location)
    logger.info("The repository is initialized.")
def __init__(self, redis_pool, app, connected_websockets):
    """Hold the shared managers plus the redis pool, app and websocket set."""
    self.chess_manager = ChessManager(redis_pool)
    self.user_manager = UserManager(redis_pool, app)
    self.tournament_manager = TournamentManager(redis_pool,
                                                self.chess_manager)
    # board id -> subscriber bookkeeping, filled elsewhere.
    self.board_subscribers = {}
    self.redis_pool = redis_pool
    self.app = app
    self.connected_websockets = connected_websockets
class Main:
    """Telegram bot entry point: builds the updater, registers handlers
    and blocks polling for updates."""

    def __init__(self):
        """Wire up the Telegram bot: databases, handlers, then start polling.

        Blocks in ``idle()`` until the process is interrupted.
        """
        logger.info("The app has started")
        databases = Databases()
        logger.info("DB initialized")
        self.telegram_updater = Updater(token=config.token)
        self.user_manager = UserManager(databases, self.telegram_updater.bot)
        self.telegram_updater.dispatcher.add_handler(
            MessageHandler(filters=[], callback=self.message_handler))
        self.telegram_updater.dispatcher.add_handler(
            CallbackQueryHandler(self.callback_handler))
        # One-shot handler: initializes the asyncio loop on the dispatcher
        # thread, then removes itself (see initialize_asyncio_loop).
        self.asyncio_handler = AsyncioFriendlyHandler(
            self.initialize_asyncio_loop)
        self.telegram_updater.dispatcher.add_handler(self.asyncio_handler)
        self.telegram_updater.dispatcher.add_error_handler(self.telegram_error)
        logger.debug('Adding asyncio update to queue')
        self.telegram_updater.update_queue.put(AsyncioUpdate())
        # Fixed log-message typo: was "Staring polling".
        logger.info("Starting polling")
        self.telegram_updater.start_polling(
            timeout=config.timeout_for_pooling,
            bootstrap_retries=config.retries_on_error)
        logger.info("Idle")
        self.telegram_updater.idle()

    def initialize_asyncio_loop(self):
        """Create a fresh asyncio event loop on the dispatcher thread and
        unregister this one-shot handler."""
        logger.debug('Initializing asyncio loop')
        asyncio.set_event_loop(asyncio.new_event_loop())
        self.telegram_updater.dispatcher.remove_handler(self.asyncio_handler)

    def message_handler(self, bot, update):
        """Route a plain message to the user identified by its chat id."""
        self.user_manager.get_user(update.message.chat_id).process_message(
            bot, update)

    def callback_handler(self, bot, update):
        """Route a callback query to the user identified by its chat id."""
        self.user_manager.get_user(
            update.callback_query.message.chat_id).process_callback(
                bot, update)

    def telegram_error(self, bot, update, error):
        """Dispatcher error handler: log TelegramErrors, then re-raise.

        Non-Telegram errors propagate out of the ``try`` unlogged.
        """
        try:
            raise error
        except TelegramError as details:
            logger.warning('There was a Telegram Error {}'.format(details))
            raise
def __init__(self, redis_pool, app):
    """Hold the shared managers, the redis pool and a greenlet pool."""
    self.chess_manager = ChessManager(redis_pool)
    self.user_manager = UserManager(redis_pool, app)
    # board id -> subscriber bookkeeping, filled elsewhere.
    self.board_subscribers = {}
    self.redis_pool = redis_pool
    self.app = app
    # self.notify_next_turn()
    self.pool = Pool(1000)
def production():
    """Production server that uses port 9090 for the websocket server.
    """
    flask_app = Flask(__name__, static_folder='dist', static_url_path='')
    manager = apps.AppManager(catkin_ws=secrets.CATKIN_WS)
    # Production data lives in the 'rws' database.
    db = MongoClient().rws
    user_manager = UserManager(db)
    robot_blueprint = Blueprint('robot', __name__)
    robot = Robot(robot_blueprint, user_manager)
    return RobotWebServer(flask_app, manager, user_manager, robot,
                          WebsocketServer(9090))
def test():
    """Test server. Most likely many of the objects will be mocked anyway.
    """
    flask_app = Flask(__name__, static_folder='dist', static_url_path='')
    manager = apps.AppManager(catkin_ws=None)
    # Test data lives in a separate 'rws_test' database.
    db = MongoClient().rws_test
    user_manager = UserManager(db)
    robot_blueprint = Blueprint('robot', __name__)
    robot = Robot(robot_blueprint, user_manager)
    server = RobotWebServer(flask_app, manager, user_manager, robot,
                            WebsocketServer(9090))
    # Swap the Flask app for a test client so callers can issue requests.
    server._app.config['TESTING'] = True
    server._app = server._app.test_client()
    return server
def sign_in():
    """Request to authenticate with the system.

    Reads ``username`` and ``password`` from the POSTed form, logs the user
    in via :class:`UserManager`, and on success redirects to the control
    panel with the JWT stored in a cookie.
    """
    try:
        username_email = flask.request.form['username']
        password = flask.request.form['password']
    except KeyError:
        # Narrowed from a bare ``except:`` — a missing form field raises
        # KeyError (werkzeug's BadRequestKeyError subclasses it).
        return responses.get_invalid_request()
    try:
        user, jwt = UserManager(db).login(username_email, password)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed. Any login failure shows the error page.
        # TODO probably use a redirect instead of rendering
        return flask.render_template('index.html', login_error=True)
    response = flask.make_response(flask.redirect(CP_WEB_PATH))
    response.set_cookie('jwt', jwt)
    return response
def __init__(self, host="mumble.koalabeast.com", name="ChangeThis",
             channel=None, user_manager=None, root=False,
             home_server="origin"):
    """Set up the managers and connection state for one bot instance.

    NOTE(review): ``self.bots`` is not created here — presumably a
    class-level list shared by all instances; confirm in the class body.
    The ``user_manager`` parameter is accepted but not used in this method.
    """
    # Connection objects are filled in once the bot connects.
    self.reader = None
    self.writer = None
    self.connected = False
    self.username = name
    self.host = host
    self.home_server = home_server
    self.channel = channel
    self.channels = {}
    self.own_user = None
    self.users = UserManager()
    self.channel_manager = ChannelManager()
    self.command_manager = CommandManager(self, self.users)
    self.group_manager = GroupManager(self)
    self.bots.append(self)
    if root:
        self.start_bots()
def development():
    """Development server.
    """
    flask_app = Flask(__name__, static_folder='dist', static_url_path='')
    # Allow the dev frontend origin to hit the API endpoints.
    CORS(flask_app,
         resources={r'/api/*': {
             'origins': secrets.DEV_FRONTEND_ORIGIN
         }})
    manager = apps.AppManager(catkin_ws=secrets.CATKIN_WS)
    # Development data lives in the 'rws_dev' database.
    db = MongoClient().rws_dev
    user_manager = UserManager(db)
    robot_blueprint = Blueprint('robot', __name__)
    robot = Robot(robot_blueprint, user_manager)
    return RobotWebServer(flask_app, manager, user_manager, robot,
                          WebsocketServer(9090))
def __init__(self, api_key, api_base_url=None, _requestor=None):
    """Create an API client and its per-resource managers.

    :param api_key: secret API key; must be a non-empty string.
    :param api_base_url: optional base URL override; '/v1' is appended.
        Defaults to the hosted endpoint.
    :param _requestor: test seam — a pre-built requestor used instead of
        constructing one.
    :raises TypeError: if ``api_key`` is None or blank.
    """
    # Reject empty strings too, not just None, to match the error message
    # ("cannot be blank") — previously '' slipped through.
    if not api_key:
        raise TypeError('api_key cannot be blank.')
    if api_base_url is None:
        api_base_url = 'https://api.userkit.io/v1'
    else:
        api_base_url += '/v1'
    self.api_key = api_key
    self.api_base_url = api_base_url
    # make the encapsulated objects
    self._rq = _requestor or Requestor(self.api_key, self.api_base_url)
    self.users = UserManager(self._rq)
    self.invites = InviteManager(self._rq)
    self.emails = EmailManager(self._rq)
    self.widget = WidgetManager(self._rq)
    self.logs = LogsManager(self._rq)
def register():
    """Request to register with the system.

    Reads the registration fields from the POSTed form and creates the
    account via :class:`UserManager`. Re-renders the registration page with
    the submitted values when the account already exists.
    """
    try:
        email = flask.request.form['email']
        username = flask.request.form['username']
        display_name = flask.request.form['display-name']
        password = flask.request.form['password']
    except KeyError:
        # Narrowed from a bare ``except:`` — a missing form field raises
        # KeyError (werkzeug's BadRequestKeyError subclasses it).
        return responses.get_invalid_request()
    try:
        UserManager(db).register(username, email, password, display_name)
        return flask.render_template('register.html', success=True)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        # Such user already existed
        return flask.render_template('register.html',
                                     success=False,
                                     email=email,
                                     username=username,
                                     display_name=display_name)
class Repository(object):
    """Represents the document management system as a repository.

    The :py:class:Repository is defined by: name, location, metadata file's
    location, paths metadata file's location, roles file type,
    :py:class:UserManager object and :py:class:DocumentManagement object.
    """

    def __init__(self,
                 name='Repository',
                 location=path.join('Repositories', 'repo_1'),
                 roles_file_type='txt'):
        """
        Initialisation of a new :py:class:Repository object.

        :param name: The name of the :py:class:Repository object, the
            default value is 'Repository'.
        :param location: The path of the :py:class:Repository object, the
            default value is 'Repositories/repo_1'.
        :param roles_file_type: The type of roles metadata file, it can be:
            TXT, XML, JSON.
        """
        self._name = name
        self._location = location
        self._metadata_file = path.join(
            self._location, '{}_metadata.edd'.format(path.basename(name)))
        self._paths_file = path.join(self._location, PATHS_FILE)
        if roles_file_type.lower() in ['txt', 'xml', 'json']:
            self._roles_file_type = roles_file_type.lower()
        else:
            raise ValueError(
                "The roles_file_type must be txt, xml or json, not {}!".
                format(roles_file_type))
        self.load()
        self._user_manager = UserManager(self._location, self._paths_file)
        self._document_manager = DocumentManager(self._location,
                                                 self._paths_file)
        # Periodic backup, every BACKUP_FREQUENCY days at 04:00.
        schedule.every(BACKUP_FREQUENCY).days.at('4:00').do(
            self.create_backup)
        self.initialize_logger(self.location)
        logger.info("The repository is initialized.")

    @property
    def name(self):
        """
        The property of the :py:attr:_name attribute.

        :return: The name of the :py:class:Repository object :py:attr:_name.
        """
        return self._name

    @name.setter
    def name(self, value):
        """
        The setter of the :py:attr:_name.

        :param value: New name.
        :return:
        """
        raise AttributeError("The repository object's name can't be changed!")

    @property
    def location(self):
        """
        The property of the :py:attr:_location attribute.

        :return: The location of the :py:class:Repository object
            :py:attr:_location.
        """
        return self._location

    @location.setter
    def location(self, value):
        """
        The setter of the :py:attr:_location.

        :param value: New location.
        :return:
        """
        raise AttributeError(
            "The repository object's location can't be changed!")

    @property
    def metadata_file(self):
        """
        The property of the :py:attr:_metadata_file attribute.

        :return: The metadata_file of the :py:class:Repository object
            :py:attr:_metadata_file.
        """
        return self._metadata_file

    @metadata_file.setter
    def metadata_file(self, value):
        """
        The setter of the :py:attr:_metadata_file.

        :param value: New metadata_file.
        :return:
        """
        raise AttributeError(
            "The repository object's metadata_file location can't be changed!")

    @property
    def paths_file(self):
        """
        The property of the :py:attr:_paths_file attribute.

        :return: The paths_file of the :py:class:Repository object
            :py:attr:_paths_file.
        """
        # BUG FIX: this property returned self._name instead of
        # self._paths_file.
        return self._paths_file

    @paths_file.setter
    def paths_file(self, value):
        """
        The setter of the :py:attr:_paths_file.

        :param value: New paths_file.
        :return:
        """
        raise AttributeError(
            "The repository object's paths_file location can't be changed!")

    @property
    def roles_file_type(self):
        """
        The property of the :py:attr:_roles_file_type attribute.

        :return: The roles_file_type of the :py:class:Repository object
            :py:attr:_roles_file_type.
        """
        return self._roles_file_type

    @roles_file_type.setter
    def roles_file_type(self, value):
        """
        The setter of the :py:attr:_roles_file_type.

        :param value: New roles_file_type.
        :return:
        """
        raise AttributeError(
            "The repository object's roles_file_type can't be changed!")

    @property
    def user_manager(self):
        """
        The property of the :py:attr:_user_manager attribute.

        :return: The user_manager of the :py:class:Repository object
            :py:attr:_user_manager.
        """
        return self._user_manager

    @user_manager.setter
    def user_manager(self, value):
        """
        The setter of the :py:attr:_user_manager.

        :param value: New user_manager.
        :return:
        """
        raise AttributeError(
            "The repository object's user_manager object can't be changed!")

    @property
    def document_manager(self):
        """
        The property of the :py:attr:_document_manager attribute.

        :return: The document_manager of the :py:class:Repository object
            :py:attr:_document_manager.
        """
        return self._document_manager

    @document_manager.setter
    def document_manager(self, value):
        """
        The setter of the :py:attr:_document_manager.

        :param value: New document_manager.
        :return:
        """
        raise AttributeError(
            "The repository object's document_manager object can't be changed!"
        )

    def load(self):
        """
        Try to load a :py:class:Repository object.

        If the :py:attr:_location path already exists then it tries to load
        it, but if not it will create a new :py:class:Repository object by
        calling the :py:meth:initialize method.

        :exception ValueError is raised if the path is not a directory.
        :return:
        """
        if path.exists(self._location):
            if path.isdir(self._location):
                self._creation_date = self.read_date('creation_date')
                self._last_backup_date = self.read_date('last_backup_date')
                self.create_repo_metadata_file(self._creation_date,
                                               self._last_backup_date)
                if self.is_backup_needed():
                    self._last_backup_date = datetime.utcnow().date()
                    self.create_repo_metadata_file(self._creation_date,
                                                   self._last_backup_date)
                    self.create_backup(backup_file_name=self._name)
                self._name = read_ini_file(
                    self._paths_file)['repository']['name']
            else:
                raise ValueError('The repository should be a directory!')
        else:
            self.initialize()

    def initialize(self):
        """
        Initialize a :py:class:Repository object on the :py:attr:_location
        path.

        :return:
        """
        makedirs(self._location)
        for name_key, dir_name_value in FOLDERS_PATH.iteritems():
            makedirs(path.join(self._location, dir_name_value))
        role_file_path = reduce(path.join, [
            self._location, 'users',
            '{}.{}'.format(ROLES_FILE, self._roles_file_type)
        ])
        # Touch the (empty) roles file.
        with open(role_file_path, 'w') as role_file:
            utime(role_file_path, None)
        self.create_default_path_file()
        self._creation_date = datetime.utcnow()
        # Sentinel "never backed up" date.
        self._last_backup_date = datetime.strptime('0001/1/1',
                                                   '%Y/%m/%d').date()
        self.create_repo_metadata_file(self._creation_date,
                                       self._last_backup_date)

    def absolute_path(self):
        """
        Determines the absolute path of the :py:class:Repository object.

        :return: The absolute path of the :py:class:Repository object.
        """
        if path.isabs(self._location):
            return self._location
        else:
            return path.abspath(self._location)

    def create_default_path_file(self):
        """
        Creates the paths file metadata file for the :py:class:Repository
        object and writes the data to file too.

        :return:
        """
        data = {
            'directories': FOLDERS_PATH,
            'files': {
                'repo_main_folder': path.basename(self._location),
                'paths': self._paths_file,
                'metadata': self._metadata_file
            },
            'repository': {
                'name': self._name
            }
        }
        write_ini_file(self._paths_file, data)
        # Fixed log-message typo: was "intto".
        logger.info(
            "The path file is created and the data is written into it.")

    def create_repo_metadata_file(self, date_obj, backup_date_obj):
        """
        Creates the :py:class:Repository object's metadata file and writes
        the data into it.

        :param date_obj: :py:attr:_creation_date attribute of the
            :py:class:Repository object.
        :param backup_date_obj: :py:attr:_last_backup_date attribute of the
            :py:class:Repository object.
        :return:
        """
        data = {
            'creation_date': {
                'year': date_obj.year,
                'month': date_obj.month,
                'day': date_obj.day,
                'hour': date_obj.hour,
                'minute': date_obj.minute,
                'second': date_obj.second,
                'microsecond': date_obj.microsecond
            },
            'last_backup_date': {
                'year': backup_date_obj.year,
                'month': backup_date_obj.month,
                'day': backup_date_obj.day
            }
        }
        write_ini_file(self._metadata_file, data)
        logger.info(
            "The repository's metadata file is created and the data is written into it."
        )

    def read_date(self, type_of_date):
        """
        Reads the :py:attr:_creation_date or :py:attr:_last_backup_date
        attribute.

        :param type_of_date: 'creation_date' or 'last_backup_date'.
        :return: A datetime.datetime object or a datetime.date object in
            function of the ``type_of_date``.
        """
        metadata_data = read_ini_file(self._metadata_file)
        if type_of_date == 'creation_date':
            # Fixed log-message typo: was "form".
            logger.info(
                "The creation date is read from the repository's metadata file."
            )
            return datetime.strptime(
                '{} {} {} {} {} {} {}'.format(
                    metadata_data[type_of_date]['year'],
                    metadata_data[type_of_date]['month'],
                    metadata_data[type_of_date]['day'],
                    metadata_data[type_of_date]['hour'],
                    metadata_data[type_of_date]['minute'],
                    metadata_data[type_of_date]['second'],
                    metadata_data[type_of_date]['microsecond']),
                '%Y %m %d %H %M %S %f')
        elif type_of_date == 'last_backup_date':
            # Fixed log-message typo: was "form".
            logger.info(
                "The last backup date is read from the repository's metadata file."
            )
            return datetime.strptime(
                '{:0>4} {} {}'.format(metadata_data[type_of_date]['year'],
                                      metadata_data[type_of_date]['month'],
                                      metadata_data[type_of_date]['day']),
                '%Y %m %d').date()

    @classmethod
    def find_all_documents_in_path(cls, from_path):
        """
        Finds all :py:class:Document objects in a path.

        :param from_path: The path where to search for :py:class:Document
            objects.
        :return: A list of available :py:class:Document IDs.
        """
        all_available_documents = []
        for file_or_folder in listdir(from_path):
            if path.isdir(path.join(from_path, file_or_folder)):
                try:
                    all_available_documents.append(int(file_or_folder))
                    logger.info(
                        "The {} directory is a repository document.".format(
                            file_or_folder))
                except ValueError:
                    # Narrowed from a bare ``except:`` — only non-numeric
                    # directory names are expected here.
                    logger.debug(
                        "The {} file/directory is not a repository document.".
                        format(file_or_folder))
        return all_available_documents

    def import_documents(self, from_path):
        """
        Imports all :py:class:Document objects from a path to the
        :py:class:Repository.

        :param from_path: The path where to search for :py:class:Document
            objects.
        :exception RuntimeError is raised if the :py:class:Document object
            doesn't contains the referenced file.
        :exception ValueError is raised if the :py:class:Document object has
            no author.
        :exception ValueError is raised if there is no available
            :py:class:Document objects on the ``from_path`` path.
        :exception ValueError is raised if the ``from_path`` doesn't exists.
        :return:
        """
        if path.exists(from_path):
            all_documents = Repository.find_all_documents_in_path(from_path)
            # Fixed log-message typo: was "form".
            logger.debug(
                "All documents are loaded from the {} path.".format(
                    from_path))
            metadata_data = read_ini_file(self._paths_file)
            logger.debug("The content repositories metadata file is loaded.")
            to_path = path.join(self._location,
                                metadata_data['directories']['documents'])
            if len(all_documents) > 0:
                for document_id in all_documents:
                    new_path = reduce(path.join, [to_path, str(document_id)])
                    old_path = path.join(from_path, str(document_id))
                    copytree(old_path, new_path)
                    logger.info(
                        "The {} directory's content is copied to {} path.".
                        format(old_path, new_path))
                    try:
                        document_files_existence = \
                            self._document_manager.document_files_exist(
                                document_id, user_manager=self._user_manager)
                        for file_name_key, exists_value in \
                                document_files_existence.iteritems():
                            if not exists_value:
                                logger.exception(
                                    "The {} file doesn't exists in the {} ID document!"
                                    .format(file_name_key, document_id))
                                raise RuntimeError(
                                    "The {} file doesn't exists in the {} ID document!"
                                    .format(file_name_key, document_id))
                        logger.info("All the directory's files exist.")
                        document = self._document_manager.load_document(
                            document_id, self._user_manager)
                        logger.debug(
                            "The document with {} ID is loaded into the memory"
                            .format(document_id))
                        if not isinstance(document.author, list):
                            doc_author = [document.author]
                        else:
                            doc_author = document.author
                        if len(doc_author) == 0:
                            logger.exception("No author related to document!")
                            raise ValueError("No author related to document!")
                    except Exception as e:
                        # Roll back the copied directory before re-raising.
                        rmtree(new_path)
                        logger.exception(
                            "An {} exception is raised when importing the document with {} ID."
                            .format(e.__class__.__name__, document_id))
                        raise e
            else:
                logger.exception(
                    "No document to import from the '{}' path!".format(
                        from_path))
                raise ValueError(
                    "No document to import from the '{}' path!".format(
                        from_path))
        else:
            logger.exception("The '{}' doesn't exists!".format(from_path))
            raise ValueError("The '{}' doesn't exists!".format(from_path))

    def export_documents(self, list_of_documents_id, path_to):
        """
        Exports all :py:class:Document objects in the
        ``list_of_documents_id`` from the :py:class:Repository object to the
        ``path_to`` path.

        :param list_of_documents_id: List of :py:class:Document object IDs
            to export.
        :param path_to: The path to export the :py:class:Document objects.
        :exception TypeError is raised if a not accepted and private
            :py:class:Document object is exported.
        :return:
        """
        if not path.exists(path_to):
            makedirs(path_to)
            logger.info("The {} path is created.".format(path_to))
        for document_id in list_of_documents_id:
            document = self._document_manager.load_document(document_id)
            logger.debug(
                "The document with {} ID is loaded into the memory.".format(
                    document_id))
            if document.state == 'accepted' and document.is_public():
                logger.debug(
                    "The document with {} id is in accepted state and is public."
                    .format(document_id))
                exported_document_path = path.join(
                    self._document_manager._location, str(document_id))
                # Copy every document file except its metadata file.
                for file_name in listdir(exported_document_path):
                    if file_name != '{}_document_metadata.edd'.format(
                            document_id):
                        copy2(path.join(exported_document_path, file_name),
                              path_to)
                        logger.info(
                            "The {} ID document's {} file is copied to {}.".
                            format(
                                document_id,
                                path.join(exported_document_path, file_name),
                                path_to))
                existing_metadata_file = '{}_document_metadata.edd'.format(
                    document_id)
                # NOTE(review): this deletes the metadata file from the
                # repository's own document directory, not from the export —
                # confirm this is the intended behaviour.
                remove(
                    path.join(exported_document_path, existing_metadata_file))
                logger.debug("The {} metadata file is deleted.".format(
                    path.join(exported_document_path,
                              existing_metadata_file)))
                user = self._user_manager.find_user_by_id(document.author)
                logger.debug(
                    "The {} ID user (author of the document) is loaded.".
                    format(document.author))
                data = {
                    'document': {
                        'title': document.title,
                        'description': document.description,
                        'author': '{} {}'.format(user.first_name,
                                                 user.family_name),
                        'files': document.files,
                        'doc_format': document.doc_format,
                        'creation_date': document.creation_date,
                        'modification_date': document.modification_date
                    }
                }
                # INI values must be strings.
                for key, value in data['document'].iteritems():
                    data['document'][key] = str(value)
                write_ini_file(
                    path.join(path_to, '{}.edd'.format(document_id)), data)
                logger.info(
                    "The document's metadata file is written to the filesystem."
                )
            else:
                # Fixed message typo: was "docuement".
                message = (
                    "The document must be accepted and public to export, "
                    "not {} and {}!").format(
                        document.state,
                        'Private' if not document.is_public() else 'Public')
                logger.exception(message)
                raise TypeError(message)

    def _reset_directory(self, base, subdir):
        """Remove and recreate ``base/subdir``, logging the removal."""
        target = path.join(base, subdir)
        rmtree(target)
        makedirs(target)
        logger.debug("The {} directory is removed.".format(target))

    def create_backup(self,
                      backup_file_name='backup',
                      backup_path='./Backups',
                      verbose=False,
                      date_format='%Y/%m/%d %H:%M:%S',
                      backup_documents=True,
                      backup_logs=True,
                      backup_projects=True,
                      backup_reports=True,
                      backup_users=True):
        """
        Creates a backup of the :py:class:Repository object to the
        ``backup_path`` with ``backup_file_name``.

        :param backup_file_name: The backup files name of the
            :py:class:Repository object, the default value is 'backup'.
        :param backup_path: The backup path where the ``backup_file_name``
            is saved, the default value is './Backups'
        :param verbose: Bool, if it's True it will print out some
            information about the backup process, default value is False.
        :param date_format: The date format in which the date are printed
            out if the ``verbose`` parameter is True, the default value is
            '%Y/%m/%d %H:%M:%S'.
        :param backup_documents: Bool, determines if to back up the
            :py:class:Document objects of the :py:class:Repository.
        :param backup_logs: Bool, determines if to back up the log files of
            the :py:class:Repository.
        :param backup_projects: Bool, determines if to back up the
            :py:class:Project objects of the :py:class:Repository.
        :param backup_reports: Bool, determines if to back up the
            :py:class:Report objects of the :py:class:Repository.
        :param backup_users: Bool, determines if to back up the
            :py:class:User objects of the :py:class:Repository.
        :return:
        """
        start_time = datetime.utcnow()
        logger.info(
            "The backup of the {} repository has started on UTC {}.".format(
                self._name, start_time.strftime(date_format)))
        if verbose:
            print("The backup of the {} repository has started on UTC {}.".
                  format(self._name, start_time.strftime(date_format)))
        if not path.exists(backup_path):
            makedirs(backup_path)
            logger.info("The {} backup path structure is created.".format(
                backup_path))
            if verbose:
                print("The {} backup path structure is created.".format(
                    backup_path))
        else:
            logger.info("The {} backup path exists.".format(backup_path))
            if verbose:
                print("The {} backup path exists.".format(backup_path))
        backup_file_name = self.determine_export_file_name(
            backup_file_name, backup_path)
        logger.info("The name of the backup file is: {}.zip.".format(
            backup_file_name))
        if verbose:
            print("The name of the backup file is: {}.zip.".format(
                backup_file_name))
        new_location = self._location
        if not (backup_documents and backup_logs and backup_projects
                and backup_reports and backup_users):
            # Work on a temporary copy so the live repository is untouched.
            pats_file = read_ini_file(self._paths_file)
            copytree(new_location, './{}'.format(backup_file_name))
            # Fixed log-message typo: was "copied from to {} with {} name".
            logger.debug("The backup file is copied from {} to {}.".format(
                new_location, './{}'.format(backup_file_name)))
            new_location = './{}'.format(backup_file_name)
            # BUG FIX: the excluded directories are pruned from the
            # temporary copy (new_location). The original rmtree'd them
            # under self._location, destroying live repository data while
            # the archive still contained everything.
            if not backup_documents:
                self._reset_directory(new_location,
                                      pats_file['directories']['documents'])
            if not backup_logs:
                self._reset_directory(new_location,
                                      pats_file['directories']['logs'])
            if not backup_projects:
                self._reset_directory(new_location,
                                      pats_file['directories']['projects'])
            if not backup_reports:
                self._reset_directory(new_location,
                                      pats_file['directories']['reports'])
            if not backup_users:
                self._reset_directory(new_location,
                                      pats_file['directories']['users'])
        make_archive(path.join(backup_path, backup_file_name),
                     'zip',
                     new_location,
                     verbose=verbose,
                     logger=logger)
        # Drop the temporary copy if one was made.
        if new_location == './{}'.format(backup_file_name) and path.exists(
                new_location):
            rmtree(new_location)
        end_time = datetime.utcnow()
        if verbose:
            print(
                "The backup is completed on UTC {}, please check the {} file".
                format(end_time.strftime(date_format),
                       path.join(backup_path, backup_file_name)))
            print("The process lasted {} seconds.".format(
                (end_time - start_time).total_seconds()))
        logger.info(
            "The backup is completed on UTC {}, please check the {} file".
            format(end_time.strftime(date_format),
                   path.join(backup_path, backup_file_name)))
        logger.info("The process lasted {} seconds.".format(
            (end_time - start_time).total_seconds()))

    def determine_export_file_name(self, backup_file_name, backup_path):
        """
        Determines based on the :py:meth:create_backup methods
        ``backup_file_name`` parameter the backup files name.

        :param backup_file_name: The :py:meth:create_backup methods
            ``backup_file_name``.
        :param backup_path: :py:meth:create_backup methods ``backup_path``.
        :return: The new backup files name.
        """
        if path.exists(path.join(backup_path, backup_file_name + '.zip')):
            new_backup_file_name = backup_file_name
            number = 1
            if '_' in backup_file_name:
                try:
                    number = int(backup_file_name.split('_')[-1])
                    # BUG FIX: join the underscore-separated name parts.
                    # The original '_'.join(backup_file_name[:-1]) joined
                    # every *character* of the string with underscores.
                    new_backup_file_name = '_'.join(
                        backup_file_name.split('_')[:-1]) + '_{}'
                except ValueError:
                    new_backup_file_name += '_{}'
            else:
                new_backup_file_name += '_{}'
            # Bump the counter until a free file name is found.
            while True:
                if new_backup_file_name.format(number) + '.zip' in listdir(
                        backup_path):
                    number += 1
                else:
                    logger.info("The export file name is {}.".format(
                        new_backup_file_name.format(number)))
                    return new_backup_file_name.format(number)
        else:
            logger.info(
                "The export file name is {}.".format(backup_file_name))
            return backup_file_name

    def restore(self,
                backup_file_name='backup',
                backup_path='./Backups',
                verbose=False,
                date_format='%Y/%m/%d %H:%M:%S',
                backup_documents=True,
                backup_logs=True,
                backup_projects=True,
                backup_reports=True,
                backup_users=True):
        """
        Restores a :py:class:Repository object from the filesystem and
        deletes the old :py:class:Repository object.

        :param backup_file_name: The backup files name of the
            :py:class:Repository object, the default value is 'backup'.
        :param backup_path: The backup path from where the
            ``backup_file_name`` will be restored, the default value is
            './Backups'
        :param verbose: Bool, if it's True it will print out some
            information about the restore process, default value is False.
        :param date_format: The date format in which the date are printed
            out if the ``verbose`` parameter is True, the default value is
            '%Y/%m/%d %H:%M:%S'.
        :param backup_documents: Bool, determines if to restore the
            :py:class:Document objects of the :py:class:Repository.
        :param backup_logs: Bool, determines if to restore the log files of
            the :py:class:Repository.
        :param backup_projects: Bool, determines if to restore the
            :py:class:Project objects of the :py:class:Repository.
        :param backup_reports: Bool, determines if to restore the
            :py:class:Report objects of the :py:class:Repository.
        :param backup_users: Bool, determines if to restore the
            :py:class:User objects of the :py:class:Repository.
        """
        start_time = datetime.utcnow()
        logger.info(
            "The restore of the {} repository has started on UTC {}.".format(
                self._name, start_time.strftime(date_format)))
        if verbose:
            print("The restore of the {} repository has started on UTC {}.".
                  format(self._name, start_time.strftime(date_format)))
        rmtree(self._location)
        logger.info("The old repository is deleted on {} path.".format(
            self._location))
        if verbose:
            print("The old repository is deleted on {} path.".format(
                self._location))
        with ZipFile(path.join(backup_path, backup_file_name + '.zip'),
                     "r") as z:
            z.extractall(self._location)
        if not (backup_documents and backup_logs and backup_projects
                and backup_reports and backup_users):
            pats_file = read_ini_file(self._paths_file)
            unimported = []
            if not backup_documents:
                self._reset_directory(self._location,
                                      pats_file['directories']['documents'])
                unimported.append('documents')
            if not backup_logs:
                self._reset_directory(self._location,
                                      pats_file['directories']['logs'])
                unimported.append('logs')
            if not backup_projects:
                self._reset_directory(self._location,
                                      pats_file['directories']['projects'])
                unimported.append('projects')
            if not backup_reports:
                self._reset_directory(self._location,
                                      pats_file['directories']['reports'])
                unimported.append('reports')
            if not backup_users:
                self._reset_directory(self._location,
                                      pats_file['directories']['users'])
                unimported.append('users')
            if len(unimported) > 0:
                print("The {} were not imported.".format(
                    ', '.join(unimported)))
                logger.debug("The {} were not imported.".format(
                    ', '.join(unimported)))
        end_time = datetime.utcnow()
        if verbose:
            print(
                "The restore is completed on UTC {}, please check the {} repository"
                .format(end_time.strftime(date_format), self._location))
            print("The process lasted {} seconds.".format(
                (end_time - start_time).total_seconds()))
        logger.info(
            "The restore is completed on UTC {}, please check the {} repository"
            .format(end_time.strftime(date_format), self._location))
        logger.info("The process lasted {} seconds.".format(
            (end_time - start_time).total_seconds()))

    def show_repository_info(self, name=''):
        """
        Shows some information about the :py:class:Repository object in an
        index.html file.

        The following information is gathered in the HTML file:
        :py:class:Repository object :py:attr:name, :py:attr:_creation_date,
        :py:attr:_last_backup_date, the :py:attr:path_file meta data, number
        and all :py:class:User objects, all :py:class:Roles object and the
        number and all :py:class:Document objects.

        :param name: This attribute is not used to show information about an
            actual :py:class:Repository, because the information is printed
            into the index.html file, but we can print out information about
            an archived :py:class:Repository too, and for those this
            parameter is the name of the backup file.
        :return:
        """
        paths = read_ini_file(self._paths_file)
        users = dict()
        documents = dict()
        roles = dict()
        for user_id in self._user_manager.find_all_users():
            users[user_id] = self._user_manager.load_user(user_id)
        for document_id in self._document_manager.find_all_documents():
            documents[document_id] = self._document_manager.load_document(
                document_id, self._user_manager)
        for role_key, user_ids_value in \
                self._user_manager.list_users_by_role().iteritems():
            roles[role_key] = ', '.join([str(i) for i in user_ids_value])
        abs_path = path.dirname(path.abspath(__file__))
        logger.info("All data is collected to show in HTML.")
        env = Environment(
            loader=FileSystemLoader(path.join(abs_path, 'templates')))
        template = env.get_template('rep_info.html')
        output_from_parsed_template = template.render(
            repository_name=self._name,
            creation_date=self._creation_date,
            backup_date=self._last_backup_date,
            paths=paths,
            users=users,
            roles=roles,
            documents=documents)
        logger.info("The template is rendered.")
        with open(path.join(abs_path, "index{}.html".format('_' + name)),
                  "wb") as fh:
            fh.write(output_from_parsed_template)
        logger.info("The template is written to {} file.".format(
            "index{}.html".format('_' + name)))
        webbrowser.open(path.join(abs_path, "index{}.html".format('_' + name)))
        logger.info("The {} file is opened in the browser.".format(
            "index{}.html".format('_' + name)))

    def show_backup_info(self, backup_file):
        """
        It will print into an HTML file the information of a backed up
        :py:class:Repository object with the help of the
        :py:meth:show_repository_info method.

        The HTML file will be like: index_[name_of_the_backup_file].HTML.

        :param backup_file: The backed up :py:class:Repository file, path
            and file name.
        :return:
        """
        if '.zip' not in backup_file:
            full_backup_file = backup_file + '.zip'
        else:
            full_backup_file = backup_file
        if path.exists(full_backup_file):
            abs_path = path.dirname(path.abspath(__file__))
            tmp_location = abs_path + '/tmp/tmp_repository'
            with ZipFile(full_backup_file, "r") as z:
                z.extractall(tmp_location)
            logger.debug("The temporary repository is extracted.")
            tmp_repo = Repository(location=tmp_location)
            tmp_repo.show_repository_info(name=path.basename(backup_file))
            rmtree(tmp_location)
            logger.debug("The temporary repository is deleted.")
        else:
            logger.exception(
                "The {} backup doesn't exists!".format(full_backup_file))
            raise TypeError(
                "The {} backup doesn't exists!".format(full_backup_file))

    def is_backup_needed(self):
        """
        This method is called every time a :py:class:Repository object is
        loaded with the :py:meth:load method.

        If the :py:attr:_last_backup_date is older than the
        :py:const:BACKUP_FREQUENCY in days.

        :return: Bool.
        """
        return (datetime.utcnow().date() -
                self._last_backup_date).days > BACKUP_FREQUENCY

    def initialize_logger(self, repository_path):
        """Attach DEBUG/INFO file handlers and a console handler to the
        module logger, writing into a 'logs' directory next to the
        repository path."""
        base_path = path.dirname(repository_path)
        full_path = path.join(base_path, 'logs')
        if not path.exists(full_path):
            makedirs(full_path)
        debug_file_logger = logging.FileHandler(
            path.join(full_path, '{}_debug_log.log'.format(self.name)))
        debug_file_logger.setLevel(logging.DEBUG)
        info_file_logger = logging.FileHandler(
            path.join(full_path, '{}_log.log'.format(self.name)))
        info_file_logger.setLevel(logging.INFO)
        console_logger = logging.StreamHandler()
        console_logger.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '%(asctime)s - %(processName)s / %(threadName)s - %(name)s - %(levelname)s - %(message)s'
        )
        debug_file_logger.setFormatter(formatter)
        info_file_logger.setFormatter(formatter)
        console_logger.setFormatter(formatter)
        logger.addHandler(debug_file_logger)
        logger.addHandler(info_file_logger)
        logger.addHandler(console_logger)
def test_delete_user(self):
    """Deleting user id 11 should report success."""
    manager = UserManager()
    deleted = manager.delete_user(11)
    self.assertTrue(deleted)
class Protocol:
    """Asyncio Mumble client protocol for a PUG bot.

    Speaks the Mumble TCP control protocol (length-prefixed protobuf
    messages) over TLS, keeps a registry of all running bots in the
    class-level ``bots`` list, and dispatches received messages to the
    channel / user / command / group managers.
    """

    # Held while a bot is mid-handshake; released on ServerConfig so
    # only one bot connects at a time.
    connection_lock = asyncio.Lock()
    # All live Protocol instances (class-level registry).
    bots = []
    VERSION_MAJOR = 1
    VERSION_MINOR = 2
    VERSION_PATCH = 4
    # Packed client version as sent in the Version message.
    VERSION_DATA = (VERSION_MAJOR << 16) | (VERSION_MINOR << 8) | VERSION_PATCH
    # Wire prefix: big-endian unsigned short (type) + unsigned int (length).
    PREFIX_FORMAT = ">HI"
    PREFIX_LENGTH = 6
    # Index in this list == numeric message type on the wire.
    ID_MESSAGE = [
        Mumble_pb2.Version, Mumble_pb2.UDPTunnel, Mumble_pb2.Authenticate,
        Mumble_pb2.Ping, Mumble_pb2.Reject, Mumble_pb2.ServerSync,
        Mumble_pb2.ChannelRemove, Mumble_pb2.ChannelState,
        Mumble_pb2.UserRemove, Mumble_pb2.UserState, Mumble_pb2.BanList,
        Mumble_pb2.TextMessage, Mumble_pb2.PermissionDenied, Mumble_pb2.ACL,
        Mumble_pb2.QueryUsers, Mumble_pb2.CryptSetup,
        Mumble_pb2.ContextActionModify, Mumble_pb2.ContextAction,
        Mumble_pb2.UserList, Mumble_pb2.VoiceTarget,
        Mumble_pb2.PermissionQuery, Mumble_pb2.CodecVersion,
        Mumble_pb2.UserStats, Mumble_pb2.RequestBlob, Mumble_pb2.ServerConfig
    ]
    # Reverse map: protobuf class -> numeric message type.
    MESSAGE_ID = {v: k for k, v in enumerate(ID_MESSAGE)}
    # Seconds between keepalive pings.
    PING_REPEAT_TIME = 5

    @property
    def num_channels(self):
        """Number of channels currently known to this client."""
        return len(self.channels)

    def __init__(self, host="mumble.koalabeast.com", name="ChangeThis",
                 channel=None, user_manager=None, root=False,
                 home_server="origin"):
        """Set up managers and register this bot; does not connect.

        :param host: Mumble server host name.
        :param name: Bot user name.
        :param channel: Channel to join after connecting.
        :param user_manager: unused here — a fresh UserManager is created.
        :param root: when True, spawn the other bots from the "bots" file.
        :param home_server: server tag used when creating a new group.
        """
        self.reader = None
        self.writer = None
        self.username = name
        self.host = host
        self.users = UserManager()
        self.channels = {}
        self.own_user = None
        self.channel = channel
        self.channel_manager = ChannelManager()
        self.command_manager = CommandManager(self, self.users)
        self.group_manager = GroupManager(self)
        self.connected = False
        self.home_server = home_server
        self.bots.append(self)
        if root:
            self.start_bots()

    @property
    def channel_id(self):
        """Channel id of our own user, or None before sync."""
        if self.own_user is not None:
            return self.own_user.channel_id

    def read_loop(self):
        """Read and dispatch framed protobuf messages until disconnect.

        Generator-based coroutine (``yield from``). On clean loss of the
        connection it reconnects; when the last bot dies it stops the
        event loop.
        """
        try:
            try:
                while self.connected:
                    header = yield from self.reader.readexactly(6)
                    message_type, length = struct.unpack(
                        Protocol.PREFIX_FORMAT, header)
                    if message_type not in Protocol.MESSAGE_ID.values():
                        # NOTE(review): die() only clears the flag — the
                        # code below still runs with the unknown type and
                        # would raise IndexError; presumably a break/return
                        # was intended. TODO confirm.
                        critical("Unknown ID, exiting.")
                        self.die()
                    raw_message = (yield from self.reader.readexactly(length))
                    message = Protocol.ID_MESSAGE[message_type]()
                    message.ParseFromString(raw_message)
                    yield from self.mumble_received(message)
            except asyncio.IncompleteReadError:
                critical("Disconnected. Reconnecting...")
            except GeneratorExit:
                self.connected = False
        finally:
            self.pinger.cancel()
            self.writer.close()
            if not self.connected:
                self.bots.remove(self)
                if not self.bots:
                    # Last bot gone: stop the whole event loop.
                    l = asyncio.get_event_loop()
                    l.stop()
            if self.connected:
                yield from self.reconnect()

    @asyncio.coroutine
    def mumble_received(self, message):
        """Dispatch one decoded protobuf message by its type."""
        if isinstance(message, Mumble_pb2.Version):
            pass
        elif isinstance(message, Mumble_pb2.Reject):
            critical("Rejected")
            self.die()
        elif isinstance(message, Mumble_pb2.CodecVersion):
            pass
        elif isinstance(message, Mumble_pb2.CryptSetup):
            pass
        elif isinstance(message, Mumble_pb2.ChannelState):
            self.channel_manager.add_from_message(message)
        elif isinstance(message, Mumble_pb2.PermissionQuery):
            pass
        elif isinstance(message, Mumble_pb2.UserState):
            # First UserState after auth describes our own user.
            if self.own_user is None:
                self.own_user = self.users.from_message(message)
                u = self.own_user
                print(u)
            elif message.session and message.session == self.own_user.session:
                self.own_user.update_from_message(message)
                u = self.own_user
            else:
                try:
                    u = self.users.from_message(message)
                except NameError:
                    u = None
            # Greet users that ended up in our channel.
            if u and u is not self.own_user:
                if u.channel_id == self.own_user.channel_id:
                    yield from self.user_joined_channel(u)
        elif isinstance(message, Mumble_pb2.ServerSync):
            pass
        elif isinstance(message, Mumble_pb2.ServerConfig):
            if self.connection_lock.locked():
                self.connection_lock.release()
            # We're as connected as possible.
            if self.home_server:
                asyncio.Task(
                    self.group_manager.new_group(server=self.home_server))
        elif isinstance(message, Mumble_pb2.Ping):
            pass
        elif isinstance(message, Mumble_pb2.UserRemove):
            pass
        elif isinstance(message, Mumble_pb2.TextMessage):
            yield from self.handle_text_message(message)
        elif isinstance(message, Mumble_pb2.ChannelRemove):
            self.channel_manager.del_channel(message.channel_id)
        else:
            warning("Received unknown message type")
            info(message)

    @asyncio.coroutine
    def send_protobuf(self, message):
        """Frame a protobuf message (type + length prefix) and send it."""
        msg_type = Protocol.MESSAGE_ID[message.__class__]
        msg_data = message.SerializeToString()
        length = len(msg_data)
        data = struct.pack(Protocol.PREFIX_FORMAT, msg_type,
                           length) + msg_data
        self.writer.write(data)

    @asyncio.coroutine
    def send_text_message(self, message, dest):
        """Send a text message to a User (private) or Channel."""
        m = Mumble_pb2.TextMessage()
        m.message = message
        if isinstance(dest, User):
            m.session.append(dest.session)
        elif isinstance(dest, Channel):
            m.channel_id.append(dest.id)
        yield from self.send_protobuf(m)

    @asyncio.coroutine
    def init_ping(self):
        """Keepalive loop: ping the server every PING_REPEAT_TIME seconds."""
        while True:
            yield from asyncio.sleep(Protocol.PING_REPEAT_TIME)
            yield from self.send_protobuf(Mumble_pb2.Ping())

    @asyncio.coroutine
    def connect(self):
        """Open the TLS connection, authenticate and enter the read loop."""
        info("Connecting...")
        yield from self.connection_lock
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        # sslcontext.options |= ssl.CERT_NONE
        self.reader, self.writer = (
            yield from asyncio.open_connection(self.host, 64738,
                                               server_hostname='',
                                               ssl=ssl_context))
        version = Mumble_pb2.Version()
        version.version = Protocol.VERSION_DATA
        version.release = "%d.%d.%d" % (Protocol.VERSION_MAJOR,
                                        Protocol.VERSION_MINOR,
                                        Protocol.VERSION_PATCH)
        version.os = platform.system()
        version.os_version = "Mumble %s asyncio" % version.release
        auth = Mumble_pb2.Authenticate()
        auth.username = self.username
        self.pinger = asyncio.Task(self.init_ping())
        # Mute/deafen ourselves — this bot only sends text.
        message = Mumble_pb2.UserState()
        message.self_mute = True
        message.self_deaf = True
        yield from self.send_protobuf(version)
        yield from self.send_protobuf(auth)
        yield from self.send_protobuf(message)
        asyncio.Task(self.join_channel(self.channel))
        self.connected = True
        yield from self.read_loop()

    def die(self):
        """Mark this bot as disconnected; read_loop handles teardown."""
        self.connected = False

    def update_user(self, message):
        # Intentionally a no-op.
        pass

    @asyncio.coroutine
    def handle_text_message(self, message):
        """Classify an incoming TextMessage and feed it to CommandManager.

        Builds a dict with origin/destination/private/message keys; tree
        messages are ignored (early return).
        """
        try:
            actor = self.users.by_session(message.actor)
            # NOTE(review): info() is called printf-style with {}-braces;
            # whether the args are substituted depends on the project's
            # logging helpers — TODO confirm.
            info("Message from {0}: {1}", actor, message.message)
            m = {}
        except KeyError:
            critical("Unknown actor in handle_text_message")
            return
        m['origin'] = actor
        m['private'] = False
        if len(message.session) > 0:
            # It's directed as a private message
            info("Received private")
            m['destination'] = self.own_user
            m['private'] = True
        elif message.channel_id:
            info("Received channel message")
            m['destination'] = self.channel_manager.get(message.channel_id[0])
        else:
            info("Received tree message")
            m['destination'] = None
            return m
        m['message'] = message.message
        try:
            x = yield from self.command_manager.handle_message(m)
        except NewBot as e:
            # A command requested a new bot instance.
            self.create_bot(*e.args[0])
            yield from self.send_text_message("Creating new bot.",
                                              m['origin'])
            return
        if isinstance(x, str):
            # Reply in-channel only for our home channel, else privately.
            if m['destination'] == self.get_channel(self.channel):
                s = m['destination']
            else:
                s = m['origin']
            yield from self.send_text_message(x, s)

    def get_channel(self, name) -> Channel:
        """Look up a channel by numeric id string or by name."""
        if name.isdigit():
            return self.channel_manager.get(name)
        else:
            return self.channel_manager.get_by_name(name)

    @asyncio.coroutine
    def join_channel(self, channel):
        """Ask the server to move us into *channel* (name, id or Channel).

        :return: False when the channel cannot be resolved, else True.
        """
        if isinstance(channel, str):
            channel = self.get_channel(channel)
            if channel is None:
                return False
        if isinstance(channel, Channel):
            channel = channel.id
        msg = Mumble_pb2.UserState()
        msg.channel_id = channel
        yield from self.send_protobuf(msg)
        return True

    def create_bot(self, name, channel, home_server="origin"):
        """Spawn a new bot on the hard-coded koalabeast server.

        NOTE(review): *home_server* is accepted but not forwarded to the
        new Protocol — looks unintended; TODO confirm.
        """
        print("Creating bot.", name, channel)
        p = Protocol('mumble.koalabeast.com', name=name, channel=channel)
        asyncio.Task(p.connect())

    @asyncio.coroutine
    def user_joined_channel(self, u):
        """Greet a user who joined our channel with the group link."""
        if self.group_manager.group:
            l = self.group_manager.group.group_link
            yield from self.send_text_message("Hi, I'm the PUGBot for this "
                                              "channel! The current group "
                                              "link is <a href='{}'>{}</a>"
                                              "".format(l, l), u)

    def start_bots(self):
        """Launch one bot per line of the "bots" file (name,channel,server)."""
        with open("bots") as f:
            bots = f.read()
        for n, c, s in [x.rstrip().split(",") for x in bots.splitlines()]:
            asyncio.Task(
                Protocol("mumble.koalabeast.com", name=n, channel=c,
                         home_server=s).connect())

    @asyncio.coroutine
    def reconnect(self):
        """Wait 5 seconds, then reconnect in a fresh task."""
        yield from asyncio.sleep(5)
        asyncio.Task(self.connect())
class TestUserManager(unittest.TestCase):
    """Tests for UserManager registration/confirmation against fakeredis.

    NOTE(review): every test method takes *send_simple_message_patch*,
    which implies a ``mock.patch`` decorator (on the class or methods)
    for the mail-sending helper that is not visible in this chunk —
    TODO confirm the decorator survived.
    """

    def setUp(self):
        # Fresh fake redis + mocked logger per test.
        self.fake_logger = MagicMock()
        self.fake_redis = fakeredis.FakeStrictRedis()
        self.user_manager = UserManager(
            self.fake_redis,
            self.fake_logger,
        )

    def test_register_success(self, send_simple_message_patch):
        """Registering sends one confirmation mail and stores the token."""
        fake_registration_token = 'ABCDEF'
        with patch('uuid.uuid4', return_value=fake_registration_token), \
                patch.dict('os.environ', {'DOMAIN_URL': 'http://MY_DOMAIN_URL'}):
            self.user_manager.register('gabriel', '12345678',
                                       '*****@*****.**')
        self.assertEqual(send_simple_message_patch.call_count, 1)
        self.assertEqual(
            send_simple_message_patch.call_args[0],
            ('*****@*****.**', 'Welcome to Megachess!!',
             ('<p>Please confirm your email account</p>'
              '<a href="http://MY_DOMAIN_URL/confirm_registration?token=ABCDEF">CONFIRM YOUR REGISTRATION</a>'
              )),
        )
        self.assertTrue(
            self.fake_redis.exists(
                self.user_manager._registration_id(fake_registration_token)))

    def test_register_invalid_username(self, send_simple_message_patch):
        """Usernames with spaces or digits are rejected."""
        with self.assertRaises(InvalidRegistrationUsername):
            self.user_manager.register('g aby', 'pass', '*****@*****.**')
        with self.assertRaises(InvalidRegistrationUsername):
            self.user_manager.register('gaby1', 'pass', '*****@*****.**')

    def test_register_invalid_email(self, send_simple_message_patch):
        """Malformed email addresses are rejected."""
        with self.assertRaises(InvalidRegistrationEmail):
            self.user_manager.register('gaby', 'pass', 'gab@')
        with self.assertRaises(InvalidRegistrationEmail):
            self.user_manager.register('gaby', 'pass', 'gabexample.com')

    def test_register_already_exists(self, send_simple_message_patch):
        """Registering an existing username sends no mail, stores no token."""
        fake_registration_token = 'ABCDEFH'
        self.user_manager._save_user('gabrieltwo', '<fake+pass>',
                                     '*****@*****.**')
        with patch('uuid.uuid4', return_value=fake_registration_token), \
                self.assertRaises(UserAlreadyExistsException):
            self.user_manager.register('gabrieltwo', '12345678',
                                       '*****@*****.**')
        self.assertEqual(send_simple_message_patch.call_count, 0)
        self.assertFalse(
            self.fake_redis.exists(
                self.user_manager._registration_id(fake_registration_token)))

    def test_confirm_registration_success(self, send_simple_message_patch):
        """Confirming consumes the token, creates the user, mails the auth token."""
        fake_registration_token = 'ABCDEFGI'
        with patch('uuid.uuid4', return_value=fake_registration_token):
            self.user_manager.register('gabrielthree', '12345678',
                                       '*****@*****.**')
        fake_auth_token = 'wqerqwerqwer'
        with patch('uuid.uuid4', return_value=fake_auth_token):
            self.user_manager.confirm_registration(fake_registration_token)
        self.assertIsNotNone(
            self.user_manager.get_user_by_username('gabrielthree'))
        self.assertFalse(
            self.fake_redis.exists(
                self.user_manager._registration_id(fake_registration_token)))
        # Second mail overall: registration mail + confirmation mail.
        self.assertEqual(send_simple_message_patch.call_count, 2)
        self.assertEqual(
            send_simple_message_patch.call_args[0],
            ('*****@*****.**', 'Your account in Megachess is confirmed!!!',
             ('<p>This is your personal auth_token to play</p>'
              '<p><strong>{}</strong></p>').format(fake_auth_token)),
        )

    def test_confirm_registration_error(self, send_simple_message_patch):
        """An unknown registration token raises InvalidRegistrationToken."""
        with self.assertRaises(InvalidRegistrationToken):
            self.user_manager.confirm_registration('<fake_registration_token>')
# Wire the login manager, markup engine and Wiki onto the Flask app object.
loginmanager.init_app(app)
loginmanager.login_view = 'user_login'
# Pick the Markup subclass whose NAME matches the configured MARKUP value.
markup = dict([(klass.NAME, klass)
               for klass in markup.Markup.__subclasses__()
               ])[app.config.get('MARKUP')]
wiki = Wiki(app.config.get('CONTENT_DIR'), markup)
# FIX ME: This monkeypatching is pollution crap .
# Should be possible to import them wherever,
# Wiki class should be a singleton.
app.wiki = wiki
app.signals = wiki_signals
app.EditorForm = EditorForm
app.loginmanager = loginmanager
app.manager = manager
app.users = UserManager(app.config.get('DATA_DIR'), app)
app.check_password = check_password
app.make_password = make_password
app.jinja_env.globals.update(user_can_edit=user_can_edit)
#===============================================================================
# VARIABLE STATIC FILE
#===============================================================================
# Resolve each configured custom-static entry to an absolute path.
for cs in CUSTOM_STATICS_LIST:
    csvalue = app.config.get(cs)
    if csvalue:
        csbasename = os.path.basename(csvalue)
        # NOTE(review): os.path.isabs(cs) tests the config KEY, not the
        # configured path — presumably os.path.isabs(csvalue) was meant;
        # TODO confirm before changing (loop body continues past this chunk).
        cspath = (csvalue if os.path.isabs(cs) else os.path.join(
            app.config["WIKI_ROOT"], csvalue))
# app.config['DEBUG'] = options.debug # app.config['CONTENT_DIR'] = options.directory app.config['TITLE'] = 'wiki' # app.config['AUTHENTICATION_METHOD'] = options.authentication_method app.config['AUTHENTICATION_METHOD'] = 'cleartext' app.config['SEARCH_IGNORE_CASE'] = True try: app.config.from_pyfile( os.path.join(app.config.get('CONTENT_DIR'), 'config.py')) except IOError: print("Startup Failure: You need to place a " "config.py in your content directory.") wiki = Wiki(app.config.get('CONTENT_DIR')) users = UserManager(app.config.get('CONTENT_DIR')) users.add_user('admin', 'dev', authentication_method=app.config.get('AUTHENTICATION_METHOD')) loginmanager = LoginManager() loginmanager.init_app(app) loginmanager.login_view = 'user_login' """ Forms ~~~~~ """ # https://stackoverflow.com/questions/13585663/flask-wtfform-flash-does-not-display-errors
def reset_password():
    """Request to reset user password"""
    manager = UserManager(db)
    manager.reset_password(None, "", "")
def test_get_user(self):
    """Fetching user 11 should yield a user whose id is 11."""
    manager = UserManager()
    fetched = manager.get_user(11)
    self.assertEqual(11, fetched.id)
'''
Created on 2013-8-14

@author: haojinming
'''
from users import UserManager
from contacts import contact
# NOTE: `template` is imported twice below (as an alias of jinja2_template
# and again as bottle.template); the second import wins. Left unchanged to
# preserve existing behavior.
from bottle import jinja2_template as template, jinja2_view as view, run, route, \
    static_file, install, get, put, request, post, delete, template, TEMPLATE_PATH, \
    redirect, response
import bottle
import jsonpickle

app = bottle.app()
um = UserManager.BLUserManager()


def test_register():
    """Smoke test: register a demo account and print the result."""
    result = um.register('*****@*****.**', '12345678')
    # print as a function call: valid in both Python 2 and Python 3
    # (the original `print result` statement is Python 2-only).
    print(result)


def test_activate():
    """Smoke test: activate the demo account with a fixed token."""
    result = um.activate("TuX49hE6vNxfGy58w7nb")
    print(result)


def test_login():
    """Smoke test: log in with the demo credentials and print the result."""
    result = um.login('*****@*****.**', '12345678')
    print(result)
class Controller:
    """Websocket message controller for the chess/tournament server.

    Parses JSON action messages from clients, authenticates them via an
    auth token, dispatches to ``action_*`` methods and pushes events back
    over per-client queues. Turn state lives in redis.
    """

    def __init__(self, redis_pool, app, connected_websockets):
        # Managers share the redis pool; tournament games are created
        # through the chess manager.
        self.chess_manager = ChessManager(redis_pool)
        self.user_manager = UserManager(redis_pool, app)
        self.tournament_manager = TournamentManager(redis_pool,
                                                    self.chess_manager)
        # board_id -> list of clients watching that board.
        self.board_subscribers = {}
        self.redis_pool = redis_pool
        self.app = app
        self.connected_websockets = connected_websockets

    async def execute_message(self, client, message):
        """Entry point for one raw websocket message."""
        self.app.logger.info('process_message: message: {}'.format(message))
        await self.process_message(client, message)

    async def get_current_username(self, client):
        """Resolve the client's username from its ?authtoken= query arg.

        :raises NoTokenException: when no auth token was supplied.
        """
        auth_token = client.args.get('authtoken')
        if not auth_token:
            raise NoTokenException()
        return await self.user_manager.get_username_by_auth_token(auth_token)

    async def process_message(self, client, message):
        """Parse, authenticate and dispatch a message to its action method.

        Any exception from the action is reported to the client as a
        ``response_error`` event and then re-raised.
        """
        method_name, data = await self.parse_message(message)
        current_username = await self.get_current_username(client)
        self.app.logger.info('process_message from {}: {} {}'.format(
            current_username, method_name, data))
        method = getattr(self, method_name)
        try:
            await method(current_username, client, data)
            # await self.send(client, 'response_ok', data)
        except Exception as e:
            tb = traceback.format_exc()
            self.app.logger.error('exception {} {}'.format(e, tb))
            data = {'exception': str(type(e))}
            # gevent.spawn(
            await self.send(client, 'response_error', data)
            raise e

    async def parse_message(self, message):
        """Validate the JSON envelope and map 'action' to an action_* method.

        :return: (method_name, data) tuple.
        :raises InvalidActionFormatException: message is not valid JSON.
        :raises InvalidNoActionException: no 'action' key.
        :raises InvalidActionNameException: no matching action_* method.
        :raises InvalidNoDataException: no 'data' key.
        """
        try:
            job = ujson.loads(message)
        except ValueError:
            raise InvalidActionFormatException()
        if 'action' not in job:
            raise InvalidNoActionException()
        action_name = job['action']
        method_name = 'action_' + str(action_name)
        if not hasattr(self, method_name):
            raise InvalidActionNameException()
        if 'data' not in job:
            raise InvalidNoDataException()
        data = job['data']
        return method_name, data

    def valid_auth(self, data):
        """True when the payload carries both username and password."""
        return 'username' in data and 'password' in data

    async def action_register(self, current_username, client, data):
        """Register a new user from a username/password payload."""
        if not self.valid_auth(data):
            raise InvalidRegisterException()
        return self.user_manager.register(data['username'], data['password'])

    async def action_get_connected_users(self, current_username, client,
                                         data):
        """Send the caller the current list of connected usernames."""
        self.app.logger.info('action_get_connected {} {}'.format(
            client, current_username))
        data = {'users_list': await self.get_active_users()}
        await self.send(client, 'update_user_list', data)

    async def action_login(self, current_username, client, data=None):
        """Bind the username to the client/queue and broadcast the roster."""
        self.app.logger.info('action_login {} {}'.format(
            client, current_username))
        client.username = current_username
        client.queue.username = current_username
        data = {'users_list': await self.get_active_users()}
        self.app.logger.info('connected users: {}'.format(data))
        await self.broadcast('update_user_list', data)
        return True

    async def get_active_users(self):
        """Set of usernames that have logged in on some connected queue."""
        return {
            queue.username
            for queue in self.connected_websockets
            if hasattr(queue, 'username')
        }

    def get_username_by_client(self, client):
        """Reverse-lookup the username bound to a websocket client."""
        for queue in self.connected_websockets:
            if (hasattr(queue, 'webservice') and queue.webservice == client
                    and hasattr(queue, 'username')):
                return queue.username

    async def action_challenge(self, current_username, client, data):
        """Challenge another user (by username) to a game."""
        challenged_username = data['username']
        challenger_username = current_username
        await self._challenge(challenger_username, challenged_username)

    async def challenge_with_auth_token(self, auth_token, username, message):
        """Challenge *username* on behalf of the owner of *auth_token*."""
        challenger_username = await self.user_manager.get_username_by_auth_token(
            auth_token)
        return await self._challenge(challenger_username, username)

    async def _challenge(self, challenger_username, challenged_username):
        """Create a board with random colors and notify the challenged user."""
        self.app.logger.info('action_challenge {} from {}'.format(
            challenged_username, challenger_username))
        # Coin-flip for colors.
        if random.choice([True, False]):
            white_username = challenger_username
            black_username = challenged_username
        else:
            white_username = challenged_username
            black_username = challenger_username
        move_left = 200
        board_id = self.chess_manager.challenge(
            white_username=white_username,
            black_username=black_username,
            move_left=move_left,
        )
        data = {
            'username': challenger_username,
            'board_id': board_id,
        }
        await self.broadcast('ask_challenge', data, challenged_username)
        return True

    async def broadcast(self, event, data, username=None):
        """Queue an event for all clients, or only those bound to *username*."""
        for queue in self.connected_websockets:
            if (not username or (hasattr(queue, 'username')
                                 and username == queue.username)):
                message = {
                    'event': event,
                    'data': data,
                }
                await queue.put(ujson.dumps(message))

    async def action_accept_challenge(self, current_username, client, data):
        """Accept a pending challenge and start the board."""
        board_id = data['board_id']
        await self._start_board(board_id)
        return True

    async def _start_board(self, board_id):
        """Mark the challenge accepted and hand the first turn out."""
        turn_token, username, actual_turn, board, move_left, opponent_username = self.chess_manager.challenge_accepted(
            board_id)
        next_turn_data = {
            'board_id': board_id,
            'turn_token': turn_token,
            'username': username,
            'actual_turn': actual_turn,
            'board': board,
            'move_left': move_left,
            'opponent_username': opponent_username,
        }
        # NOTE(review): .format() on a string with no placeholders — the
        # arguments are silently dropped from this log line.
        self.app.logger.info('action_accept_challenge ok'.format(
            board_id, next_turn_data))
        await self.set_next_turn(board_id, next_turn_data)

    async def action_abort(self, current_username, client, data):
        """Abort a board on behalf of the caller and announce game over."""
        board_id = data['board_id']
        self.chess_manager.abort(board_id, current_username)
        await self.send_gameover(board_id)

    async def action_move(self, current_username, client, data):
        """Apply a move for a turn token, enforcing the redis turn-timeout.

        The turn key must still exist in redis (deleted here to claim the
        turn); a missing key means the turn already timed out. An invalid
        move forces the turn to change instead of failing the game.
        """
        board_id = data['board_id']
        turn_token = data['turn_token']
        key = self.get_next_turn_key(board_id, turn_token)
        self.app.logger.info('action_move control timeout {}'.format(key))
        if self.redis_pool.exists(key):
            self.app.logger.info(
                'action_move control timeout OK {}'.format(key))
            self.redis_pool.delete(key)
        else:
            # timeout...
            self.app.logger.info(
                'action_move control timeout ERROR {}'.format(key))
            raise TimeoutException()
        processed = False
        try:
            turn_token, username, actual_turn, board, move_left, opponent_username = self.chess_manager.move_with_turn_token(
                turn_token=data['turn_token'],
                from_row=data['from_row'],
                from_col=data['from_col'],
                to_row=data['to_row'],
                to_col=data['to_col'],
            )
            processed = True
        except GameOverException:
            await self.send_gameover(board_id)
            return
        except Exception as e:
            tb = traceback.format_exc()
            try:
                self.app.logger.error('action_move {} exception {} {}'.format(
                    board_id, e, tb))
                # Bad move: skip to the other player's turn.
                await self.force_change_turn(data['board_id'],
                                             data['turn_token'])
                return
                # turn_token, username, actual_turn, board, move_left = self.chess_manager._next_turn_token(board_id)
            except GameOverException:
                await self.send_gameover(board_id)
                return
        next_turn_data = {
            'board_id': board_id,
            'turn_token': turn_token,
            'username': username,
            'actual_turn': actual_turn,
            'board': board,
            'move_left': move_left,
            'opponent_username': opponent_username,
        }
        await self.set_next_turn(board_id, next_turn_data)

    def get_next_turn_key(self, board_id, turn_token):
        """Redis key guarding one (board, turn-token) pair."""
        return "next_turn:{}:{}".format(board_id, turn_token)

    async def set_next_turn(self, board_id, next_turn_data):
        """Persist the next turn in redis and schedule its processing."""
        self.app.logger.info('set_next_turn {} {}'.format(
            board_id, next_turn_data))
        key = self.get_next_turn_key(board_id, next_turn_data['turn_token'])
        self.redis_pool.set(key, ujson.dumps(next_turn_data))
        await self.enqueue_next_turn(key)
        # if not self._save_turn(next_turn_data):
        #     raise InvalidSaveTurnException()

    async def enqueue_next_turn(self, key):
        """Process the stored turn (formerly queued via redis, now inline)."""
        self.app.logger.info('enqueue_next_turn {}'.format(key))
        # self.redis_pool.rpush("next_turn_queue", key)
        # self.pool.wait_available()
        # self.pool.spawn(self.process_next_turn, key)
        await self.process_next_turn(key)

    def _save_turn(self, data):
        """Best-effort persist of a turn blob; returns success flag."""
        try:
            data_json = ujson.dumps(data)
            self.redis_pool.set("{0}:{1}".format('turn', data['turn_token']),
                                data_json)
            return True
        except Exception:
            return False

    async def send_gameover(self, board_id):
        """Announce the final board/score to both players."""
        board = self.chess_manager.get_board_by_id(board_id)
        # Tournament boards also notify the tournament bookkeeping.
        if self.tournament_manager.get_tournament_key('') in board_id:
            self.tournament_manager.board_finish(board_id)
        data = {
            'board': board.board.get_simple(),
            'white_username': str(board.white_username),
            'black_username': str(board.black_username),
            'white_score': str(board.white_score),
            'black_score': str(board.black_score),
            'board_id': board_id,
        }
        await self.broadcast('gameover', data, board.white_username)
        await self.broadcast('gameover', data, board.black_username)

    async def force_change_turn(self, board_id, turn_token):
        """Skip the current turn (timeout/invalid move) and issue the next."""
        self.app.logger.info('force_change_turn {} {}'.format(
            board_id, turn_token))
        try:
            turn_token, username, actual_turn, board, move_left, opponent_username = self.chess_manager.force_change_turn(
                board_id, turn_token)
        except GameOverException:
            await self.send_gameover(board_id)
            return
        next_turn_data = {
            'board_id': board_id,
            'turn_token': turn_token,
            'username': username,
            'actual_turn': actual_turn,
            'board': board,
            'move_left': move_left,
            'opponent_username': opponent_username,
        }
        self.app.logger.info('force_change_turn set_next_turn {} {}'.format(
            board_id, turn_token))
        await self.set_next_turn(board_id, next_turn_data)

    async def process_next_turn(self, key):
        """Notify the player whose turn it is, then enforce a 30s timeout.

        If the turn key still exists in redis after the wait, the player
        never moved: the key is consumed and the turn is force-changed.
        """
        self.app.logger.info('process_next_turn {}'.format(key))
        try:
            # key = self.redis_pool.blpop('next_turn_queue')
            if not key:
                self.app.logger.info('Nothing pending to process')
                return
            data = ujson.loads(self.redis_pool.get(key))
            self.app.logger.info('next_turn key: {} data: {}'.format(
                key, data))
            await self.broadcast('your_turn', data, data['username'])
            # self.notify_to_board_subscribers(data['board_id'])
            # control timeout
            await asyncio.sleep(30)
            self.app.logger.info('Checking timeout {} {}'.format(
                data['board_id'], data['turn_token']))
            if self.redis_pool.exists(key):
                self.app.logger.info('Forcing timeout {} {}'.format(
                    data['board_id'], data['turn_token']))
                self.redis_pool.delete(key)
                await self.force_change_turn(data['board_id'],
                                             data['turn_token'])
        except Exception as e:
            tb = traceback.format_exc()
            self.app.logger.error(
                'process_next_turn {} exception {} {}'.format(key, e, tb))
        self.app.logger.info('end process_next_turn {}'.format(key))

    def notify_to_board_subscribers(self, board_id):
        """Push the current board to every subscriber of *board_id*.

        NOTE(review): notify_board_update is a coroutine but is called
        here without await from a sync method — as written the coroutine
        objects are created and never run. TODO confirm intent.
        """
        board = self.chess_manager.get_board_by_id(board_id)
        for board_subscriber_client in self.board_subscribers.get(
                board_id, []):
            self.notify_board_update(board_subscriber_client, board)

    async def notify_board_update(self, board_subscriber_client, board):
        """Send one subscriber the simplified board and the scores."""
        data = {
            'board': board.board.get_simple(),
            'white_username': board.white_username,
            'black_username': board.black_username,
            'white_score': board.white_score,
            'black_score': board.black_score,
        }
        await self.send(board_subscriber_client, 'update_board', data)

    async def action_subscribe(self, current_username, client, data):
        """Subscribe the caller to live updates of one board."""
        board_id = data['board_id']
        board = self.chess_manager.get_board_by_id(board_id)
        if board_id not in self.board_subscribers:
            self.board_subscribers[board_id] = []
        self.board_subscribers[board_id].append(client)
        # NOTE(review): missing await — this coroutine is never executed.
        self.notify_board_update(client, board)
        return True

    async def send(self, client, event, data):
        """
        Send given data to the registered client.
        Automatically discards invalid connections.
        """
        try:
            self.app.logger.info(
                u'send to client: {}, event: {}, data: {}'.format(
                    client, event, data))
            message = {
                'event': event,
                'data': data,
            }
            # print 'sent to {0}: {1}'.format(client, message)
            await client.send(ujson.dumps(message))
        except Exception:
            # Deliberate best-effort: a dead client must not kill dispatch.
            pass
            # app.logger.info(u'Exception on sending to client: {}'.format(client))
            # self.clients.remove(client)

    async def action_create_tournament(self, current_username, client, data):
        """Create a tournament and report it back to the caller."""
        tournament = self.tournament_manager.create_tournament()
        await self.send(client, 'tournament_created', tournament)
        return True

    async def action_add_user_to_tournament(self, current_username, client,
                                            data):
        """Add one user — or all active users for '*' — to a tournament."""
        tournament_id = data['tournament_id']
        username = data['username']
        if username == '*':
            active_usernames = await self.get_active_users()
            for username in active_usernames:
                self.tournament_manager.add_user(tournament_id, username)
        else:
            self.tournament_manager.add_user(tournament_id, username)
        users = self.tournament_manager.get_users(tournament_id)
        await self.send(client, 'user_added_to_tournament', users)
        return True

    async def action_start_tournament(self, current_username, client, data):
        """Start a tournament: spawn a task per generated board."""
        tournament_id = data['tournament_id']
        tournament = self.tournament_manager.get_tournament(tournament_id)
        # TODO: control and change state...
        boards = self.tournament_manager.start(tournament_id)
        for board_id in boards:
            asyncio.create_task(self._start_board(board_id))
        users = self.tournament_manager.get_users(tournament_id)
        await self.send(client, 'tournament_started', users)
        return True
def get_user_manager(self):
    """Return the UserManager, creating it lazily on first access."""
    manager = self._user_manager
    if manager is None:
        manager = UserManager(self)
        self._user_manager = manager
    return manager
# Flask front-end for the reviews service; managers are module-level
# singletons shared by all requests.
from flask import Flask, jsonify, request
from reviews import ReviewManager
from users import UserManager

rm = ReviewManager()
um = UserManager()
app = Flask(__name__)


# NOTE(review): login over GET puts the password in the query string
# (logged by proxies/servers); POST with form fields would be safer.
@app.route('/users/login', methods=['GET'])
def login():
    """Return the user id for a username/password pair."""
    username = request.args.get('username')
    password = request.args.get('password')
    return jsonify(user_id=um.login(username, password))


@app.route('/users/register', methods=['POST'])
def register():
    """Create an account and return its new user id."""
    username = request.form.get('username')
    password = request.form.get('password')
    user_id = um.create_account(username, password)
    return jsonify(user_id=user_id)


@app.route('/reviews/save', methods=['POST'])
def save_review():
    """Persist a submitted review (body continues past this chunk)."""
    user_id = request.form.get('user_id')
    review = request.form.get('review')
    grade = request.form.get('grade')
    major = request.form.get('major')
# Tail of a bot.send_message(...) call whose opening line is above this
# chunk: message text is the submit template plus the mark out of 100.
text=etc.text["submit"] + str(mark) + '/100' + etc.text["submit_end"],
parse_mode='markdown',
reply_markup=reply_markup)
# comments = Review.getComments(user.place['id'], user.chat_id)
# if len(comments) > 0:
#     message = mark_text(update, comments,0,user)
#     context.bot.send_message(update.message.chat.id, text=message, parse_mode='markdown', reply_markup=ReplyKeyboardRemove())
# Conversation is finished: drop the per-chat state.
UM.delete_user(user.chat_id)


if __name__ == "__main__":
    # Initialized BOT
    UM = UserManager()
    # Token comes from the environment; KeyError here means it is unset.
    updater = Updater(token=environ['bot_token'], use_context=True)
    dispatcher = updater.dispatcher
    # Commands
    dispatcher.add_handler(CommandHandler('start', start_state))
    # help command
    dispatcher.add_handler(CommandHandler('help', help_state))
    # place command
    dispatcher.add_handler(CommandHandler('place', place_state))
    # Location handler
    dispatcher.add_handler(MessageHandler(Filters.location, location_state))
def test_add_users(self):
    """Adding a user should echo back the given name."""
    manager = UserManager()
    created = manager.add_user('test', 'password')
    self.assertEqual('test', created.get('name'))