Example #1
async def test_flow():
    """
    Test the flow on Python 3.6 and up; async generators are
    not supported in 3.5.
    """

    r = RethinkDB()
    r.set_loop_type("asyncio")

    connection = await r.connect(os.getenv("REBIRTHDB_HOST"))

    try:
        await r.db_create(INTEGRATION_TEST_DB).run(connection)
    except ReqlRuntimeError:
        pass

    connection.use(INTEGRATION_TEST_DB)

    await r.table_create("marvel").run(connection)

    marvel_heroes = r.table('marvel')
    await marvel_heroes.insert({
        'id': 1,
        'name': 'Iron Man',
        'first_appearance': 'Tales of Suspense #39'
    }).run(connection)

    cursor = await marvel_heroes.run(connection)
    async for hero in cursor:
        assert hero['name'] == 'Iron Man'

    await connection.close()
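This coroutine is written for an async-aware test runner. As a minimal sketch, assuming it is run as a plain script with the same REBIRTHDB_HOST environment variable set, it could be driven directly with asyncio:

import asyncio

# Minimal standalone runner for the coroutine above (assumption: outside a
# test framework; a test suite would normally collect it via an async plugin).
if __name__ == "__main__":
    asyncio.run(test_flow())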
Example #2
def auto(member: Type[Model]):
    """Automatic database and table creation for the given type (Modelchild)."""
    if not issubclass(member, Model) or member is Model:
        return

    rdb = RethinkDB()
    conn = rdb.connect(
        host=db.HOST,
        port=db.PORT,
        db=db.DB_NAME,
        user=db.USER,
        password=db.PASSWORD,
        ssl=db.SSL,
        timeout=db.TIMEOUT,
    )

    tables = rdb.table_list().run(conn)
    if member.tablename not in tables:
        LOG.info("create table %s", member.tablename)
        rdb.table_create(member.tablename).run(conn)
        indexes = member.get_indexes()
        if indexes:
            # TODO: at this time, it's only working with simple index
            for index in indexes:
                rdb.table(member.tablename).index_create(index).run(conn)
                rdb.table(member.tablename).index_wait(index).run(conn)

    conn.close()
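One plausible way to drive auto() is to scan a module for Model subclasses and register each one; the module name below is an assumption for illustration:

import inspect

# Hypothetical registration pass over an application's models module.
from myapp import models  # assumed module containing Model subclasses

for _name, obj in inspect.getmembers(models, inspect.isclass):
    auto(obj)  # auto() ignores anything that is not a strict Model subclass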
Example #3
class Iface:
    def __init__(self, host="127.0.0.1", port=28015, db="arcos"):
        self.r = RethinkDB()
        self.conn = self.r.connect(host=host, port=port, db=db)

    def clear_table(self, table: str):
        self.r.table(table).delete().run(self.conn)

    def create_table(self, table: str) -> bool:
        try:
            self.r.table_create(table).run(self.conn)
            return True
        except ReqlOpFailedError:
            return False

    def get_by_uid(self, table: str, uid_name: str, uid_value: str) -> object:
        cursor = self.r.table(table).filter(self.r.row[uid_name] == uid_value).run(self.conn)
        for document in cursor:
            return document
        return None

    def insert(self, table: str, uid_name: str, uid_value: str, data: object) -> bool:
        if self.get_by_uid(table, uid_name, uid_value):
            return False
        data[uid_name] = uid_value
        self.r.table(table).insert(data).run(self.conn)
        return True

    def update(self, table: str, uid_name: str, uid_value: str, data: object) -> bool:
        if not self.get_by_uid(table, uid_name, uid_value):
            self.insert(table, uid_name, uid_value, data)
            return True
        self.r.table(table).filter(self.r.row[uid_name] == uid_value).update(data).run(self.conn)
        return True
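A short usage sketch for the Iface wrapper above; the table and field names are chosen purely for illustration:

# Hypothetical usage of Iface (names are placeholders).
iface = Iface(host="127.0.0.1", port=28015, db="arcos")
iface.create_table("users")                                    # False if it already exists
iface.insert("users", "username", "alice", {"role": "admin"})  # False if "alice" exists
print(iface.get_by_uid("users", "username", "alice"))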
Example #4
    def connection(self):
        r = RethinkDB()
        try:
            conn = r.connect(host=self.RDB_HOST, port=self.RDB_PORT)
            return conn
        except RqlDriverError:
            raise
Example #5
class BaneSensor:
    def __init__(self, ip='localhost', bane_navn='Rink A'):
        self.bane_navn = bane_navn
        self.r = RethinkDB()
        self.conn = self.r.connect(ip, 28015)

    def oppdater_sensordata(self, temperatur, luftfuktighet, count=0):
        # Update this rink's temperature/humidity record, retrying up to 20 times on failure.
        try:
            self.r.db(db_navn).table(status_tabell_navn).filter({
                'bane': self.bane_navn
            }).update({
                'luftfuktighet': luftfuktighet,
                'temperatur': temperatur,
            }).run(self.conn)
        except Exception:
            if count <= 20:
                self.oppdater_sensordata(temperatur, luftfuktighet, count + 1)
            else:
                # "Failed to upload data to the database"
                print(f'{self.bane_navn}: Feilet med å laste opp data til databasen')
            return

        # "Sensor data updated"
        print(f'{self.bane_navn}: Sensordata oppdatert')
Example #6
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.prod = True if os.environ.get("pm_id") else False
        self.config = cfg

        logging.basicConfig(
            format="[%(levelname)s] - %(message)s",
            level=logging.INFO if os.environ.get("LOG_INFO") else
            (logging.INFO if self.prod else logging.DEBUG))
        self.logger = logging.getLogger("avabot")
        self.session = aiohttp.ClientSession(loop=self.loop)
        self.start_time = int(round(time.time() * 1000))
        self.uptime = lambda: int(round(time.time() * 1000) - self.start_time)
        self.public_dev = False

        # We only need to connect to rethink once...
        self.r = RethinkDB()
        self.r.set_loop_type("asyncio")
        self.db_connect_task = self.loop.create_task(self._db_connect())

        for cog_name in cog_list:
            try:
                self.logger.info(f"Loading {cog_name}")
                self.load_extension(f"ext.{cog_name}")
            except Exception as err:
                self.logger.error(f"Failed to load {cog_name}!! {err}")
Example #7
class Database:
    def __init__(self, host, port, user, password, db):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.db = db
        self.r = RethinkDB()
        self.conn = None

    def query_results(self, query):
        """
        queries the database for games matching the given string
        """
        results = []
        if query == "":
            return results

        # query for products
        self.connect()

        cursor = self.r.table('products').filter(
            (self.r.row["app_name"].match("(?i)" + query))
            & (self.r.row["is_dlc"] == False)).limit(20).run(self.conn)

        # build result array
        for item in cursor:
            if item is not None:
                results.append(item)

        # close the database connection
        self.teardown()

        return results

    def connect(self):
        """
        opens a new connection to the database
        """
        try:
            # connect to the database
            self.conn = self.r.connect(
                host=self.host,
                port=self.port,
                db=self.db,
                user=self.user,
                password=self.password,
            )
        except RqlDriverError:
            abort(503, "No database connection could be established.")

    def teardown(self):
        """
        closes an open connection to the database
        """
        try:
            self.conn.close()
        except AttributeError:
            pass
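A rough sketch of how this Database helper might be called; the constructor arguments and search term are placeholders:

# Hypothetical caller; query_results() opens and closes its own connection.
db = Database(host="localhost", port=28015, user="admin", password="", db="products_db")
for product in db.query_results("witcher"):
    print(product["app_name"])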
Example #8
    def __init__(self):

        self.dbname = "DB"
        self.r = RethinkDB()
        self.connection = self.r.connect("127.0.0.1", 28015).repl()
        self.connection.use(self.dbname)

        self._init_db()
Example #9
    def __init__(self, host, port, user, password, db):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.db = db
        self.r = RethinkDB()
        self.conn = None
Example #10
def main():
    argument_spec = dict(
        host=dict(required=True),
        port=dict(required=False, type=int, default=28015),
        user=dict(required=False, default="admin"),
        password=dict(required=False, default="", no_log=True),
        ssl=dict(required=False, type=dict, default=None),
        table=dict(
            required=False,
            choices=[
                "table_config",
                "server_config",
                "db_config",
                "cluster_config",
                "table_status",
                "server_status",
                "current_issues",
                "users",
                "permissions",
                "jobs",
                "stats",
                "logs",
            ],
            default="server_status",
        ),
        limit=dict(required=False, type=int, default=10),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
    )

    client = RethinkDB()
    _params = {
        "host": module.params["host"],
        "port": module.params["port"],
        "user": module.params["user"],
        "password": module.params["password"],
        "ssl": module.params["ssl"],
        "db": "rethinkdb",
    }
    __res = []

    try:
        conn = client.connect(**_params)
        _res = (
            client.table(module.params["table"]).limit(module.params["limit"]).run(conn)
        )
        while True:
            try:
                __res.append(_res.next())
            except DefaultCursorEmpty:
                break
        module.exit_json(result=__res)
    except (ReqlAuthError, ReqlOpFailedError) as e:
        module.fail_json(msg=e.message)
    finally:
        conn.close(noreply_wait=False)
Example #11
            async def __aenter__(myself):
                r = RethinkDB()
                r.set_loop_type('asyncio')
                if not hasattr(myself, 'conn') or not myself.conn or not myself.conn.is_open():

                    myself.conn = await r.connect(self.host, self.port, self.db, user=self.user, password=self.password)
                    self.log.debug(f"Connecting using connection: {myself} (AsyncIO)")

                if not myself.conn.is_open():
                    raise Exception("Cannot open a new rethinkdb connection...")

                return myself.conn
Example #12
    def __init__(self, host: str = '127.0.0.1', port: int = 28015) -> None:
        self.log = logging
        self.format = '%(asctime)s.%(msecs)d | \
%(levelname)s | %(module)s.%(funcName)s:%(lineno)d %(message)s'

        self.log.basicConfig(level=logging.DEBUG,
                             format=self.format,
                             datefmt='%Y-%m-%d %H:%M:%S')
        self.db = RethinkDB()
        self._host = host
        self._port = port
        self.conn = None
Example #13
    def test_database_creation(self):
        """Check, if :meth:rethinkmodel.manage.check_db() creates the database."""
        db_name = "test_creation"
        config(dbname=db_name)
        check_db()

        rdb = RethinkDB()
        conn = rdb.connect()
        dbs = rdb.db_list().run(conn)
        if db_name not in dbs:
            self.fail(f"Database named {db_name} was not created")
        conn.close()
Example #14
    def _init_db_connection(self):
        self.rdb = RethinkDB()

        # Create DB connection
        try:
            self.connection = self.rdb.connect(
                host=self.config["data_base"]['host'],
                port=self.config["data_base"]['port'],
                db=self.config["data_base"]['db_name'])
        except RqlDriverError:
            self.logger.exception(
                "No database connection could be established.")
            exit(1)
Example #15
async def test_tornado_connect(io_loop):
    """
    Test the flow on Python 3.6 and up; async generators are
    not supported in 3.5.
    """

    r = RethinkDB()
    r.set_loop_type("tornado")

    connection = await r.connect(os.getenv("REBIRTHDB_HOST"))
    dbs = await r.db_list().run(connection)
    assert isinstance(dbs, list)
    await connection.close()
Example #16
    def __init__(self):
        self.dbname = APP_CONFIG['DB']['DB_NAME']
        self.tbmovie = 'movies'

        self.r = RethinkDB()
        try:
            self.conn = self.r.connect(host=APP_CONFIG['DB']['DB_HOST'],
                                       db=APP_CONFIG['DB']['DB_NAME'],
                                       user=APP_CONFIG['DB']['DB_USER'],
                                       password=APP_CONFIG['DB']['DB_PASS'],
                                       port=APP_CONFIG['DB']['DB_PORT'])
            self.create_db()
        except Exception as ex:
            print('DB_ERROR', ex)
Example #17
            def __enter__(myself):
                r = RethinkDB()
                if not hasattr(myself, 'conn') or not myself.conn  or not myself.conn.is_open():

                    myself.conn = r.connect(self.host, self.port, self.db, user=self.user, password=self.password)
                    self.log.debug(f"Connecting using connection: {myself}")

                if not myself.conn.is_open():
                    raise Exception("Cannot open a new rethinkdb connection...")

                self.log.debug(f"Repl-ing connection: {myself}")
                myself.conn.repl()

                return myself.conn
Example #18
def test_flow_couroutine_paradigm():

    r = RethinkDB()
    r.set_loop_type("asyncio")

    connection = yield from r.connect(os.getenv("REBIRTHDB_HOST"))

    try:
        yield from r.db_create(INTEGRATION_TEST_DB).run(connection)
    except ReqlRuntimeError:
        pass

    connection.use(INTEGRATION_TEST_DB)

    yield from r.table_create("marvel").run(connection)

    marvel_heroes = r.table('marvel')
    yield from marvel_heroes.insert({
        'id': 1,
        'name': 'Iron Man',
        'first_appearance': 'Tales of Suspense #39'
    }).run(connection)

    cursor = yield from marvel_heroes.run(connection)

    while (yield from cursor.fetch_next()):
        hero = yield from cursor.__anext__()
        assert hero['name'] == 'Iron Man'

    yield from connection.close()
Example #19
class PlatformDB(AsyncObject):
    '''
    Platform database module
    '''
    async def __init__(self, config={}):
        # Fetch configuration from Redis server
        host = config.get('host', 'localhost')
        port = config.get('port', 28015)
        db_name = config.get('db_name', 'platform')

        # Connect to RethinkDB
        self.db = RethinkDB()
        self.db.set_loop_type(library='asyncio')
        self.conn = await self.db.connect(host=host,
                                          port=port,
                                          db=db_name)

        # Videos table
        self.videos = self.db.table('videos')

    ###########################################################################
    # Insert a video
    ###########################################################################
    async def insert_video(self, data):
        return await self.videos.insert(data).run(self.conn)

    ###########################################################################
    # Get videos - ordered by time
    ###########################################################################
    async def get_videos(self, limit):
        return await self.videos.order_by(self.db.desc('timestamp'))\
            .limit(limit).run(self.conn)

    ###########################################################################
    # Get count of stored videos
    ###########################################################################
    async def get_videos_count(self):
        return await self.videos.count().run(self.conn)

    ###########################################################################
    # Search videos
    ###########################################################################
    async def search_videos(self, query_string, limit):
        query_string = '.*%s.*' % query_string.strip().lower()
        return await self.videos.filter(
            lambda video:
                video['title'].downcase().match(query_string) |
                video['description'].downcase().match(query_string))\
            .limit(limit).order_by(self.db.desc('timestamp')).run(self.conn)
Example #20
    async def __init__(self, config={}):
        # Fetch configuration from Redis server
        host = config.get('host', 'localhost')
        port = config.get('port', 28015)
        db_name = config.get('db_name', 'platform')

        # Connect to RethinkDB
        self.db = RethinkDB()
        self.db.set_loop_type(library='asyncio')
        self.conn = await self.db.connect(host=host,
                                          port=port,
                                          db=db_name)

        # Videos table
        self.videos = self.db.table('videos')
Example #21
    def mutate(root, info, id, vms=None, variables=None):
        ctx : ContextProtocol = info.context
        r = RethinkDB()
        table = r.db(opts.database).table(PlaybookLoader.PLAYBOOK_TABLE_NAME)
        data = table.get(id).pluck('id').coerce_to('array').run()
        if not data:
            raise ValueError(f"No such playbook: {id}")
        task_id = str(uuid.uuid4())
        task_list = PlaybookTaskList()
        task_list.upsert_task(ctx.user_authenticator, PlaybookTask(
            id=task_id, playbook_id=id, state=PlaybookTaskState.Preparing, message=""))
        tornado.ioloop.IOLoop.current().run_in_executor(ctx.executor,
                                                        lambda: launch_playbook(ctx, task_id, id, vms, variables))

        return PlaybookLaunchMutation(task_id=task_id)
Example #22
async def db_pool(nursery):
    r = RethinkDB()
    r.set_loop_type('trio')
    db_config = get_config()['database']
    db_pool = r.ConnectionPool(host=db_config['host'],
                               port=db_config['port'],
                               db='integration_testing',
                               user=db_config['super_user'],
                               password=db_config['super_password'],
                               nursery=nursery)
    async with db_pool.connection() as conn:
        await r.db_create('integration_testing').run(conn)
    yield db_pool
    async with db_pool.connection() as conn:
        await r.db_drop('integration_testing').run(conn)
    await db_pool.close()
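The fixture yields a connection pool; a consuming test would check connections out the same way the fixture itself does. A minimal sketch, assuming pytest-trio and a throwaway table name:

# Hypothetical test consuming the db_pool fixture above.
async def test_table_roundtrip(db_pool):
    r = RethinkDB()
    r.set_loop_type('trio')
    async with db_pool.connection() as conn:
        await r.table_create('scratch').run(conn)
        await r.table('scratch').insert({'id': 1}).run(conn)
        doc = await r.table('scratch').get(1).run(conn)
        assert doc['id'] == 1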
Example #23
    def _db_pool(self, nursery):
        '''
        Create a database connection pool.

        :param nursery: A Trio nursery to spawn database connections in.
        :returns: A RethinkDB connection pool.
        '''
        r = RethinkDB()
        r.set_loop_type('trio')
        db_config = self._config['database']
        return r.ConnectionPool(host=db_config['host'],
                                port=db_config['port'],
                                db=db_config['db'],
                                user=db_config['user'],
                                password=db_config['password'],
                                nursery=nursery)
Example #24
class Database(object):
    def __init__(self, url):
        self.r = RethinkDB()
        self.conn = self.r.connect(url, 28015)

    def client(self):
        return self.r

    def get_table(self, tbl):
        return self.r.db("olympus").table(tbl).run(self.conn)

    def get(self, tbl, key):
        return self.r.db("olympus").table(tbl).get(key).run(self.conn)

    def insert(self, tbl, obj):
        self.r.db("olympus").table(tbl).insert(obj, conflict="replace").run(
            self.conn)
Example #25
def connect() -> Tuple[RethinkDB, Any]:
    """Return a RethinkDB object + connection.

    You will usually not need to call this function. Rethink:Model uses
    it internally to open and close database connections.
    """
    rdb = RethinkDB()
    connection = rdb.connect(
        host=HOST,
        port=PORT,
        db=DB_NAME,
        user=USER,
        password=PASSWORD,
        timeout=TIMEOUT,
        ssl=SSL,
    )
    return rdb, connection
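A minimal sketch of a caller, using only what connect() returns:

# Hypothetical caller: open, query, close.
rdb, conn = connect()
try:
    print(rdb.table_list().run(conn))
finally:
    conn.close()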
Example #26
    def __init__(self, database='apscheduler', table='jobs', client=None,
                 pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
        super(RethinkDBJobStore, self).__init__()

        if not database:
            raise ValueError('The "database" parameter must not be empty')
        if not table:
            raise ValueError('The "table" parameter must not be empty')

        self.database = database
        self.table_name = table
        self.table = None
        self.client = client
        self.pickle_protocol = pickle_protocol
        self.connect_args = connect_args
        self.r = RethinkDB()
        self.conn = None
Example #27
class DB():
    def __init__(self):
        self.dbname = APP_CONFIG['DB']['DB_NAME']
        self.tbmovie = 'movies'

        self.r = RethinkDB()
        try:
            self.conn = self.r.connect(host=APP_CONFIG['DB']['DB_HOST'],
                                       db=APP_CONFIG['DB']['DB_NAME'],
                                       user=APP_CONFIG['DB']['DB_USER'],
                                       password=APP_CONFIG['DB']['DB_PASS'],
                                       port=APP_CONFIG['DB']['DB_PORT'])
            self.create_db()
        except Exception as ex:
            print('DB_ERROR', ex)

    def create_db(self):
        if not self.r.db_list().contains(self.dbname).run(self.conn):
            self.r.db_create(self.dbname).run(self.conn)

    def create_table(self):
        if not self.r.db(self.dbname).table_list().contains(self.tbmovie).run(
                self.conn):
            self.r.db(self.dbname).table_create(self.tbmovie).run(self.conn)

    def get_conn(self):
        return self.conn

    def get_r(self):
        return self.r
Example #28
def execute_query(module):
    results = list()

    try:
        r = RethinkDB()
        r.connect(host=module.params.get('host'),
                  port=module.params.get('port'),
                  user=module.params.get('user'),
                  password=module.params.get('password')).repl()
        query_result = eval("r.{0}.run()".format(module.params.get('query')))
        if type(query_result) is dict:
            results.append(query_result)
        else:
            for document in query_result:
                results.append(document)
        return results
    except Exception as e:
        module.fail_json(msg="Error: {0}".format(e))
Example #29
def main():
    argument_spec = dict(
        host=dict(required=True),
        port=dict(required=False, type=int, default=28015),
        user=dict(required=False, default="admin"),
        password=dict(required=False, default="", no_log=True),
        ssl=dict(required=False, type=dict, default=None),
        state=dict(required=False,
                   choices=["present", "absent"],
                   default="present"),
        database=dict(required=True),
    )

    module = AnsibleModule(argument_spec=argument_spec, )

    client = RethinkDB()
    _params = {
        "host": module.params["host"],
        "port": module.params["port"],
        "user": module.params["user"],
        "password": module.params["password"],
        "ssl": module.params["ssl"],
    }

    try:
        conn = client.connect(**_params)
        if module.params["state"].lower() == "present":
            _res = client.db_create(module.params["database"]).run(conn)
        else:
            _res = client.db_drop(module.params["database"]).run(conn)
        module.exit_json(changed=True, result=_res)
    except ReqlOpFailedError as e:
        if module.params["state"].lower() == "present" and "already exists" in e.message:
            module.exit_json(changed=False, result=e.message)
        elif module.params["state"].lower() == "absent" and "does not exist" in e.message:
            module.exit_json(changed=False, result=e.message)
        else:
            module.fail_json(msg=e.message)
    except ReqlAuthError as e:
        module.fail_json(msg=e.message)
    finally:
        conn.close(noreply_wait=False)
Example #30
def setup_database():
    r = RethinkDB()
    r.connect(host='localhost', port=28015).repl()

    try:
        # Create databases
        r.db_create('platform').run()
        # Create tables
        r.db('platform').table_create('videos', primary_key='video_id').run()
    except Exception:
        print('Database is already set up')
    else:
        print('Database setup successful')
Example #31
class Persistence():
    def __init__(self):
        self.r = RethinkDB()
        self.conn = self.r.connect("localhost", 28015)

    def insert(self, data):
        # add timestamp
        mydata = copy.deepcopy(data)
        mydata["time"] = int(time.time())
        self.last = mydata["time"]
        try:
            self.r.db('makers-covid').table('global').insert(mydata).run(
                self.conn)
        except RqlRuntimeError as err:
            print(err.message)

    def getLast(self):
        res = self.r.db('makers-covid').table('global').filter(
            self.r.row['time'] >= self.last).run(self.conn)
        return res.next()
Example #32
class RethinkDbDatabaseConnector(DatabaseConnector):
    def __init__(self):
        self.r = RethinkDB()
        self.conn = None

    def disconnect(self):
        self.conn.close()

    def connect(self, hostname, port, database):
        self.conn = self.r.connect(hostname, port, database)
        return self.r, self.conn
Example #33
class Rethinkdb:
    def __init__(self):
        self.__r = RethinkDB()
        self.__connector = self.__r.connect(config.host, config.port, db=config.db, user=config.user, password=config.password).repl()

    async def closeDB(self):
        if self.__connector:
            self.__connector.close()
    
    def getR(self):
        return self.__r
Example #34
def launch_playbook(ctx: ContextProtocol, task_id, playbook_id, vms: Optional[List], variables: Optional[Dict[str, Any]]):
    with ReDBConnection().get_connection():
        r = RethinkDB()
        class LaunchPlaybook(Loggable):
            """
            This class is used to provide logger for launch_playbook
            """
            task_list : PlaybookTaskList

            def __init__(self):
                self.task_list = PlaybookTaskList()
                self.init_log()

            def __repr__(self):
                return f'PlaybookLauncher <{task_id} ({playbook_id})>'
        launcher = LaunchPlaybook()
        log = launcher.log
        yaml = ruamel.yaml.YAML()


        log.debug("Checking access rights for VMs")
        def check_access(uuid):
            _vm = VM(ctx.user_authenticator, uuid=uuid)
            try:
                _vm.check_access("playbook")
            except XenAdapterUnauthorizedActionException:
                launcher.task_list.upsert_task(ctx.user_authenticator, PlaybookTask(
                    id=task_id, playbook_id=playbook_id, state=PlaybookTaskState.Error,
                    message=f"VM {_vm}: Access denied (for playbook launcher). Needs 'playbook' access"))
        if vms:
            for uuid in vms:
                check_access(uuid)
        else:
            vms = []
        try:
            table = r.db(opts.database).table(PlaybookLoader.PLAYBOOK_TABLE_NAME)
            playbook = table.get(playbook_id).run()

            temp_dir = tempfile.mkdtemp(prefix='vmemperor-playbook', suffix=playbook_id)
            log.debug(f"Creating temporary directory {temp_dir}")
            from distutils.dir_util import copy_tree
            playbook_dir = playbook['playbook_dir']
            log.debug(f"Copying {playbook_dir} into temporary directory")
            copy_tree(playbook_dir, temp_dir)
            temp_path = Path(temp_dir)
            vms_table = r.db(opts.database).table('vms')
            documents = vms_table.get_all(*vms).coerce_to('array').run()

            if not playbook['inventory']:
                hosts_file = 'hosts'
                yaml_hosts = {'all': {'hosts': {}}}
                for vm in documents:
                    for interface in vm['interfaces'].values():
                        network = Network(ctx.user_authenticator, ref=interface['network'])
                        if network.uuid not in opts.ansible_networks:
                            log.debug(f"{network} is not a network configured for Ansible. This is probably okay")
                            continue
                        if not 'ip' in interface or not interface['ip']:
                            log.warning(f"Could not get an IP to connect to VM {vm['uuid']}: {network}. This is probably not okay, install Xen drivers")
                            continue
                        yaml_hosts['all']['hosts'][vm['name_label']] = {
                            'ansible_user': '******',
                            'ansible_host': interface['ip']
                        }
                        break
                    else:
                        log.warning(
                            f"Ignoring VM {vm['uuid']}: not connected to any of 'ansible_networks'. Check your configuration")
                        launcher.task_list.upsert_task(ctx.user_authenticator, PlaybookTask(
                            id=task_id, playbook_id=playbook_id, state=PlaybookTaskState.ConfigurationWarning,
                            message=f"Could not connect to VM {vm['uuid']}: Not connected to Ansible network")
                        )

                if yaml_hosts['all']['hosts']:
                    # Create ansible execution task
                    with open(temp_path.joinpath(hosts_file), 'w') as file:
                        yaml.dump(yaml_hosts, file)
                        log.debug(f"Hosts file created at {file.name}")
                else:
                    log.error(f"No suitable VMs found")
                    shutil.rmtree(temp_dir, ignore_errors=True)
                    return
            else:
                hosts_file = playbook['inventory']

            log.debug("Patching variables files...")
            for location in playbook['variables_locations']:
                this_variables = {}
                for variable in playbook['variables_locations'][location]:
                    value = variables.get(variable, playbook['variables'][variable]['value'])
                    this_variables[variable] = value
                file_name = temp_path.joinpath(location, 'all')
                if this_variables:
                    log.debug(f"Loading file {file_name}")
                    with open(file_name, 'r') as file:
                        original_variables = yaml.load(file)
                    with open(file_name, 'w') as file:
                        yaml.dump({**original_variables, **this_variables}, file)

                    log.info(f'File {file_name} patched')

            cmd_line = [opts.ansible_playbook, '-i', hosts_file, playbook['playbook']]
            cwd = temp_path

            log_path = Path(opts.ansible_logs).joinpath(cwd.name)
            os.makedirs(log_path)
            with open(log_path.joinpath('stdout'), 'w') as _stdout:
                with open(log_path.joinpath('stderr'), 'w') as _stderr:

                    log.debug(f"Running {cmd_line} in {cwd}. Log path: {log_path}")
                    launcher.task_list.upsert_task(ctx.user_authenticator, PlaybookTask(id=task_id, playbook_id=playbook_id,
                                                                                        state=PlaybookTaskState.Running,
                                                                                        message=f"Task is currently running"))
                    proc = subprocess.run(cmd_line,
                                             cwd=cwd, stdout=_stdout, stderr=_stderr,
                                             env={"ANSIBLE_HOST_KEY_CHECKING": "False"})

                    return_code = proc.returncode
                    launcher.task_list.upsert_task(ctx.user_authenticator,
                                                   PlaybookTask(id=task_id, playbook_id=playbook_id,
                                                                state=PlaybookTaskState.Finished if return_code == 0 else PlaybookTaskState.Error,
                                                                message=f"Task is finished with exit code {return_code}"))

            log.info(f'Finished with return code {return_code}. Logs are available in {log_path}')
        except Exception as e:
            excString = str(e).replace('\n', ' ')
            launcher.task_list.upsert_task(ctx.user_authenticator,
                                           PlaybookTask(id=task_id, playbook_id=playbook_id, state=PlaybookTaskState.Error,
                                                        message=f"Exception: {excString}"))
            log.error(f"Exception: {excString}")
Example #35
    def __init__(self, playbook_name):
        from tornado.options import options as opts
        self.init_log()

        playbook_dir = Path(playbook_name)
        playbook_name = playbook_dir.name
        self.log.debug(f"Loading playbook {playbook_name} from {playbook_dir}")

        from rethinkdb import RethinkDB
        r = RethinkDB()

        try:
            if not playbook_dir.is_dir():
                raise ValueError(f'"{playbook_dir.absolute()}" does not exist or not a directory!')

            config_file = playbook_dir.joinpath(self._PLAYBOOK_VMEMPEROR_CONF)
            if not config_file.is_file():
                raise ValueError(f'"{config_file.absolute()}" does not exist or not a file!')

            with open(config_file) as file:
                config_dict = yaml.safe_load(file)

                self.config = config_dict

            # Fill optional config parameters
            for k,v  in self._DEFAULT_CONFIG.items():
                if k not in self.config:
                    self.config[k] = v


            # Find variables
            keys = self.config['variables'].keys()
            self.variables_locations = {}
            self.vars = {}
            for var in ('host_vars', 'group_vars'):
                self.variables_locations[var] = []
                host_vars_file = playbook_dir.joinpath(var, 'all')
                if not host_vars_file.is_file():
                    continue
                with open(host_vars_file) as file:
                    host_vars = yaml.safe_load(file)

                self.vars[var] = host_vars
                for key in keys:
                    try:
                        self.config['variables'][key] = {**self.config['variables'][key],
                        **{'value': host_vars[key]}}
                        self.variables_locations[var].append(key)
                    except KeyError:
                        continue

            # Check if playbook file exists
            playbook_file = playbook_dir.joinpath(self.config['playbook'])
            if not playbook_file.is_file():
                raise ValueError(f"Playbook file {playbook_file.absolute()} does not exist")


            self.config['playbook_dir'] = str(playbook_dir.absolute())
            self.config['id'] = playbook_dir.name
            self.config['variables_locations'] = self.variables_locations

            table = r.db(opts.database).table(self.PLAYBOOK_TABLE_NAME)
            table.insert(self.config).run()
            self.log.debug(f"Loaded playbook {self.config['id']}")
        except Exception as e:
            self.log.error(f"Exception: {e} at {traceback.print_exc()}")
Example #36
class RethinkDBJobStore(BaseJobStore):
    """
    Stores jobs in a RethinkDB database. Any leftover keyword arguments are directly passed to
    rethinkdb's `RethinkdbClient <http://www.rethinkdb.com/api/#connect>`_.

    Plugin alias: ``rethinkdb``

    :param str database: database to store jobs in
    :param str collection: collection to store jobs in
    :param client: a :class:`rethinkdb.net.Connection` instance to use instead of providing
        connection arguments
    :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
        highest available
    """

    def __init__(self, database='apscheduler', table='jobs', client=None,
                 pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
        super(RethinkDBJobStore, self).__init__()

        if not database:
            raise ValueError('The "database" parameter must not be empty')
        if not table:
            raise ValueError('The "table" parameter must not be empty')

        self.database = database
        self.table_name = table
        self.table = None
        self.client = client
        self.pickle_protocol = pickle_protocol
        self.connect_args = connect_args
        self.r = RethinkDB()
        self.conn = None

    def start(self, scheduler, alias):
        super(RethinkDBJobStore, self).start(scheduler, alias)

        if self.client:
            self.conn = maybe_ref(self.client)
        else:
            self.conn = self.r.connect(db=self.database, **self.connect_args)

        if self.database not in self.r.db_list().run(self.conn):
            self.r.db_create(self.database).run(self.conn)

        if self.table_name not in self.r.table_list().run(self.conn):
            self.r.table_create(self.table_name).run(self.conn)

        if 'next_run_time' not in self.r.table(self.table_name).index_list().run(self.conn):
            self.r.table(self.table_name).index_create('next_run_time').run(self.conn)

        self.table = self.r.db(self.database).table(self.table_name)

    def lookup_job(self, job_id):
        results = list(self.table.get_all(job_id).pluck('job_state').run(self.conn))
        return self._reconstitute_job(results[0]['job_state']) if results else None

    def get_due_jobs(self, now):
        return self._get_jobs(self.r.row['next_run_time'] <= datetime_to_utc_timestamp(now))

    def get_next_run_time(self):
        results = list(
            self.table
            .filter(self.r.row['next_run_time'] != None)  # noqa
            .order_by(self.r.asc('next_run_time'))
            .map(lambda x: x['next_run_time'])
            .limit(1)
            .run(self.conn)
        )
        return utc_timestamp_to_datetime(results[0]) if results else None

    def get_all_jobs(self):
        jobs = self._get_jobs()
        self._fix_paused_jobs_sorting(jobs)
        return jobs

    def add_job(self, job):
        job_dict = {
            'id': job.id,
            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
            'job_state': self.r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
        }
        results = self.table.insert(job_dict).run(self.conn)
        if results['errors'] > 0:
            raise ConflictingIdError(job.id)

    def update_job(self, job):
        changes = {
            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
            'job_state': self.r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
        }
        results = self.table.get_all(job.id).update(changes).run(self.conn)
        skipped = False in map(lambda x: results[x] == 0, results.keys())
        if results['skipped'] > 0 or results['errors'] > 0 or not skipped:
            raise JobLookupError(job.id)

    def remove_job(self, job_id):
        results = self.table.get_all(job_id).delete().run(self.conn)
        if results['deleted'] + results['skipped'] != 1:
            raise JobLookupError(job_id)

    def remove_all_jobs(self):
        self.table.delete().run(self.conn)

    def shutdown(self):
        self.conn.close()

    def _reconstitute_job(self, job_state):
        job_state = pickle.loads(job_state)
        job = Job.__new__(Job)
        job.__setstate__(job_state)
        job._scheduler = self._scheduler
        job._jobstore_alias = self._alias
        return job

    def _get_jobs(self, predicate=None):
        jobs = []
        failed_job_ids = []
        query = (self.table.filter(self.r.row['next_run_time'] != None).filter(predicate)  # noqa
                 if predicate else self.table)
        query = query.order_by('next_run_time', 'id').pluck('id', 'job_state')

        for document in query.run(self.conn):
            try:
                jobs.append(self._reconstitute_job(document['job_state']))
            except Exception:
                self._logger.exception('Unable to restore job "%s" -- removing it', document['id'])
                failed_job_ids.append(document['id'])

        # Remove all the jobs we failed to restore
        if failed_job_ids:
            self.r.expr(failed_job_ids).for_each(
                lambda job_id: self.table.get_all(job_id).delete()).run(self.conn)

        return jobs

    def __repr__(self):
        connection = self.conn
        return '<%s (connection=%s)>' % (self.__class__.__name__, connection)
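Given the plugin alias advertised in the docstring, a scheduler could be wired to this job store roughly as follows; the connection arguments and the job itself are illustrative:

# Hypothetical wiring of the RethinkDB job store into an APScheduler scheduler.
from apscheduler.schedulers.blocking import BlockingScheduler

def tick():
    print('tick')

scheduler = BlockingScheduler()
scheduler.add_jobstore('rethinkdb', database='apscheduler', table='jobs',
                       host='localhost', port=28015)
scheduler.add_job(tick, 'interval', seconds=30)
scheduler.start()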