Example #1
def mset(data):
    with NamedTemporaryFile('w') as tmp:
        utils.show_data(data)
        json.dump(data, tmp)
        tmp.flush()
        sh(f'{const.CLI} http post --quiet {utils.datastore_url()}/mset -f {tmp.name}')
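A hedged usage sketch for mset: Example #4 below calls it with a 'values' list whose entries carry 'namespace' and 'id' keys, so a payload would look roughly like this (field names beyond those two are illustrative):

# Hypothetical payload; the 'values' wrapper and the 'namespace'/'id' keys
# are taken from the calls in Example #4, everything else is made up.
mset({
    'values': [
        {'namespace': 'brewblox-ui-store:dashboards', 'id': 'dashboard-1', 'title': 'Home'},
    ],
})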
Example #2
def mset(data):
    with NamedTemporaryFile('w') as tmp:
        utils.show_data(data)
        json.dump(data, tmp)
        tmp.flush()
        sh('{} http post --quiet {}/mset -f {}'.format(const.CLI,
                                                       utils.datastore_url(),
                                                       tmp.name))
Example #3
def test_show_data(mocker):
    m_opts = mocker.patch(TESTED + '.ctx_opts').return_value
    m_secho = mocker.patch(TESTED + '.click.secho')

    utils.show_data('text')
    utils.show_data({'obj': True})
    assert m_secho.call_count == 2
    m_secho.assert_called_with(json.dumps({'obj': True}),
                               fg='blue',
                               color=m_opts.color)

    m_secho.reset_mock()
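    # ctx_opts is patched with a MagicMock, so dry_run and verbose start out truthy;
    # with both disabled, show_data is expected not to echo anything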
    m_opts.dry_run = False
    m_opts.verbose = False

    utils.show_data('text')
    assert m_secho.call_count == 0
Example #4
def load(archive,
         load_env,
         load_compose,
         load_datastore,
         load_spark,
         load_node_red,
         update):
    """Load and apply Brewblox settings backup.

    This function uses files generated by `brewblox-ctl backup save` as input.
    You can use the --load-XXXX options to partially load the backup.

    This does not attempt to merge data: it will overwrite current docker-compose.yml,
    datastore entries, and Spark blocks.

    Blocks on Spark services not in the backup file will not be affected.

    If dry-run is enabled, it will echo all configuration from the backup archive.

    Steps:
        - Write .env
        - Read .env values
        - Write docker-compose.yml, run `docker-compose up`.
        - Write all datastore files found in backup.
        - Write all Spark blocks found in backup.
        - Write Node-RED config files found in backup.
        - Run brewblox-ctl update
    """
    utils.check_config()
    utils.confirm_mode()
    urllib3.disable_warnings()

    sudo = utils.optsudo()
    host_url = utils.host_url()
    store_url = utils.datastore_url()

    zipf = zipfile.ZipFile(archive, 'r', zipfile.ZIP_DEFLATED)
    available = zipf.namelist()
    redis_file = 'global.redis.json'
    couchdb_files = [v for v in available if v.endswith('.datastore.json')]
    spark_files = [v for v in available if v.endswith('.spark.json')]
    node_red_files = [v for v in available if v.startswith('node-red/')]

    if load_env and '.env' in available:
        utils.info('Loading .env file')
        with NamedTemporaryFile('w') as tmp:
            data = zipf.read('.env').decode()
            utils.info('Writing .env')
            utils.show_data(data)
            tmp.write(data)
            tmp.flush()
            sh('cp -f {} .env'.format(tmp.name))

        utils.info('Reading .env values')
        load_dotenv(path.abspath('.env'))

    if load_compose:
        if 'docker-compose.yml' in available:
            utils.info('Loading docker-compose.yml')
            config = yaml.safe_load(zipf.read('docker-compose.yml'))
            # Older services may still depend on the `datastore` service
            # The `depends_on` config is useless anyway in a brewblox system
            for svc in config['services'].values():
                with suppress(KeyError):
                    del svc['depends_on']
            utils.write_compose(config)
            sh('{} docker-compose up -d --remove-orphans'.format(sudo))
        else:
            utils.info('docker-compose.yml file not found in backup archive')

    if load_datastore:
        if redis_file in available or couchdb_files:
            utils.info('Waiting for the datastore...')
            sh('{} http wait {}/ping'.format(const.CLI, store_url))
            # Wipe UI/Automation, but leave Spark files
            mdelete_cmd = '{} http post {}/mdelete --quiet -d \'{{"namespace":"{}", "filter":"*"}}\''
            sh(mdelete_cmd.format(const.CLI, store_url, 'brewblox-ui-store'))
            sh(mdelete_cmd.format(const.CLI, store_url, 'brewblox-automation'))
        else:
            utils.info('No datastore files found in backup archive')

        if redis_file in available:
            data = json.loads(zipf.read(redis_file).decode())
            utils.info('Loading {} entries from Redis datastore'.format(len(data['values'])))
            mset(data)

        # Backwards compatibility for UI/automation files from CouchDB
        # The IDs here are formatted as {moduleId}__{objId}
        # The module ID becomes part of the Redis namespace
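        # Example: a doc with _id 'dashboards__abc123' in 'brewblox-ui-store'
        # ends up as {'namespace': 'brewblox-ui-store:dashboards', 'id': 'abc123'}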
        for db in ['brewblox-ui-store', 'brewblox-automation']:
            fname = '{}.datastore.json'.format(db)
            if fname not in available:
                continue
            docs = json.loads(zipf.read(fname).decode())
            # Drop invalid names (not prefixed with module ID)
            docs[:] = [d for d in docs if len(d['_id'].split('__', 1)) == 2]
            # Add namespace / ID fields
            for d in docs:
                segments = d['_id'].split('__', 1)
                d['namespace'] = '{}:{}'.format(db, segments[0])
                d['id'] = segments[1]
                del d['_id']
            utils.info('Loading {} entries from database `{}`'.format(len(docs), db))
            mset({'values': docs})

        # Backwards compatibility for Spark service files
        # There is no module ID field here
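        # Example: a doc with _id 'spark-one-blocks' becomes
        # {'namespace': 'spark-service', 'id': 'spark-one-blocks'}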
        spark_db = 'spark-service'
        spark_fname = '{}.datastore.json'.format(spark_db)
        if spark_fname in available:
            docs = json.loads(zipf.read(spark_fname).decode())
            for d in docs:
                d['namespace'] = spark_db
                d['id'] = d['_id']
                del d['_id']
            utils.info('Loading {} entries from database `{}`'.format(len(docs), spark_db))
            mset({'values': docs})

    if load_spark:
        sudo = utils.optsudo()

        if not spark_files:
            utils.info('No Spark files found in backup archive')

        for f in spark_files:
            spark = f[:-len('.spark.json')]
            utils.info('Writing blocks to Spark service {}'.format(spark))
            with NamedTemporaryFile('w') as tmp:
                data = json.loads(zipf.read(f).decode())
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/blocks/backup/load -f {}'.format(const.CLI, host_url, spark, tmp.name))
                sh('{} docker-compose restart {}'.format(sudo, spark))

    if load_node_red and node_red_files:
        sudo = ''
        if [getgid(), getuid()] != [1000, 1000]:
            sudo = 'sudo '

        with TemporaryDirectory() as tmpdir:
            zipf.extractall(tmpdir, members=node_red_files)
            sh('mkdir -p ./node-red')
            sh('{}chown 1000:1000 ./node-red/'.format(sudo))
            sh('{}chown -R 1000:1000 {}'.format(sudo, tmpdir))
            sh('{}cp -rfp {}/node-red/* ./node-red/'.format(sudo, tmpdir))

    zipf.close()

    if update:
        utils.info('Updating brewblox...')
        sh('{} update'.format(const.CLI))

    utils.info('Done!')
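A minimal calling sketch for the function above. In practice the arguments come from the backup-load command's CLI flags (the --load-XXXX options mentioned in the docstring), so a direct call like this is purely illustrative; the archive name is hypothetical.

# Illustrative direct call; argument names match the signature above.
load('brewblox-backup.zip',
     load_env=True,
     load_compose=True,
     load_datastore=True,
     load_spark=True,
     load_node_red=True,
     update=False)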
Example #5
def load(archive, load_env, load_compose, load_datastore, load_spark, update):
    """Load and apply Brewblox settings backup.

    This function uses files generated by `brewblox-ctl backup save` as input.
    You can use the --load-XXXX options to partially load the backup.

    This does not attempt to merge data: it will overwrite current docker-compose.yml,
    datastore databases, and Spark blocks.

    Blocks on Spark services not in the backup file will not be affected.

    If dry-run is enabled, it will echo all configuration from the backup archive.

    Steps:
        - Write .env
        - Read .env values
        - Write docker-compose.yml, run `docker-compose up`.
        - Write all datastore files found in backup.
        - Write all Spark blocks found in backup.
        - Run brewblox-ctl update
    """
    utils.check_config()
    utils.confirm_mode()
    urllib3.disable_warnings()

    sudo = utils.optsudo()
    host_url = utils.host_url()
    store_url = utils.datastore_url()

    zipf = zipfile.ZipFile(archive, 'r', zipfile.ZIP_DEFLATED)
    available = zipf.namelist()
    datastore_files = [v for v in available if v.endswith('.datastore.json')]
    spark_files = [v for v in available if v.endswith('.spark.json')]

    if load_env and '.env' in available:
        with NamedTemporaryFile('w') as tmp:
            data = zipf.read('.env').decode()
            utils.info('Writing .env')
            utils.show_data(data)
            tmp.write(data)
            tmp.flush()
            sh('cp -f {} .env'.format(tmp.name))

        utils.info('Reading .env values')
        load_dotenv(path.abspath('.env'))

    if load_compose:
        if 'docker-compose.yml' in available:
            utils.info('Writing docker-compose.yml')
            utils.write_compose(yaml.safe_load(
                zipf.read('docker-compose.yml')))
            sh('{} docker-compose up -d --remove-orphans'.format(sudo))
        else:
            utils.info('docker-compose.yml file not found in backup archive')

    if load_datastore:
        if datastore_files:
            utils.info('Waiting for the datastore...')
            sh('{} http wait {}'.format(const.CLI, store_url))
        else:
            utils.info('No datastore files found in backup archive')

        for f in datastore_files:
            db = f[:-len('.datastore.json')]

            utils.info('Recreating database {}'.format(db))
            sh('{} http delete {}/{} --allow-fail'.format(
                const.CLI, store_url, db))
            sh('{} http put {}/{}'.format(const.CLI, store_url, db))

            utils.info('Writing database {}'.format(db))
            with NamedTemporaryFile('w') as tmp:
                data = {'docs': json.loads(zipf.read(f).decode())}
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/_bulk_docs -f {}'.format(
                    const.CLI, store_url, db, tmp.name))

    if load_spark:
        sudo = utils.optsudo()

        if not spark_files:
            utils.info('No Spark files found in backup archive')

        for f in spark_files:
            spark = f[:-len('.spark.json')]
            utils.info('Writing blocks to Spark service {}'.format(spark))
            with NamedTemporaryFile('w') as tmp:
                data = json.loads(zipf.read(f).decode())
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/import_objects -f {}'.format(
                    const.CLI, host_url, spark, tmp.name))
                sh('{} docker-compose restart {}'.format(sudo, spark))

    zipf.close()

    if update:
        utils.info('Updating brewblox...')
        sh('{} update'.format(const.CLI))

    utils.info('Done!')
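For reference, a sketch of the per-database payload this older variant posts to CouchDB's _bulk_docs endpoint. Only the {'docs': [...]} wrapper comes from the code above; the document contents are illustrative.

# Hypothetical _bulk_docs payload assembled from a 'brewblox-ui-store.datastore.json' entry.
data = {
    'docs': [
        {'_id': 'dashboards__abc123', 'title': 'Home'},
    ],
}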