Example #1
def mset(data):
    with NamedTemporaryFile('w') as tmp:
        utils.show_data(data)
        json.dump(data, tmp)
        tmp.flush()
        sh('{} http post --quiet {}/mset -f {}'.format(const.CLI,
                                                       utils.datastore_url(),
                                                       tmp.name))
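For context, the /mset payload shape used elsewhere in this file is {'values': [...]}, where each entry carries namespace and id fields. A minimal usage sketch with hypothetical entry contents:

mset({'values': [
    {'namespace': 'brewblox-ui-store:dashboards',  # hypothetical namespace
     'id': 'dashboard-home',                       # hypothetical ID
     'title': 'Home'},
]})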
Example #2
def test_get_urls(m_getenv):
    m_getenv.side_effect = [
        '1234',
        '4321',
    ]
    assert utils.history_url() == '{}:1234/history/history'.format(HOST)
    assert utils.datastore_url() == '{}:4321/history/datastore'.format(HOST)

    assert m_getenv.call_args_list == [
        call(HTTPS_PORT_KEY, '443'),
        call(HTTPS_PORT_KEY, '443'),
    ]
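The mocked getenv calls imply implementations along these lines (a sketch of the inferred behavior, not the actual brewblox-ctl source; HOST and HTTPS_PORT_KEY are the constants the test already uses):

from os import getenv

def history_url():
    # First getenv() call in the test: returns '1234'
    return '{}:{}/history/history'.format(HOST, getenv(HTTPS_PORT_KEY, '443'))

def datastore_url():
    # Second getenv() call in the test: returns '4321'
    return '{}:{}/history/datastore'.format(HOST, getenv(HTTPS_PORT_KEY, '443'))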
Example #3
def upped_migrate(prev_version):
    """Migration commands to be executed after the services have been started"""
    # Always run history configure
    history_url = utils.history_url()
    sh('{} http wait {}/ping'.format(const.CLI, history_url))
    sh('{} http post {}/query/configure'.format(const.CLI, history_url))

    # Ensure datastore system databases
    datastore_url = utils.datastore_url()
    sh('{} http wait {}'.format(const.CLI, datastore_url))
    sh('{} http put --allow-fail --quiet {}/_users'.format(
        const.CLI, datastore_url))
    sh('{} http put --allow-fail --quiet {}/_replicator'.format(
        const.CLI, datastore_url))
    sh('{} http put --allow-fail --quiet {}/_global_changes'.format(
        const.CLI, datastore_url))
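Note that prev_version is accepted but unused in this body. A hypothetical call site:

# E.g. invoked after `docker-compose up` during an update;
# any value works here, since prev_version is not inspected:
upped_migrate(prev_version='0.5.0')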
Example #4
def save(save_compose, ignore_spark_error):
    """Create a backup of Brewblox settings.

    A zip archive containing JSON/YAML files is created in the ./backup/ directory.
    The archive name will include the current date and time to ensure uniqueness.

    The backup is not exported to any kind of remote/cloud storage.

    To use this command in scripts, run it as `brewblox-ctl --quiet backup save`.
    Its only output to stdout will be the absolute path to the created backup.

    The command will fail if any of the Spark services cannot be contacted.

    As it does not make any destructive changes to configuration,
    this command is not affected by --dry-run.

    \b
    Stored data:
    - .env
    - docker-compose.yml.   (Optional)
    - Datastore databases.
    - Spark service blocks.
    - Node-RED data.

    \b
    NOT stored:
    - History data.

    """
    utils.check_config()
    urllib3.disable_warnings()

    file = 'backup/brewblox_backup_{}.zip'.format(
        datetime.now().strftime('%Y%m%d_%H%M'))
    with suppress(FileExistsError):
        mkdir(path.abspath('backup/'))

    store_url = utils.datastore_url()

    utils.info('Waiting for the datastore...')
    http.wait(store_url + '/ping', info_updates=True)

    config = utils.read_compose()
    sparks = [
        k for k, v in config['services'].items()
        if v.get('image', '').startswith('brewblox/brewblox-devcon-spark')
    ]
    zipf = zipfile.ZipFile(file, 'w', zipfile.ZIP_DEFLATED)

    # Always save .env
    utils.info('Exporting .env')
    zipf.write('.env')

    # Always save datastore
    utils.info('Exporting datastore')
    resp = requests.post(store_url + '/mget',
                         json={'namespace': '', 'filter': '*'},
                         verify=False)
    resp.raise_for_status()
    zipf.writestr('global.redis.json', resp.text)

    if save_compose:
        utils.info('Exporting docker-compose.yml')
        zipf.write('docker-compose.yml')

    for spark in sparks:
        utils.info("Exporting Spark blocks from '{}'".format(spark))
        resp = requests.post('{}/{}/blocks/backup/save'.format(utils.host_url(), spark), verify=False)
        try:
            resp.raise_for_status()
            zipf.writestr(spark + '.spark.json', resp.text)
        except Exception as ex:
            if ignore_spark_error:
                utils.info("Skipping Spark '{}' due to error: {}".format(spark, str(ex)))
            else:
                raise ex

    for fname in [*glob('node-red/*.js*'), *glob('node-red/lib/**/*.js*')]:
        zipf.write(fname)

    zipf.close()
    click.echo(path.abspath(file))
    utils.info('Done!')
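To check what save() produced, list the archive contents (the file name below is hypothetical):

import zipfile

with zipfile.ZipFile('backup/brewblox_backup_20210101_1200.zip') as zipf:
    # Expected entries: .env, global.redis.json, optionally
    # docker-compose.yml, one <service>.spark.json per Spark service,
    # plus any node-red/ files
    print(zipf.namelist())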
Example #5
def load(archive,
         load_env,
         load_compose,
         load_datastore,
         load_spark,
         load_node_red,
         update):
    """Load and apply Brewblox settings backup.

    This function uses files generated by `brewblox-ctl backup save` as input.
    You can use the --load-XXXX options to partially load the backup.

    This does not attempt to merge data: it will overwrite current docker-compose.yml,
    datastore entries, and Spark blocks.

    Blocks on Spark services not in the backup file will not be affected.

    If dry-run is enabled, it will echo all configuration from the backup archive.

    Steps:
        - Write .env
        - Read .env values
        - Write docker-compose.yml, run `docker-compose up`.
        - Write all datastore files found in backup.
        - Write all Spark blocks found in backup.
        - Write Node-RED config files found in backup.
        - Run brewblox-ctl update
    """
    utils.check_config()
    utils.confirm_mode()
    urllib3.disable_warnings()

    sudo = utils.optsudo()
    host_url = utils.host_url()
    store_url = utils.datastore_url()

    zipf = zipfile.ZipFile(archive, 'r', zipfile.ZIP_DEFLATED)
    available = zipf.namelist()
    redis_file = 'global.redis.json'
    couchdb_files = [v for v in available if v.endswith('.datastore.json')]
    spark_files = [v for v in available if v.endswith('.spark.json')]
    node_red_files = [v for v in available if v.startswith('node-red/')]

    if load_env and '.env' in available:
        utils.info('Loading .env file')
        with NamedTemporaryFile('w') as tmp:
            data = zipf.read('.env').decode()
            utils.info('Writing .env')
            utils.show_data(data)
            tmp.write(data)
            tmp.flush()
            sh('cp -f {} .env'.format(tmp.name))

        utils.info('Reading .env values')
        load_dotenv(path.abspath('.env'))

    if load_compose:
        if 'docker-compose.yml' in available:
            utils.info('Loading docker-compose.yml')
            config = yaml.safe_load(zipf.read('docker-compose.yml'))
            # Older services may still depend on the `datastore` service
            # The `depends_on` config is useless anyway in a brewblox system
            for svc in config['services'].values():
                with suppress(KeyError):
                    del svc['depends_on']
            utils.write_compose(config)
            sh('{} docker-compose up -d --remove-orphans'.format(sudo))
        else:
            utils.info('docker-compose.yml file not found in backup archive')

    if load_datastore:
        if redis_file in available or couchdb_files:
            utils.info('Waiting for the datastore...')
            sh('{} http wait {}/ping'.format(const.CLI, store_url))
            # Wipe UI/Automation, but leave Spark files
            mdelete_cmd = '{} http post {}/mdelete --quiet -d \'{{"namespace":"{}", "filter":"*"}}\''
            sh(mdelete_cmd.format(const.CLI, store_url, 'brewblox-ui-store'))
            sh(mdelete_cmd.format(const.CLI, store_url, 'brewblox-automation'))
        else:
            utils.info('No datastore files found in backup archive')

        if redis_file in available:
            data = json.loads(zipf.read(redis_file).decode())
            utils.info('Loading {} entries from Redis datastore'.format(len(data['values'])))
            mset(data)

        # Backwards compatibility for UI/automation files from CouchDB
        # The IDs here are formatted as {moduleId}__{objId}
        # The module ID becomes part of the Redis namespace
        for db in ['brewblox-ui-store', 'brewblox-automation']:
            fname = '{}.datastore.json'.format(db)
            if fname not in available:
                continue
            docs = json.loads(zipf.read(fname).decode())
            # Drop invalid names (not prefixed with module ID)
            docs[:] = [d for d in docs if len(d['_id'].split('__', 1)) == 2]
            # Add namespace / ID fields
            for d in docs:
                segments = d['_id'].split('__', 1)
                d['namespace'] = '{}:{}'.format(db, segments[0])
                d['id'] = segments[1]
                del d['_id']
            utils.info('Loading {} entries from database `{}`'.format(len(docs), db))
            mset({'values': docs})

        # Backwards compatibility for Spark service files
        # There is no module ID field here
        spark_db = 'spark-service'
        spark_fname = '{}.datastore.json'.format(spark_db)
        if spark_fname in available:
            docs = json.loads(zipf.read(spark_fname).decode())
            for d in docs:
                d['namespace'] = spark_db
                d['id'] = d['_id']
                del d['_id']
            utils.info('Loading {} entries from database `{}`'.format(len(docs), spark_db))
            mset({'values': docs})

    if load_spark:
        sudo = utils.optsudo()

        if not spark_files:
            utils.info('No Spark files found in backup archive')

        for f in spark_files:
            spark = f[:-len('.spark.json')]
            utils.info('Writing blocks to Spark service {}'.format(spark))
            with NamedTemporaryFile('w') as tmp:
                data = json.loads(zipf.read(f).decode())
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/blocks/backup/load -f {}'.format(const.CLI, host_url, spark, tmp.name))
                sh('{} docker-compose restart {}'.format(sudo, spark))

    if load_node_red and node_red_files:
        sudo = ''
        if [getgid(), getuid()] != [1000, 1000]:
            sudo = 'sudo '

        with TemporaryDirectory() as tmpdir:
            zipf.extractall(tmpdir, members=node_red_files)
            sh('mkdir -p ./node-red')
            sh('{}chown 1000:1000 ./node-red/'.format(sudo))
            sh('{}chown -R 1000:1000 {}'.format(sudo, tmpdir))
            sh('{}cp -rfp {}/node-red/* ./node-red/'.format(sudo, tmpdir))

    zipf.close()

    if update:
        utils.info('Updating brewblox...')
        sh('{} update'.format(const.CLI))

    utils.info('Done!')
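The CouchDB compatibility shims above reduce to one transform: split the legacy '{moduleId}__{objId}' _id into a Redis namespace and id. A standalone sketch:

def convert_couchdb_doc(db, doc):
    # Split the legacy CouchDB _id into Redis namespace + id fields
    module_id, obj_id = doc.pop('_id').split('__', 1)
    doc['namespace'] = '{}:{}'.format(db, module_id)
    doc['id'] = obj_id
    return doc

# convert_couchdb_doc('brewblox-ui-store', {'_id': 'dashboards__home'})
# -> {'namespace': 'brewblox-ui-store:dashboards', 'id': 'home'}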
Example #6
def migrate_couchdb():
    urllib3.disable_warnings()
    sudo = utils.optsudo()
    opts = utils.ctx_opts()
    redis_url = utils.datastore_url()
    couch_url = 'http://localhost:5984'

    utils.info('Migrating datastore from CouchDB to Redis...')

    if opts.dry_run:
        utils.info('Dry run. Skipping migration...')
        return

    if not utils.path_exists('./couchdb/'):
        utils.info('couchdb/ dir not found. Skipping migration...')
        return

    utils.info('Starting a temporary CouchDB container on port 5984...')
    sh(f'{sudo}docker rm -f couchdb-migrate', check=False)
    sh(f'{sudo}docker run --rm -d'
       ' --name couchdb-migrate'
       ' -v "$(pwd)/couchdb/:/opt/couchdb/data/"'
       ' -p "5984:5984"'
       ' treehouses/couchdb:2.3.1')
    sh(f'{const.CLI} http wait {couch_url}')
    sh(f'{const.CLI} http wait {redis_url}/ping')

    resp = requests.get(f'{couch_url}/_all_dbs')
    resp.raise_for_status()
    dbs = resp.json()

    for db in ['brewblox-ui-store', 'brewblox-automation']:
        if db in dbs:
            resp = requests.get(f'{couch_url}/{db}/_all_docs',
                                params={'include_docs': True})
            resp.raise_for_status()
            docs = [v['doc'] for v in resp.json()['rows']]
            # Drop invalid names
            docs[:] = [d for d in docs if len(d['_id'].split('__', 1)) == 2]
            for d in docs:
                segments = d['_id'].split('__', 1)
                d['namespace'] = f'{db}:{segments[0]}'
                d['id'] = segments[1]
                del d['_rev']
                del d['_id']
            resp = requests.post(f'{redis_url}/mset',
                                 json={'values': docs},
                                 verify=False)
            resp.raise_for_status()
            utils.info(f'Migrated {len(docs)} entries from {db}')

    if 'spark-service' in dbs:
        resp = requests.get(f'{couch_url}/spark-service/_all_docs',
                            params={'include_docs': True})
        resp.raise_for_status()
        docs = [v['doc'] for v in resp.json()['rows']]
        for d in docs:
            d['namespace'] = 'spark-service'
            d['id'] = d['_id']
            del d['_rev']
            del d['_id']
        resp = requests.post(f'{redis_url}/mset',
                             json={'values': docs},
                             verify=False)
        resp.raise_for_status()
        utils.info(f'Migrated {len(docs)} entries from spark-service')

    sh(f'{sudo}docker stop couchdb-migrate')
    sh('sudo mv couchdb/ couchdb-migrated-' +
       datetime.now().strftime('%Y%m%d'))
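The doc extraction in this migration relies on the standard CouchDB _all_docs response shape:

# GET {couch_url}/{db}/_all_docs?include_docs=true returns, roughly:
# {'total_rows': ..., 'offset': 0, 'rows': [
#     {'id': ..., 'key': ..., 'value': {'rev': ...},
#      'doc': {'_id': ..., '_rev': ..., <document fields>}},
# ]}
docs = [v['doc'] for v in resp.json()['rows']]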
Example #7
def setup(port_check):
    """Run first-time setup in Brewblox directory.

    Run after brewblox-ctl install, in the newly created Brewblox directory.
    This will create all required configuration files for your system.

    You can safely use this command to partially reset your system.
    Before making any changes, it will check for existing files,
    and prompt if any are found. It will do so separately for docker-compose,
    datastore, history, and gateway files.
    Choose to skip any, and the others will still be created and configured.

    \b
    Steps:
        - Check whether files already exist.
        - Set .env values.
        - Create docker-compose configuration files. (Optional)
        - Pull docker images.
        - Create datastore (CouchDB) directory.      (Optional)
        - Create history (InfluxDB) directory.       (Optional)
        - Create gateway (Traefik) directory.        (Optional)
        - Create SSL certificates.                   (Optional)
        - Start and configure services.              (Optional)
        - Stop all services.
        - Set version number in .env.
    """
    utils.check_config()
    utils.confirm_mode()

    sudo = utils.optsudo()
    datastore_url = utils.datastore_url()
    history_url = utils.history_url()
    upped_services = ['traefik', 'influx', 'history']
    preset_modules = ['services', 'dashboards', 'dashboard-items']

    if port_check:
        check_ports()

    skip_compose = \
        utils.path_exists('./docker-compose.yml') \
        and utils.confirm('This directory already contains a docker-compose.yml file. ' +
                          'Do you want to keep it?')

    skip_datastore = \
        utils.path_exists('./couchdb/') \
        and utils.confirm('This directory already contains Couchdb datastore files. ' +
                          'Do you want to keep them?')

    skip_history = \
        utils.path_exists('./influxdb/') \
        and utils.confirm('This directory already contains Influx history files. ' +
                          'Do you want to keep them?')

    skip_gateway = \
        utils.path_exists('./traefik/') \
        and utils.confirm('This directory already contains Traefik gateway files. ' +
                          'Do you want to keep them?')

    utils.info('Setting .env values...')
    for key, default_val in const.ENV_DEFAULTS.items():
        utils.setenv(key, utils.getenv(key, default_val))

    if not skip_compose:
        utils.info('Copying configuration...')
        sh('cp -f {}/* ./'.format(const.CONFIG_DIR))

    # Stop and pull after we're sure we have a compose file
    utils.info('Stopping services...')
    sh('{}docker-compose down --remove-orphans'.format(sudo))
    utils.info('Pulling docker images...')
    sh('{}docker-compose pull'.format(sudo))

    if not skip_datastore:
        utils.info('Creating datastore directory...')
        upped_services.append('datastore')
        sh('sudo rm -rf ./couchdb/; mkdir ./couchdb/')

    if not skip_history:
        utils.info('Creating history directory...')
        sh('sudo rm -rf ./influxdb/; mkdir ./influxdb/')

    if not skip_gateway:
        utils.info('Creating gateway directory...')
        sh('sudo rm -rf ./traefik/; mkdir ./traefik/')

        utils.info('Creating SSL certificate...')
        sh('{}docker run --rm -v "$(pwd)"/traefik/:/certs/ '.format(sudo) +
           'brewblox/omgwtfssl:{}'.format(utils.docker_tag()))
        sh('sudo chmod 644 traefik/brewblox.crt')
        sh('sudo chmod 600 traefik/brewblox.key')

    # Start the services that we will send configuration to
    utils.info('Starting configured services...')
    sh('{}docker-compose up -d --remove-orphans {}'.format(
        sudo, ' '.join(upped_services)))

    if not skip_datastore:
        # Generic datastore setup
        utils.info('Configuring datastore settings...')
        sh('{} http wait {}'.format(const.CLI, datastore_url))
        sh('{} http put {}/_users'.format(const.CLI, datastore_url))
        sh('{} http put {}/_replicator'.format(const.CLI, datastore_url))
        sh('{} http put {}/_global_changes'.format(const.CLI, datastore_url))
        sh('{} http put {}/{}'.format(const.CLI, datastore_url,
                                      const.UI_DATABASE))
        # Load presets
        utils.info('Loading preset data...')
        for mod in preset_modules:
            sh('{} http post {}/{}/_bulk_docs -f {}/{}.json'.format(
                const.CLI, datastore_url, const.UI_DATABASE, const.PRESETS_DIR,
                mod))

    # Always setup history
    utils.info('Configuring history settings...')
    sh('{} http wait {}/ping'.format(const.CLI, history_url))
    sh('{} http post {}/query/configure'.format(const.CLI, history_url))

    # Setup is done - leave system in stable state
    utils.info('Stopping services...')
    sh('{}docker-compose down'.format(utils.optsudo()))

    # Setup is complete and ok - now set CFG version
    utils.setenv(const.CFG_VERSION_KEY, const.CURRENT_VERSION)
    utils.info('All done!')
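The four skip_* prompts above share one pattern; a possible helper (a sketch, not part of brewblox-ctl):

def confirm_keep(pathname, desc):
    # True if files already exist and the user chooses to keep them
    msg = 'This directory already contains {}. Do you want to keep it?'.format(desc)
    return utils.path_exists(pathname) and utils.confirm(msg)

# skip_compose = confirm_keep('./docker-compose.yml', 'a docker-compose.yml file')
# skip_gateway = confirm_keep('./traefik/', 'Traefik gateway files')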
Example #8
def save(save_compose, ignore_spark_error):
    """Create a backup of Brewblox settings.

    A zip archive containing JSON/YAML files is created in the ./backup/ directory.
    The archive name will include the current date and time to ensure uniqueness.

    The backup is not exported to any kind of remote/cloud storage.

    To use this command in scripts, run it as `brewblox-ctl --quiet backup save`.
    Its only output to stdout will be the absolute path to the created backup.

    The command will fail if any of the Spark services cannot be contacted.

    As it does not make any destructive changes to configuration,
    this command is not affected by --dry-run.

    \b
    Stored data:
    - .env
    - docker-compose.yml.   (Optional)
    - Datastore databases.
    - Spark service blocks.

    \b
    NOT stored:
    - History data.

    """
    utils.check_config()
    urllib3.disable_warnings()

    file = 'backup/brewblox_backup_{}.zip'.format(
        datetime.now().strftime('%Y%m%d_%H%M'))
    with suppress(FileExistsError):
        mkdir(path.abspath('backup/'))

    url = utils.datastore_url()

    utils.info('Waiting for the datastore...')
    http.wait(url, info_updates=True)
    resp = requests.get(url + '/_all_dbs', verify=False)
    resp.raise_for_status()
    dbs = [v for v in resp.json() if not v.startswith('_')]

    config = utils.read_compose()
    sparks = [
        k for k, v in config['services'].items()
        if v.get('image', '').startswith('brewblox/brewblox-devcon-spark')
    ]
    zipf = zipfile.ZipFile(file, 'w', zipfile.ZIP_DEFLATED)

    utils.info('Exporting .env')
    zipf.write('.env')

    if save_compose:
        utils.info('Exporting docker-compose.yml')
        zipf.write('docker-compose.yml')

    utils.info('Exporting databases: {}'.format(', '.join(dbs)))
    for db in dbs:
        resp = requests.get('{}/{}/_all_docs'.format(url, db),
                            params={'include_docs': True},
                            verify=False)
        resp.raise_for_status()
        docs = [v['doc'] for v in resp.json()['rows']]
        for d in docs:
            del d['_rev']
        zipf.writestr(db + '.datastore.json', json.dumps(docs))

    for spark in sparks:
        utils.info("Exporting Spark blocks from '{}'".format(spark))
        resp = requests.get(
            '{}/{}/export_objects'.format(utils.host_url(), spark),
            verify=False)
        try:
            resp.raise_for_status()
            zipf.writestr(spark + '.spark.json', resp.text)
        except Exception as ex:
            if ignore_spark_error:
                utils.info("Skipping '{}' due to error: {}".format(
                    spark, str(ex)))
            else:
                raise ex

    zipf.close()
    click.echo(path.abspath(file))
    utils.info('Done!')
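_rev is stripped from exported docs because CouchDB's _bulk_docs treats documents that carry a _rev as revision-checked updates, and the fresh databases recreated by load() have no matching revisions. A minimal illustration:

# Exported doc, before and after stripping the CouchDB revision marker:
doc = {'_id': 'dashboards__home', '_rev': '3-abc123', 'title': 'Home'}
del doc['_rev']
# {'_id': 'dashboards__home', 'title': 'Home'} imports cleanly via _bulk_docs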
Example #9
def load(archive, load_env, load_compose, load_datastore, load_spark, update):
    """Load and apply Brewblox settings backup.

    This function uses files generated by `brewblox-ctl backup save` as input.
    You can use the --load-XXXX options to partially load the backup.

    This does not attempt to merge data: it will overwrite current docker-compose.yml,
    datastore databases, and Spark blocks.

    Blocks on Spark services not in the backup file will not be affected.

    If dry-run is enabled, it will echo all configuration from the backup archive.

    Steps:
        - Write .env
        - Read .env values
        - Write docker-compose.yml, run `docker-compose up`.
        - Write all datastore files found in backup.
        - Write all Spark blocks found in backup.
        - Run brewblox-ctl update
    """
    utils.check_config()
    utils.confirm_mode()
    urllib3.disable_warnings()

    sudo = utils.optsudo()
    host_url = utils.host_url()
    store_url = utils.datastore_url()

    zipf = zipfile.ZipFile(archive, 'r', zipfile.ZIP_DEFLATED)
    available = zipf.namelist()
    datastore_files = [v for v in available if v.endswith('.datastore.json')]
    spark_files = [v for v in available if v.endswith('.spark.json')]

    if load_env and '.env' in available:
        with NamedTemporaryFile('w') as tmp:
            data = zipf.read('.env').decode()
            utils.info('Writing .env')
            utils.show_data(data)
            tmp.write(data)
            tmp.flush()
            sh('cp -f {} .env'.format(tmp.name))

        utils.info('Reading .env values')
        load_dotenv(path.abspath('.env'))

    if load_compose:
        if 'docker-compose.yml' in available:
            utils.info('Writing docker-compose.yml')
            utils.write_compose(yaml.safe_load(
                zipf.read('docker-compose.yml')))
            sh('{} docker-compose up -d --remove-orphans'.format(sudo))
        else:
            utils.info('docker-compose.yml file not found in backup archive')

    if load_datastore:
        if datastore_files:
            utils.info('Waiting for the datastore...')
            sh('{} http wait {}'.format(const.CLI, store_url))
        else:
            utils.info('No datastore files found in backup archive')

        for f in datastore_files:
            db = f[:-len('.datastore.json')]

            utils.info('Recreating database {}'.format(db))
            sh('{} http delete {}/{} --allow-fail'.format(
                const.CLI, store_url, db))
            sh('{} http put {}/{}'.format(const.CLI, store_url, db))

            utils.info('Writing database {}'.format(db))
            with NamedTemporaryFile('w') as tmp:
                data = {'docs': json.loads(zipf.read(f).decode())}
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/_bulk_docs -f {}'.format(
                    const.CLI, store_url, db, tmp.name))

    if load_spark:
        sudo = utils.optsudo()

        if not spark_files:
            utils.info('No Spark files found in backup archive')

        for f in spark_files:
            spark = f[:-len('.spark.json')]
            utils.info('Writing blocks to Spark service {}'.format(spark))
            with NamedTemporaryFile('w') as tmp:
                data = json.loads(zipf.read(f).decode())
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/import_objects -f {}'.format(
                    const.CLI, host_url, spark, tmp.name))
                sh('{} docker-compose restart {}'.format(sudo, spark))

    zipf.close()

    if update:
        utils.info('Updating brewblox...')
        sh('{} update'.format(const.CLI))

    utils.info('Done!')
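Hypothetical invocations (flag spellings are inferred from the load_* parameter names; check `brewblox-ctl backup load --help` for the real ones):

# brewblox-ctl backup load brewblox_backup_20210101_1200.zip
# brewblox-ctl backup load <archive> --no-load-compose
#
# Or, if load() is available as a plain function:
load('backup/brewblox_backup_20210101_1200.zip',
     load_env=True, load_compose=True, load_datastore=True,
     load_spark=True, update=False)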