Example #1
def log(add_compose, add_system, upload):
    """Generate and share log file for bug reports.

    This command generates a comprehensive report on current system state and logs.
    When reporting bugs, a termbin link to the output is often the first thing asked for.

    For best results, run when the services are still active.
    Service logs are discarded after `brewblox-ctl down`.

    Care is taken to prevent accidental leaks of confidential information.
    Only known variables are read from .env,
    and the `--no-add-compose` flag allows skipping compose configuration.
    The latter is useful if the configuration contains passwords or tokens.

    To review or edit the output, use the `--no-upload` flag.
    The output will include instructions on how to manually upload the file.

    \b
    Steps:
        - Create ./brewblox.log file.
        - Append Brewblox .env variables.
        - Append software version info.
        - Append service logs.
        - Append content of docker-compose.yml (optional).
        - Append content of docker-compose.shared.yml (optional).
        - Append blocks from Spark services.
        - Append system diagnostics.
        - Upload file to termbin.com for shareable link (optional).
    """
    utils.check_config()
    utils.confirm_mode()
    sudo = utils.optsudo()

    # Create log
    utils.info(f"Log file: {path.abspath('./brewblox.log')}")
    create()
    append('date')

    # Add .env values
    utils.info('Writing Brewblox .env values...')
    header('.env')
    for key in ENV_KEYS:
        append(f'echo "{key}={utils.getenv(key)}"')

    # Add version info
    utils.info('Writing software version info...')
    header('Versions')
    append('uname -a')
    append(f'{const.PY} --version')
    append(f'{sudo}docker --version')
    append(f'{sudo}docker-compose --version')

    # Add active containers
    utils.info('Writing active containers...')
    header('Containers')
    append(f'{sudo}docker-compose ps -a')

    # Add service logs
    try:
        config_names = list(utils.read_compose()['services'].keys())
        shared_names = list(utils.read_shared_compose()['services'].keys())
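        # User-defined services first; shared services appended last, without duplicates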
        names = [n for n in config_names if n not in shared_names] + shared_names
        for name in names:
            utils.info(f'Writing {name} service logs...')
            header(f'Service: {name}')
            append(f'{sudo}docker-compose logs --timestamps --no-color --tail 200 {name}')
    except Exception as ex:
        append('echo ' + shlex.quote(type(ex).__name__ + ': ' + str(ex)))

    # Add compose config
    if add_compose:
        utils.info('Writing docker-compose configuration...')
        header('docker-compose.yml')
        append('cat docker-compose.yml')
        header('docker-compose.shared.yml')
        append('cat docker-compose.shared.yml')
    else:
        utils.info('Skipping docker-compose configuration...')

    # Add blocks
    host_url = utils.host_url()
    services = utils.list_services('brewblox/brewblox-devcon-spark')
    for svc in services:
        utils.info(f'Writing {svc} blocks...')
        header(f'Blocks: {svc}')
        append(f'{const.CLI} http post --pretty {host_url}/{svc}/blocks/all/read')

    # Add system diagnostics
    if add_system:
        utils.info('Writing system diagnostics...')
        header('docker info')
        append(f'{sudo}docker info')
        header('disk usage')
        append('df -hl')
        header('/proc/net/dev')
        append('column -t /proc/net/dev')
        header('/var/log/syslog')
        append('sudo tail -n 500 /var/log/syslog')
        header('dmesg')
        append('dmesg -T')
    else:
        utils.info('Skipping system diagnostics...')

    # Upload
    if upload:
        utils.info('Uploading brewblox.log to termbin.com...')
        sh('cat brewblox.log | nc termbin.com 9999')
    else:
        utils.info('Skipping upload. If you want to manually upload the log, run: ' +
                   click.style('cat brewblox.log | nc termbin.com 9999', fg='green'))
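
Example #1 relies on three helpers (create(), header(), append()) that are defined elsewhere in the module. A minimal sketch of plausible implementations, inferred from the inline shell redirection Example #2 uses for the same job; the sh() import path, the log path constant, and the exact divider format are assumptions:

from brewblox_ctl.utils import sh  # assumed import path for the sh() helper

LOG = './brewblox.log'  # assumed log path, matching the docstring


def create():
    # Truncate/create the log file, mirroring the inline `>` redirect in Example #2
    sh(f'echo "BREWBLOX DIAGNOSTIC DUMP" > {LOG}')


def header(name):
    # Write a section divider so the uploaded log is easy to scan
    sh(f'echo "\\n========== {name} ==========" >> {LOG}')


def append(cmd):
    # Run a shell command and append its stdout and stderr to the log
    sh(f'{cmd} >> {LOG} 2>&1')
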
Example #2
def log(add_compose, upload):
    """Generate and share log file for bug reports.

    This command generates a comprehensive report on current system state and logs.
    When reporting bugs, a termbin link to the output is often the first thing asked for.

    For best results, run when the services are still active.
    Service logs are discarded after `brewblox-ctl down`.

    Care is taken to prevent accidental leaks of confidential information.
    Only known variables are read from .env,
    and the `--no-add-compose` flag allows skipping compose configuration.
    The latter is useful if the configuration contains passwords or tokens.

    To review or edit the output, use the `--no-upload` flag.
    The output will include instructions on how to manually upload the file.

    \b
    Steps:
        - Create ./brewblox.log file.
        - Append Brewblox .env variables.
        - Append software version info.
        - Append service logs.
        - Append content of docker-compose.yml (optional).
        - Append content of docker-compose.shared.yml (optional).
        - Append blocks from Spark services.
        - Upload file to termbin.com for shareable link (optional).
    """
    utils.check_config()
    utils.confirm_mode()
    sudo = utils.optsudo()

    # Create log
    utils.info('Log file: {}'.format(path.abspath('./brewblox.log')))
    sh('echo "BREWBLOX DIAGNOSTIC DUMP" > brewblox.log')
    sh('date >> brewblox.log')

    # Add version info and .env values
    utils.info('Writing Brewblox .env values...')
    sh('echo "==============VARS==============" >> brewblox.log')
    sh('echo "$(uname -a)" >> brewblox.log')
    sh('echo "$({}docker --version)" >> brewblox.log'.format(sudo))
    sh('echo "$({}docker-compose --version)" >> brewblox.log'.format(sudo))
    sh('echo "{}={}" >> brewblox.log'.format(key, utils.getenv(key)) for key in ENV_KEYS)

    # Add service logs
    utils.info('Writing service logs...')
    sh('echo "==============LOGS==============" >> brewblox.log')
    try:
        config_names = list(utils.read_compose()['services'].keys())
        shared_names = list(utils.read_shared_compose()['services'].keys())
        names = [n for n in config_names if n not in shared_names] + shared_names
        raw_cmd = '{}docker-compose logs --timestamps --no-color --tail 200 {} >> brewblox.log; ' + \
            "echo '\\n' >> brewblox.log"
        sh(raw_cmd.format(sudo, name) for name in names)
    except Exception as ex:
        sh('echo {} >> brewblox.log'.format(shlex.quote(type(ex).__name__ + ': ' + str(ex))))

    # Add compose config
    if add_compose:
        utils.info('Writing docker-compose configuration...')
        sh('echo "==============COMPOSE==============" >> brewblox.log')
        sh('cat docker-compose.yml >> brewblox.log')
        sh('echo "==============SHARED===============" >> brewblox.log')
        sh('cat docker-compose.shared.yml >> brewblox.log')
    else:
        utils.info('Skipping docker-compose configuration...')

    # Add blocks
    utils.info('Writing Spark blocks...')
    sh('echo "==============BLOCKS==============" >> brewblox.log')
    host_url = utils.host_url()
    services = utils.list_services('brewblox/brewblox-devcon-spark')
    query = '{} http get --pretty {}/{}/objects >> brewblox.log || echo "{} not found" >> brewblox.log'
    sh(query.format(const.CLI, host_url, svc, svc) for svc in services)

    # Upload
    if upload:
        utils.info('Uploading brewblox.log to termbin.com...')
        sh('cat brewblox.log | nc termbin.com 9999')
    else:
        utils.info('Skipping upload. If you want to manually upload the log, run: ' +
                   click.style('cat brewblox.log | nc termbin.com 9999', fg='green'))
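
Note that sh() in this example is called both with a single command string and with a generator expression yielding several commands. A rough sketch of a helper with that contract, assuming plain subprocess semantics (the real brewblox-ctl helper also handles dry-run and verbose modes, omitted here):

import subprocess
from typing import Iterable, Union


def sh(cmd: Union[str, Iterable[str]], check: bool = True):
    # Accept either one shell command or an iterable of commands
    commands = [cmd] if isinstance(cmd, str) else cmd
    for c in commands:
        # shell=True so redirects such as `>> brewblox.log` work as written
        subprocess.run(c, shell=True, check=check)
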
Example #3
def save(save_compose, ignore_spark_error):
    """Create a backup of Brewblox settings.

    A zip archive containing JSON/YAML files is created in the ./backup/ directory.
    The archive name will include the current date and time to ensure uniqueness.

    The backup is not exported to any kind of remote/cloud storage.

    To use this command in scripts, run it as `brewblox-ctl --quiet backup save`.
    Its only output to stdout will be the absolute path to the created backup.

    The command will fail if any of the Spark services cannot be contacted.

    As it does not make any destructive changes to configuration,
    this command is not affected by --dry-run.

    \b
    Stored data:
    - .env
    - docker-compose.yml (optional).
    - Datastore databases.
    - Spark service blocks.
    - Node-RED data.

    \b
    NOT stored:
    - History data.

    """
    utils.check_config()
    urllib3.disable_warnings()

    file = 'backup/brewblox_backup_{}.zip'.format(datetime.now().strftime('%Y%m%d_%H%M'))
    with suppress(FileExistsError):
        mkdir(path.abspath('backup/'))

    store_url = utils.datastore_url()

    utils.info('Waiting for the datastore...')
    http.wait(store_url + '/ping', info_updates=True)

    config = utils.read_compose()
    sparks = [
        k for k, v in config['services'].items()
        if v.get('image', '').startswith('brewblox/brewblox-devcon-spark')
    ]
    zipf = zipfile.ZipFile(file, 'w', zipfile.ZIP_DEFLATED)

    # Always save .env
    utils.info('Exporting .env')
    zipf.write('.env')

    # Always save datastore
    utils.info('Exporting datastore')
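    # Fetch every entry in every namespace from the Redis datastore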
    resp = requests.post(store_url + '/mget',
                         json={'namespace': '', 'filter': '*'},
                         verify=False)
    resp.raise_for_status()
    zipf.writestr('global.redis.json', resp.text)

    if save_compose:
        utils.info('Exporting docker-compose.yml')
        zipf.write('docker-compose.yml')

    for spark in sparks:
        utils.info("Exporting Spark blocks from '{}'".format(spark))
        resp = requests.post('{}/{}/blocks/backup/save'.format(utils.host_url(), spark), verify=False)
        try:
            resp.raise_for_status()
            zipf.writestr(spark + '.spark.json', resp.text)
        except Exception as ex:
            if ignore_spark_error:
                utils.info("Skipping Spark '{}' due to error: {}".format(spark, str(ex)))
            else:
                raise ex

    for fname in [*glob('node-red/*.js*'), *glob('node-red/lib/**/*.js*')]:
        zipf.write(fname)

    zipf.close()
    click.echo(path.abspath(file))
    utils.info('Done!')
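
Because `brewblox-ctl --quiet backup save` prints only the archive path, the command composes well with scripts. A small illustration of inspecting the result (the file name below is hypothetical):

import zipfile

# Inspect a backup created by `brewblox-ctl --quiet backup save`
with zipfile.ZipFile('backup/brewblox_backup_20230101_1200.zip') as zipf:
    print(zipf.namelist())
    # Expected entries, given the steps above:
    # ['.env', 'global.redis.json', 'docker-compose.yml',
    #  'spark-one.spark.json', 'node-red/flows.json', ...]
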
Example #4
def load(archive,
         load_env,
         load_compose,
         load_datastore,
         load_spark,
         load_node_red,
         update):
    """Load and apply Brewblox settings backup.

    This command uses files generated by `brewblox-ctl backup save` as input.
    You can use the --load-XXXX options to partially load the backup.

    This does not attempt to merge data: it will overwrite current docker-compose.yml,
    datastore entries, and Spark blocks.

    Blocks on Spark services not in the backup file will not be affected.

    If dry-run is enabled, it will echo all configuration from the backup archive.

    \b
    Steps:
        - Write .env
        - Read .env values
        - Write docker-compose.yml, run `docker-compose up`.
        - Write all datastore files found in backup.
        - Write all Spark blocks found in backup.
        - Write Node-RED config files found in backup.
        - Run brewblox-ctl update
    """
    utils.check_config()
    utils.confirm_mode()
    urllib3.disable_warnings()

    sudo = utils.optsudo()
    host_url = utils.host_url()
    store_url = utils.datastore_url()

    zipf = zipfile.ZipFile(archive, 'r', zipfile.ZIP_DEFLATED)
    available = zipf.namelist()
    redis_file = 'global.redis.json'
    couchdb_files = [v for v in available if v.endswith('.datastore.json')]
    spark_files = [v for v in available if v.endswith('.spark.json')]
    node_red_files = [v for v in available if v.startswith('node-red/')]

    if load_env and '.env' in available:
        utils.info('Loading .env file')
        with NamedTemporaryFile('w') as tmp:
            data = zipf.read('.env').decode()
            utils.info('Writing .env')
            utils.show_data(data)
            tmp.write(data)
            tmp.flush()
            sh('cp -f {} .env'.format(tmp.name))

        utils.info('Reading .env values')
        load_dotenv(path.abspath('.env'))

    if load_compose:
        if 'docker-compose.yml' in available:
            utils.info('Loading docker-compose.yml')
            config = yaml.safe_load(zipf.read('docker-compose.yml'))
            # Older services may still depend on the `datastore` service
            # The `depends_on` config is useless anyway in a brewblox system
            for svc in config['services'].values():
                with suppress(KeyError):
                    del svc['depends_on']
            utils.write_compose(config)
            sh('{}docker-compose up -d --remove-orphans'.format(sudo))
        else:
            utils.info('docker-compose.yml file not found in backup archive')

    if load_datastore:
        if redis_file in available or couchdb_files:
            utils.info('Waiting for the datastore...')
            sh('{} http wait {}/ping'.format(const.CLI, store_url))
            # Wipe UI/Automation, but leave Spark files
            mdelete_cmd = '{} http post {}/mdelete --quiet -d \'{{"namespace":"{}", "filter":"*"}}\''
            sh(mdelete_cmd.format(const.CLI, store_url, 'brewblox-ui-store'))
            sh(mdelete_cmd.format(const.CLI, store_url, 'brewblox-automation'))
        else:
            utils.info('No datastore files found in backup archive')

        if redis_file in available:
            data = json.loads(zipf.read(redis_file).decode())
            utils.info('Loading {} entries from Redis datastore'.format(len(data['values'])))
            mset(data)

        # Backwards compatibility for UI/automation files from CouchDB
        # The IDs here are formatted as {moduleId}__{objId}
        # The module ID becomes part of the Redis namespace
        for db in ['brewblox-ui-store', 'brewblox-automation']:
            fname = '{}.datastore.json'.format(db)
            if fname not in available:
                continue
            docs = json.loads(zipf.read(fname).decode())
            # Drop invalid names (not prefixed with module ID)
            docs[:] = [d for d in docs if len(d['_id'].split('__', 1)) == 2]
            # Add namespace / ID fields
            for d in docs:
                segments = d['_id'].split('__', 1)
                d['namespace'] = '{}:{}'.format(db, segments[0])
                d['id'] = segments[1]
                del d['_id']
            utils.info('Loading {} entries from database `{}`'.format(len(docs), db))
            mset({'values': docs})

        # Backwards compatibility for Spark service files
        # There is no module ID field here
        spark_db = 'spark-service'
        spark_fname = '{}.datastore.json'.format(spark_db)
        if spark_fname in available:
            docs = json.loads(zipf.read(spark_fname).decode())
            for d in docs:
                d['namespace'] = spark_db
                d['id'] = d['_id']
                del d['_id']
            utils.info('Loading {} entries from database `{}`'.format(len(docs), spark_db))
            mset({'values': docs})

    if load_spark:
        sudo = utils.optsudo()

        if not spark_files:
            utils.info('No Spark files found in backup archive')

        for f in spark_files:
            spark = f[:-len('.spark.json')]
            utils.info('Writing blocks to Spark service {}'.format(spark))
            with NamedTemporaryFile('w') as tmp:
                data = json.loads(zipf.read(f).decode())
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/blocks/backup/load -f {}'.format(const.CLI, host_url, spark, tmp.name))
                sh('{}docker-compose restart {}'.format(sudo, spark))

    if load_node_red and node_red_files:
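        # The Node-RED container runs with UID/GID 1000; escalate with sudo only if the current user differs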
        sudo = ''
        if [getgid(), getuid()] != [1000, 1000]:
            sudo = 'sudo '

        with TemporaryDirectory() as tmpdir:
            zipf.extractall(tmpdir, members=node_red_files)
            sh('mkdir -p ./node-red')
            sh('{}chown 1000:1000 ./node-red/'.format(sudo))
            sh('{}chown -R 1000:1000 {}'.format(sudo, tmpdir))
            sh('{}cp -rfp {}/node-red/* ./node-red/'.format(sudo, tmpdir))

    zipf.close()

    if update:
        utils.info('Updating brewblox...')
        sh('{} update'.format(const.CLI))

    utils.info('Done!')
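
The datastore step above calls an mset() helper that is not shown in the snippet. A plausible sketch, assuming it posts the prepared {'values': [...]} payload to the datastore's /mset endpoint (the endpoint name is inferred from the mdelete call earlier in the function):

import requests

from brewblox_ctl import utils  # assumed import path


def mset(data):
    # Hypothetical sketch: push {'values': [...]} to the Redis datastore.
    # Each value is expected to carry 'namespace' and 'id' fields,
    # as prepared by the CouchDB compatibility code above.
    resp = requests.post(utils.datastore_url() + '/mset',
                         json=data,
                         verify=False)
    resp.raise_for_status()
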
Example #5
def save(save_compose, ignore_spark_error):
    """Create a backup of Brewblox settings.

    A zip archive containing JSON/YAML files is created in the ./backup/ directory.
    The archive name will include the current date and time to ensure uniqueness.

    The backup is not exported to any kind of remote/cloud storage.

    To use this command in scripts, run it as `brewblox-ctl --quiet backup save`.
    Its only output to stdout will be the absolute path to the created backup.

    The command will fail if any of the Spark services cannot be contacted.

    As it does not make any destructive changes to configuration,
    this command is not affected by --dry-run.

    \b
    Stored data:
    - .env
    - docker-compose.yml (optional).
    - Datastore databases.
    - Spark service blocks.

    \b
    NOT stored:
    - History data.

    """
    utils.check_config()
    urllib3.disable_warnings()

    file = 'backup/brewblox_backup_{}.zip'.format(
        datetime.now().strftime('%Y%m%d_%H%M'))
    with suppress(FileExistsError):
        mkdir(path.abspath('backup/'))

    url = utils.datastore_url()

    utils.info('Waiting for the datastore...')
    http.wait(url, info_updates=True)
    resp = requests.get(url + '/_all_dbs', verify=False)
    resp.raise_for_status()
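    # Skip CouchDB system databases, whose names start with an underscore (e.g. _users)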
    dbs = [v for v in resp.json() if not v.startswith('_')]

    config = utils.read_compose()
    sparks = [
        k for k, v in config['services'].items()
        if v.get('image', '').startswith('brewblox/brewblox-devcon-spark')
    ]
    zipf = zipfile.ZipFile(file, 'w', zipfile.ZIP_DEFLATED)

    utils.info('Exporting .env')
    zipf.write('.env')

    if save_compose:
        utils.info('Exporting docker-compose.yml')
        zipf.write('docker-compose.yml')

    utils.info('Exporting databases: {}'.format(', '.join(dbs)))
    for db in dbs:
        resp = requests.get('{}/{}/_all_docs'.format(url, db),
                            params={'include_docs': True},
                            verify=False)
        resp.raise_for_status()
        docs = [v['doc'] for v in resp.json()['rows']]
        for d in docs:
            del d['_rev']
        zipf.writestr(db + '.datastore.json', json.dumps(docs))

    for spark in sparks:
        utils.info("Exporting Spark blocks from '{}'".format(spark))
        resp = requests.get('{}/{}/export_objects'.format(utils.host_url(), spark),
                            verify=False)
        try:
            resp.raise_for_status()
            zipf.writestr(spark + '.spark.json', resp.text)
        except Exception as ex:
            if ignore_spark_error:
                utils.info("Skipping '{}' due to error: {}".format(
                    spark, str(ex)))
            else:
                raise ex

    zipf.close()
    click.echo(path.abspath(file))
    utils.info('Done!')
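
For reference, the CouchDB /_all_docs response that the database export loop unpacks has the following general shape (values are illustrative). The code keeps only each row's doc and strips the volatile _rev field, so repeated backups of unchanged data stay identical:

# Illustrative shape of GET {url}/{db}/_all_docs?include_docs=true
response = {
    'total_rows': 1,
    'offset': 0,
    'rows': [
        {'id': 'module__obj-1',
         'key': 'module__obj-1',
         'value': {'rev': '1-abc'},
         'doc': {'_id': 'module__obj-1', '_rev': '1-abc', 'title': 'example'}},
    ],
}
docs = [row['doc'] for row in response['rows']]
for d in docs:
    del d['_rev']  # revision IDs are regenerated on restore
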
Example #6
def load(archive, load_env, load_compose, load_datastore, load_spark, update):
    """Load and apply Brewblox settings backup.

    This command uses files generated by `brewblox-ctl backup save` as input.
    You can use the --load-XXXX options to partially load the backup.

    This does not attempt to merge data: it will overwrite current docker-compose.yml,
    datastore databases, and Spark blocks.

    Blocks on Spark services not in the backup file will not be affected.

    If dry-run is enabled, it will echo all configuration from the backup archive.

    \b
    Steps:
        - Write .env
        - Read .env values
        - Write docker-compose.yml, run `docker-compose up`.
        - Write all datastore files found in backup.
        - Write all Spark blocks found in backup.
        - Run brewblox-ctl update
    """
    utils.check_config()
    utils.confirm_mode()
    urllib3.disable_warnings()

    sudo = utils.optsudo()
    host_url = utils.host_url()
    store_url = utils.datastore_url()

    zipf = zipfile.ZipFile(archive, 'r', zipfile.ZIP_DEFLATED)
    available = zipf.namelist()
    datastore_files = [v for v in available if v.endswith('.datastore.json')]
    spark_files = [v for v in available if v.endswith('.spark.json')]

    if load_env and '.env' in available:
        with NamedTemporaryFile('w') as tmp:
            data = zipf.read('.env').decode()
            utils.info('Writing .env')
            utils.show_data(data)
            tmp.write(data)
            tmp.flush()
            sh('cp -f {} .env'.format(tmp.name))

        utils.info('Reading .env values')
        load_dotenv(path.abspath('.env'))

    if load_compose:
        if 'docker-compose.yml' in available:
            utils.info('Writing docker-compose.yml')
            utils.write_compose(yaml.safe_load(
                zipf.read('docker-compose.yml')))
            sh('{}docker-compose up -d --remove-orphans'.format(sudo))
        else:
            utils.info('docker-compose.yml file not found in backup archive')

    if load_datastore:
        if datastore_files:
            utils.info('Waiting for the datastore...')
            sh('{} http wait {}'.format(const.CLI, store_url))
        else:
            utils.info('No datastore files found in backup archive')

        for f in datastore_files:
            db = f[:-len('.datastore.json')]

            utils.info('Recreating database {}'.format(db))
            sh('{} http delete {}/{} --allow-fail'.format(
                const.CLI, store_url, db))
            sh('{} http put {}/{}'.format(const.CLI, store_url, db))

            utils.info('Writing database {}'.format(db))
            with NamedTemporaryFile('w') as tmp:
                data = {'docs': json.loads(zipf.read(f).decode())}
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/_bulk_docs -f {}'.format(
                    const.CLI, store_url, db, tmp.name))

    if load_spark:
        sudo = utils.optsudo()

        if not spark_files:
            utils.info('No Spark files found in backup archive')

        for f in spark_files:
            spark = f[:-len('.spark.json')]
            utils.info('Writing blocks to Spark service {}'.format(spark))
            with NamedTemporaryFile('w') as tmp:
                data = json.loads(zipf.read(f).decode())
                utils.show_data(data)
                json.dump(data, tmp)
                tmp.flush()
                sh('{} http post {}/{}/import_objects -f {}'.format(
                    const.CLI, host_url, spark, tmp.name))
                sh('{}docker-compose restart {}'.format(sudo, spark))

    zipf.close()

    if update:
        utils.info('Updating brewblox...')
        sh('{} update'.format(const.CLI))

    utils.info('Done!')
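
The _bulk_docs upload in the datastore step wraps the exported documents in a {'docs': [...]} envelope. Since _rev was stripped at save time, CouchDB writes each entry as a fresh document in the recreated database. An illustrative payload:

# Illustrative payload for POST {store_url}/{db}/_bulk_docs
payload = {
    'docs': [
        {'_id': 'module__obj-1', 'title': 'example'},
        # ...one entry per exported document; no '_rev' field,
        # so CouchDB creates each as a new document
    ],
}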