Example #1
def regenerate(args, update_catalog=False, mongodb=None):

    if settings_module.parse_boolean(os.environ.get('MAKETESTS', '0')):
        DESTPATH = os.path.join(tempfile.mkdtemp(),
                                'experiment_reference.json')
    else:
        DESTPATH = os.path.join(os.getcwd(), 'datacatalog', 'definitions',
                                'jsondocs', 'experiment_reference.json')
        update_catalog = True

    logger.debug('Project config: ' + PARENT + '/config.yml')
    project_settings = config.read_config(places_list=[PARENT])
    logger.debug('Local config: ' + THIS + '/config.yml')
    bootstrap_settings = config.read_config(places_list=[THIS])
    settings = dicthelpers.data_merge(project_settings, bootstrap_settings)

    env = args.environment
    if env is None:
        env = 'development'
    db = settings.get(env)

    schema = {
        'description': 'Experiment reference enumeration',
        'type': 'string',
        'enum': []
    }

    challenges = ChallengeMapping(settings['experiment_reference'],
                                  settings['google_client'])
    challenges.populate()
    for cp in challenges.filescache:
        if cp.get('uri', None) is not None:
            google_sheets_id = os.path.basename(cp.get('uri', None))
            cp_uuid = identifiers.typeduuid.catalog_uuid(
                cp.get('id'), 'challenge_problem')
            cp_settings = copy.deepcopy(settings['experiment_reference'])
            cp_settings['google_sheets_id'] = google_sheets_id

            # Generate the experiment designs for each CP
            mapping = ExperimentReferenceMapping(cp_settings,
                                                 settings['google_client'])
            mapping.populate()
            if update_catalog:
                if mongodb is None:
                    mongodb = db['mongodb']
                store = linkedstores.experiment_design.ExperimentDesignStore(
                    mongodb)
                for doc in mapping.filescache:
                    # print(doc)
                    if doc['experiment_design_id'] != 'Unknown':
                        doc['child_of'].append(cp_uuid)
                    logger.info('SYNCING {}'.format(doc.get('title', None)))
                    store.add_update_document(doc)

            for rec in mapping.filescache:
                if rec['experiment_design_id'] not in schema['enum']:
                    schema['enum'].append(rec['experiment_design_id'])

    with open(DESTPATH, 'w') as schemafile:
        json.dump(schema, schemafile, indent=2)
    return True
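
The file written above is an ordinary JSON Schema enum, so downstream code can validate candidate values against it. A minimal sketch, assuming the third-party jsonschema package is available; is_known_experiment_design is a hypothetical helper, not part of datacatalog:

import json

from jsonschema import ValidationError, validate


def is_known_experiment_design(value, schema_path):
    """Check a candidate value against the generated enum schema."""
    with open(schema_path, 'r') as schemafile:
        schema = json.load(schemafile)
    try:
        validate(instance=value, schema=schema)
        return True
    except ValidationError:
        return False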
Example #2
def main(args):

    logger.debug('Project config: ' + PARENT + '/config.yml')
    project_settings = config.read_config(places_list=[PARENT])
    logger.debug('Local config: ' + THIS + '/config.yml')
    bootstrap_settings = config.read_config(places_list=[THIS])
    settings = dicthelpers.data_merge(project_settings, bootstrap_settings)

    env = args.environment
    if env is None:
        env = 'localhost'
    if args.verbose is True:
        settings['verbose'] = True
    else:
        settings['verbose'] = False

    mongodb = settings.get(env).get('mongodb')

    if args.command == 'list':
        dblist(mongodb, settings)
    elif args.command == 'auto':
        autobuild(mongodb, settings)
    elif args.command == 'create':
        raise NotImplementedError()
    elif args.command == 'delete':
        raise NotImplementedError()
Example #3
def main(args):

    logger.debug('Project config: ' + PARENT + '/config.yml')
    project_settings = config.read_config(places_list=[PARENT])
    logger.debug('Local config: ' + THIS + '/config.yml')
    bootstrap_settings = config.read_config(places_list=[THIS])
    settings = datacatalog.dicthelpers.data_merge(project_settings,
                                                  bootstrap_settings)

    env = args.environment
    if env is None:
        env = 'production'
    if args.verbose is True:
        settings['verbose'] = True
    mongodb = settings.get(env).get('mongodb')
    agave_client = Agave.restore()

    mgr = datacatalog.managers.common.Manager(mongodb, agave_client)
    resp = mgr.stores[args.collection].query(query={}, attr_dict=True)
    json.dump(resp,
              args.output,
              sort_keys=True,
              indent=2,
              separators=(',', ':'),
              default=json_datetime_serializer)
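
json_datetime_serializer is referenced but not defined in this snippet. A plausible implementation of such a default= hook, assuming ISO-8601 strings are an acceptable wire format:

import datetime


def json_datetime_serializer(obj):
    """default= hook for json.dump: render datetimes as ISO-8601 strings."""
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError('{} is not JSON serializable'.format(type(obj)))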
Example #4
File: test_yaml.py Project: TACC/tacconfig
def test_invalid_filetype():
    '''test that filetype is among the ones accepted'''
    testdata = os.path.join(HERE, 'data', '1')
    fname = 'config.yml'
    with pytest.raises(AssertionError):
        config.read_config(places_list=[testdata],
                           config_filename=fname,
                           permissive=False,
                           filetype="JSON")
Example #5
def main(filename):

    settings = config.read_config(namespace="_REACTOR")
    jobstore = PipelineJobStore(settings.mongodb)
    jobdef = json.load(open(filename, 'r'))
    resp = jobstore.create(jobdef)
    if resp is not None:
        print('Loaded job {}'.format(resp['uuid']))
Example #6
File: test_yaml.py Project: TACC/tacconfig
def test_invalid_yml():
    '''robust to invalid YML by path as a dict'''
    testdata = os.path.join(HERE, 'data', '1')
    fname = 'invalid.yml'
    settings = config.read_config(places_list=[testdata],
                                  config_filename=fname,
                                  permissive=True)
    assert settings == {}
Example #7
def main(filename):

    settings = config.read_config(namespace="_REACTOR")
    pipestore = PipelineStore(settings.mongodb)
    pipedef = json.load(open(filename, 'r'))
    resp = pipestore.add_update_document(pipedef)
    if resp is not None:
        print('Loaded pipeline {}'.format(resp['uuid']))
Example #8
File: test_yaml.py Project: TACC/tacconfig
def test_load_yml():
    '''test load valid YML by path as a dict'''
    testdata = os.path.join(HERE, 'data', '1')
    fname = 'config.yml'
    settings = config.read_config(places_list=[testdata],
                                  config_filename=fname,
                                  permissive=False)
    assert isinstance(settings, dict), "Expected to get a 'dict' back"
    assert 'filter' in settings, "Expected 'filter' in response"
Example #9
File: utils.py Project: SD2E/base-images
def read_config(namespace=NAMESPACE, places_list=CONFIG_LOCS,
                update=True, env=True):
    """Override tacconfig's broken right-favoring merge"""
    master_config = None
    for place in places_list:
        new_config = config.read_config(namespace=namespace,
                                        places_list=[place],
                                        env=env)
        if isinstance(new_config, dict) and master_config is None:
            master_config = new_config.copy()
        master_config = master_config + new_config
    return master_config
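
Iterating place by place and merging with AttrDict's + (whose right operand takes precedence on key conflicts) means later entries in places_list win, the precedence the test in Example #10 asserts. A hypothetical usage sketch with invented paths:

import os

# Values from ~/.myapp override values from /etc/myapp on key conflicts
settings = read_config(namespace='MYAPP',
                       places_list=['/etc/myapp',
                                    os.path.expanduser('~/.myapp')])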
Example #10
File: test_yaml.py Project: TACC/tacconfig
def test_specified_load_order():
    '''test that config later in list takes precedence'''
    testdata = []
    testdata.append(os.path.join(HERE, 'data', '1'))
    testdata.append(os.path.join(HERE, 'data', '2'))
    fname = 'config.yml'
    settings = config.read_config(places_list=testdata,
                                  config_filename=fname,
                                  permissive=True)
    assert settings['logs']['level'] == 'INFO', \
        "Expected setting from second location to apply"
    assert isinstance(settings['tacos'], list), \
        "Expected setting to contain a list named 'tacos'"
Example #11
def main(args):

    logger.debug('Project config: ' + PARENT + '/config.yml')
    project_settings = config.read_config(places_list=[PARENT])
    logger.debug('Local config: ' + THIS + '/config.yml')
    bootstrap_settings = config.read_config(places_list=[THIS])
    settings = dicthelpers.data_merge(project_settings, bootstrap_settings)

    env = args.environment
    if env is None:
        env = 'localhost'

    if args.verbose is True:
        settings['verbose'] = True
    else:
        settings['verbose'] = False

    mongodb = settings.get(env).get('mongodb')
    mongodb_uri = mongo.get_mongo_uri(mongodb)
    logger.debug('URI: {}'.format(mongodb_uri))
    database_name = None
    if args.database is not None:
        database_name = args.database
    else:
        database_name = settings.get(env).get('mongodb',
                                              {}).get('database', None)
    logger.debug('DB: {}'.format(database_name))

    myclient = MongoClient(mongodb_uri)
    idb = myclient[database_name]

    if args.command == 'discover':
        autodiscover(idb, settings)
    elif args.command == 'auto':
        autobuild(idb, settings)
    elif args.command == 'create':
        raise NotImplementedError()
    elif args.command == 'delete':
        raise NotImplementedError()
Example #12
def regenerate(args, update_catalog=False, mongodb=None):

    if settings_module.parse_boolean(os.environ.get('MAKETESTS', '0')):
        DESTPATH = os.path.join(tempfile.mkdtemp(),
                                'challenge_problem_id.json')
    else:
        DESTPATH = os.path.join(os.getcwd(), 'datacatalog', 'definitions',
                                'jsondocs', 'challenge_problem_id.json')
        update_catalog = True

    logger.debug('Project config: ' + PARENT + '/config.yml')
    project_settings = config.read_config(places_list=[PARENT])
    logger.debug('Local config: ' + THIS + '/config.yml')
    bootstrap_settings = config.read_config(places_list=[THIS])
    settings = dicthelpers.data_merge(project_settings, bootstrap_settings)

    env = args.environment
    if env is None:
        env = 'development'
    db = settings.get(env)

    mapping = ChallengeMapping(settings['experiment_reference'],
                               settings['google_client'])
    mapping.populate()

    # Experiment records: Insert into experiment_reference collection
    # FIXME - We don't know which challenge_problem they are children of
    schemadef = mapping.generate_schema_definitions()
    with open(DESTPATH, 'w') as schemafile:
        json.dump(schemadef, schemafile, indent=2)

    if update_catalog:
        if mongodb is None:
            mongodb = db['mongodb']
        store = linkedstores.challenge_problem.ChallengeStore(mongodb)
        for doc in mapping.filescache:
            logger.info('SYNCING {}'.format(doc.get('title', None)))
            store.add_update_document(doc)

    return True
Example #13
File: test_yaml.py Project: TACC/tacconfig
def test_environment_override():
    '''ensure environment override works as intended'''
    testdata = []
    testdata.append(os.path.join(HERE, 'data', '1'))
    testdata.append(os.path.join(HERE, 'data', '2'))
    fname = 'config.yml'
    namespace = 'TACCONFIG'
    env_var_name = config.variablize(keys=['logs', 'level'],
                                     namespace=namespace)
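    # Presumably this composes namespace + keys into a name like
    # 'TACCONFIG_LOGS_LEVEL' (an assumption; not verified against tacconfig)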
    os.environ[env_var_name] = 'WARNING'
    # override logs.level with WARNING
    settings = config.read_config(places_list=testdata,
                                  config_filename=fname,
                                  namespace=namespace,
                                  permissive=True,
                                  env=True)
    assert settings['logs']['level'] == 'WARNING', 'Override failed'
    # turn off override by setting env to False
    settings = config.read_config(places_list=testdata,
                                  config_filename=fname,
                                  namespace=namespace,
                                  permissive=True,
                                  env=False)
    assert settings['logs']['level'] == 'INFO', 'Ignore env failed'
Example #14
def read_config(namespace=None, places_list=None, update=True, env=True):
    """Override tacconfig's broken right-favoring merge"""
    if namespace is None:
        namespace = NAMESPACE
    if places_list is None:
        places_list = CONFIG_LOCS

    merged_config = None

    # Override tacconfig's broken right-favoring merge
    for place in places_list:
        new_config = tacconfig.read_config(namespace=namespace,
                                           places_list=[place],
                                           env=env)
        if isinstance(new_config, dict) and merged_config is None:
            merged_config = new_config.copy()
        merged_config = merged_config + new_config

    return merged_config
Example #15
        spaces='drive',
        fields='files(id, name)',
        includeTeamDriveItems=True,
        supportsTeamDrives=True).execute()
    return response['files']


def rationalize(textstring):
    sep = settings['slugify']['separator']
    return sep.join(slug for slug in slugify(
        textstring,
        stopwords=settings['slugify']['stopwords'],
        lowercase=settings['slugify']['case_insensitive']).split('-'))
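

# Illustration only (hypothetical settings; actual behavior depends on the
# slugify block in the config loaded below):
#   settings['slugify'] = {'separator': '_', 'stopwords': ['the'],
#                          'case_insensitive': True}
#   rationalize('The Rule 30 Challenge')  ->  'rule_30_challenge'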


settings = config.read_config()

records = []
for file in get_files('/', settings['google']['sheets_id']):
    key = rationalize(file['name'])
    if key != '':
        record = {
            'title': file['name'],
            'status': settings['schema']['default_status'],
            'uri': 'https://docs.google.com/document/d/{}'.format(file['id']),
            'id': key,
            'type': 'experiment_reference'
        }
        records.append(record)

# Placeholder for Unknown mapping
Example #16
def main(args):

    logger.setLevel(logging.DEBUG)

    def get_v1_items(filter={}):
        """Returns a cursor of v1 items"""
        return v1_stores['pipelinejob'].find(filter=filter)

    def get_v2_items(filter={}):
        """Returns a cursor of v2 items"""
        return v2_stores['pipelinejob'].find(filter=filter)

    settings = config.read_config()
    mongodb_v2 = settings.get('mongodb')
    mongodb_v1 = copy.copy(mongodb_v2)
    # Make overridable
    mongodb_v1['database'] = 'catalog'
    db1 = datacatalog.mongo.db_connection(mongodb_v1)
    v1_stores = dict()
    v1_stores['pipeline'] = db1['pipelines']
    v1_stores['pipelinejob'] = db1['jobs']

    v2_stores = dict()
    v2_stores['pipeline'] = datacatalog.linkedstores.pipeline.PipelineStore(mongodb_v2)
    v2_stores['pipelinejob'] = datacatalog.linkedstores.pipelinejob.PipelineJobStore(mongodb_v2)

    jobs = get_v1_items()
    jc = 0
    logger.info('Jobs found: %s', jobs.count())

    for job in jobs:

        job_doc = dict()
        jc = jc + 1
        logger.debug('Processing job %s', jc)
        # Lift over UUID
        try:
            ouuid = str(job['uuid'])
            nuuid = typeduuid.catalog_uuid_from_v1_uuid(ouuid, uuid_type='pipelinejob')
        except Exception:
            logger.critical('Unable to translate %s. Skipping.', job.get('uuid'))
            continue

        try:
            opuuid = str(job['pipeline_uuid'])
            npuuid = typeduuid.catalog_uuid_from_v1_uuid(opuuid, uuid_type='pipeline')
        except Exception:
            logger.critical('Unable to translate %s. Skipping.', job.get('pipeline_uuid'))
            continue

        logger.info('UUID %s remapped to %s', ouuid, nuuid)

        # Don't overwrite previously migrated jobs
        if v2_stores['pipelinejob'].coll.find_one({'uuid': nuuid}) is not None:
            logger.critical('Destination job exists. Skipping.')
            continue

        job_doc['uuid'] = nuuid
        job_doc['archive_path'] = os.path.join('/', job['path'])
        job_doc['archive_system'] = 'data-sd2e-community'
        job_doc['session'] = job.get('session',
                                     interestinganimal.generate(
                                         timestamp=False))
        job_doc['updated'] = job.get('updated')
        job_doc['state'] = job.get('status', 'CREATED')
        job_doc['last_event'] = job.get('last_event', 'update').lower()
        job_doc['pipeline_uuid'] = npuuid
        # Linkages
        job_doc['generated_by'] = [npuuid]
        job_doc['child_of'] = list()
        job_doc['derived_from'] = list()

        # Agent/task
        if 'actor_id' in job:
            job_doc['agent'] = 'https://api.sd2e.org/actors/v2/' + job.get('actor_id')
        else:
            job_doc['agent'] = 'https://api.sd2e.org/actors/v2/MEzqaw4rkWZoK'
        job_doc['task'] = None

        # Lift over top-level data
        old_data = job.get('data', dict())
        new_data = dict()

        # Lift over parameters and establish derived_from linkages.
        # Copy once, outside the loop, so keys popped in earlier
        # iterations stay removed from the filtered copy.
        old_data_filtered = copy.deepcopy(old_data)
        for oldkey, newkey, uuid_type in [
                ('sample_id', 'sample_id', 'sample'),
                ('experiment_reference', 'experiment_design_id', 'experiment'),
                ('measurement_id', 'measurement_id', 'measurement')]:
            if oldkey in old_data:
                new_data[newkey] = old_data[oldkey]
                old_data_filtered.pop(oldkey)
                value_uuid = typeduuid.catalog_uuid(
                    old_data[oldkey], uuid_type=uuid_type)
                job_doc['derived_from'].append(value_uuid)

        # Merge lifted data and other data fields
        new_data = data_merge(old_data_filtered, new_data)
        if new_data is None:
            new_data = dict()
        job_doc['data'] = new_data

        # Port job history
        v2_history = list()
        for v1_event in job.get('history', []):
            v2_name = list(v1_event.keys())[0]
            v2_event = {'date': v1_event.get(v2_name).get('date'),
                        'data': v1_event.get(v2_name, {}).get('data', dict()),
                        'name': v2_name.lower(),
                        'uuid': typeduuid.generate(
                            uuid_type='pipelinejob_event',
                            binary=False)}
            if v2_event['data'] is None:
                v2_event['data'] = dict()
            v2_history.append(v2_event)
        v2_history = sorted(v2_history, key=lambda k: k['date'])
        job_doc['history'] = v2_history

        # Set system-managed keys
        job_doc = v2_stores['pipelinejob'].set_private_keys(
            job_doc, source=SELF)

        if args.verbose:
            pprint(job_doc)

        resp = v2_stores['pipelinejob'].coll.insert_one(job_doc)
        logger.debug('Inserted document {}'.format(
            resp.inserted_id))
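
catalog_uuid_from_v1_uuid is not shown here. One way such a deterministic remapping can be built is with name-based (version 5) UUIDs; the sketch below is purely illustrative, and the namespace constants are invented rather than datacatalog's:

import uuid

# Hypothetical per-type namespaces; any fixed UUIDs would do
V1_MIGRATION_NAMESPACES = {
    'pipeline': uuid.uuid5(uuid.NAMESPACE_DNS, 'pipeline.migration.example'),
    'pipelinejob': uuid.uuid5(uuid.NAMESPACE_DNS, 'pipelinejob.migration.example'),
}


def demo_uuid_from_v1_uuid(v1_uuid, uuid_type='pipelinejob'):
    """Same v1 UUID in, same v2 UUID out: safe to re-run a migration."""
    return str(uuid.uuid5(V1_MIGRATION_NAMESPACES[uuid_type], str(v1_uuid)))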
Example #17
    def __init__(self):
        '''Initialize class with a valid Agave API client'''
        self.nickname = petname.Generate(3, '-')
        self.context = get_context_with_mock_support()
        self.client = get_client_with_mock_support()
        self.uid = self.context.get('actor_id')
        self.execid = self.context.get('execution_id')
        self.state = self.context.get('state')
        self.aliases = alias.AliasStore(self.client)

        localonly = str(os.environ.get('LOCALONLY', 0))
        if localonly == '1':
            self.local = True
        else:
            self.local = False

        try:
            self.username = self.client.username.encode("utf-8", "strict")
        except Exception:
            self.username = '******'
            pass

        # bootstrap configuration via tacconfig module
        self.settings = config.read_config(namespace=NAMESPACE)

        # list of text strings to redact in all logs - in this case, all
        # variables passed in as env overrides since we assume those are
        # intended to be secret (or at least not easily discoverable).
        try:
            envstrings = config.get_env_config_vals(namespace=NAMESPACE)
        except Exception:
            envstrings = []

        # add in nonce to the redact list via some heuristic measures
        envstrings.extend(self._get_nonce_vals())

        # Set up logging
        #
        # Get logging level
        log_level = LOG_LEVEL
        try:
            _log_level = self.settings.get('logs').get('level')
            if isinstance(_log_level, str):
                log_level = _log_level
        except Exception:
            pass
        # Optional log file (relative to cwd())
        log_file = LOG_FILE
        try:
            _log_file = self.settings.get('logs').get('file')
            if isinstance(_log_file, str):
                log_file = _log_file
        except Exception:
            pass
        # Use 'redactions' from above to define a banlist of strings
        #   These will be replaced with * characters in all logs
        self.logger = logs.get_logger(self.uid,
                                      self.execid,
                                      log_level=log_level,
                                      log_file=log_file,
                                      redactions=envstrings)
Example #18
ENVIRONMENT = os.environ.get('DB_ENV', 'localhost')

HERE = os.getcwd()
SELF = __file__
THIS = os.path.dirname(SELF)
PARENT = os.path.dirname(THIS)
GPARENT = os.path.dirname(PARENT)

# Use the local datacatalog, not an installed copy
if HERE not in sys.path:
    sys.path.insert(0, HERE)
from datacatalog.identifiers import abaco
from datacatalog import dicthelpers

project_settings = config.read_config(places_list=[PARENT])
bootstrap_settings = config.read_config(places_list=[THIS])
settings = dicthelpers.data_merge(project_settings, bootstrap_settings)

settings = AttrDict({
    'mongodb': settings.get(ENVIRONMENT, {}).get('mongodb'),
    'pipelines': {'pipeline_uuid': '106c46ff-8186-5756-a934-071f4497b58d',
                  'pipeline_manager_id': abaco.actorid.mock(),
                  'pipeline_manager_nonce': abaco.nonceid.mock(),
                  'job_manager_id': abaco.actorid.mock(),
                  'job_manager_nonce': abaco.nonceid.mock(),
                  'job_indexer_id': abaco.actorid.mock(),
                  'job_indexer_nonce': abaco.nonceid.mock()}

})
Example #19
def main(args):

    logger.setLevel(logging.DEBUG)

    def get_v1_items(filter={}):
        """Returns a cursor of v1 items"""
        return v1_stores['pipeline'].find(filter=filter)

    def get_v2_items(filter={}):
        """Returns a cursor of v2 items"""
        return v2_stores['pipeline'].find(filter=filter)

    settings = config.read_config()
    mongodb_v2 = settings.get('mongodb')
    mongodb_v1 = copy.copy(mongodb_v2)
    # Make overridable
    mongodb_v1['database'] = 'catalog'
    db1 = datacatalog.mongo.db_connection(mongodb_v1)
    v1_stores = dict()
    v1_stores['pipeline'] = db1['pipelines']

    v2_stores = dict()
    v2_stores['pipeline'] = datacatalog.linkedstores.pipeline.PipelineStore(mongodb_v2)

    logger.debug('Processing pipeline %s', args.uuid1)

    # Lift over UUID
    try:
        opuuid = str(args.uuid1)
        npuuid = typeduuid.catalog_uuid_from_v1_uuid(opuuid, uuid_type='pipeline')
    except Exception:
        logger.critical('Unable to translate %s. Skipping.', args.uuid1)
        raise

    # Fetch pipeline reference
    v1_pipeline = v1_stores['pipeline'].find_one({'_uuid': args.uuid1})
    if v1_pipeline is None:
        raise ValueError(
            'No such pipeline {} found in v0.1.4 database'.format(args.uuid1))

    # Don't overwrite previously migrated pipelines
    if v2_stores['pipeline'].coll.find_one({'uuid': npuuid}) is not None:
        logger.critical('Destination pipeline exists. Skipping.')
        sys.exit(0)

    v2_pipeline = dict()
    v2_pipeline['uuid'] = npuuid

    for key in ('accepts', 'name', 'description', 'components', 'produces',
                'collections_levels', 'processing_levels'):
        v2_pipeline[key] = v1_pipeline.get(key)

    if args.type is not None:
        v2_pipeline['pipeline_type'] = args.type
    else:
        v2_pipeline['pipeline_type'] = 'primary-etl'
    if args.id is not None:
        v2_pipeline['id'] = args.id
    else:
        v2_pipeline['id'] = safen.encode_title(v2_pipeline['description'])
        logger.debug('Created pipeline.id {}'.format(v2_pipeline['id']))

    # Set managed keys
    v2_pipeline = v2_stores['pipeline'].set_private_keys(
        v2_pipeline, source=SELF)

    if args.verbose:
        pprint(v2_pipeline)

    resp = v2_stores['pipeline'].coll.insert_one(v2_pipeline)
    logger.info('Inserted pipeline document {}'.format(
        resp.inserted_id))

    sys.exit(0)
Example #20
def main(args):

    logger.debug('Project config: ' + PARENT + '/config.yml')
    project_settings = config.read_config(places_list=[PARENT])
    logger.debug('Local config: ' + THIS + '/config.yml')
    bootstrap_settings = config.read_config(places_list=[THIS])
    settings = dicthelpers.data_merge(project_settings, bootstrap_settings)

    # mongodb = project_settings.get('mongodb')
    # mongodb_uri = datacatalog.mongo.get_mongo_uri(mongodb)
    # myclient = MongoClient(mongodb_uri)

    env = args.environment
    if env is None:
        env = 'localhost'
    if args.verbose is True:
        settings['verbose'] = True
    else:
        settings['verbose'] = False

    mongodb = settings.get(env).get('mongodb')
    mongodb_root = {
        'host': mongodb['host'],
        'port': mongodb['port'],
        'username': '******',
        'password': mongodb['root_password']
    }
    mongodb_uri = mongo.get_mongo_uri(mongodb_root)
    logger.debug('MongoDB: {}'.format(mongodb_uri))
    myclient = MongoClient(mongodb_uri)
    database_name = mongodb.get('database', args.database)

    if database_name is not None:
        logger.info('Ensuring existence of {}'.format(database_name))
        # MongoDB creates databases lazily; inserting a document realizes it
        myclient[database_name]['_keep'].insert_one(
            {'note': 'database provisioned'})
        roles = [{'role': 'dbOwner', 'db': database_name}]
        try:
            myclient['admin'].command("createUser",
                                      mongodb['username'],
                                      pwd=mongodb['password'],
                                      roles=roles)
        except OperationFailure:
            myclient['admin'].command("updateUser",
                                      mongodb['username'],
                                      pwd=mongodb['password'],
                                      roles=roles)
        except Exception as opf:
            logger.warning(opf)
        try:
            myclient[database_name].command("createUser",
                                            mongodb['username'],
                                            pwd=mongodb['password'],
                                            roles=roles)
        except OperationFailure:
            myclient[database_name].command("updateUser",
                                            mongodb['username'],
                                            pwd=mongodb['password'],
                                            roles=roles)
        except Exception as opf:
            logger.warning(opf)
        # except OperationFailure:
        #     pass
    else:
        raise Exception(
            'Failed to find database name in config or command line options')
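
Rather than distinguishing createUser from updateUser by catching OperationFailure, the database can be probed first with MongoDB's usersInfo command. A sketch of that alternative, using a hypothetical ensure_user helper:

def ensure_user(client, db_name, username, password, roles):
    """Create the Mongo user if absent, otherwise update it."""
    db = client[db_name]
    existing = db.command('usersInfo', username).get('users', [])
    cmd = 'updateUser' if existing else 'createUser'
    db.command(cmd, username, pwd=password, roles=roles)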
Example #21
def main():
    settings = config.read_config(namespace='BMW')
    logger = logging.getLogger(__file__)
    logger.debug(settings)
    logger.setLevel(settings.logs.level)
    logger.addHandler(logging.StreamHandler())

    try:
        reg = get_region_from_name(settings.account.country)
    except Exception as e:
        logger.error("Failed to get API URL", e)
        sys.exit(1)

    try:
        bc = ConnectedDriveAccount(username=settings.account.username,
                                   password=settings.account.password,
                                   region=reg)
    except Exception as e:
        logger.error("Failed to connect to BMW servers", e)
        sys.exit(1)

    try:
        for v in bc.vehicles:

            v.state.update_data()

            ts = v.state.timestamp
            last_update_reason = v.state.last_update_reason

            pos = v.state.gps_position
            charging_status = v.state.charging_status.name
            charging_status_human = MAPPINGS[charging_status]
            # cosmetic tweak to status
            if charging_status == 'INVALID':
                if last_update_reason == 'VEHICLE_SHUTDOWN':
                    charging_status_human = 'not charging though the car is parked'
                if last_update_reason == 'VEHICLE_MOVING':
                    charging_status_human = 'not charging as it is being driven'

            battery_level = v.state.charging_level_hv
            time_remaining = v.state.charging_time_remaining
            if time_remaining is None:
                time_remaining = 'Unknown'

            # print(json.dumps(v.state.attributes, indent=4))

            driver_door_status = v.state.attributes["STATUS"][
                "doorDriverFront"]
            passenger_door_status = v.state.attributes["STATUS"][
                "doorPassengerFront"]
            door_locks_status = v.state.door_lock_state
            fuel_level_pct = int(
                (v.state.attributes["STATUS"]["remainingFuel"] /
                 v.state.attributes["STATUS"]["maxFuel"]) * 100)

            range_miles = int(v.state.attributes["STATUS"]["remainingRangeFuelMls"]) + \
                int(v.state.attributes["STATUS"]["maxRangeElectricMls"])

            logger.info('Last update: {}'.format(ts))
            logger.info('Last reason: {}'.format(last_update_reason))
            logger.info('Position: {}, {}'.format(pos[0], pos[1]))
            logger.info('Charging status: {}'.format(charging_status))
            logger.info('Charge time remaining: {}'.format(time_remaining))
            logger.info('Charge percentage: {}'.format(battery_level))
            logger.info('Driver door: {}'.format(driver_door_status.title()))
            logger.info('Passenger door: {}'.format(
                passenger_door_status.title()))
            logger.info('Door locks: {}'.format(
                str(door_locks_status).replace('LockState.', '').title()))
            logger.info('Fuel level: {}%'.format(fuel_level_pct))
            logger.info('Range: {} miles'.format(range_miles))

            if settings.actions.ifttt_notify_not_charging:
                if charging_status in settings.bad_charge_status:
                    logger.warning("Battery is not charging for some reason")
                    send_event(settings.ifttt.api_key,
                               settings.ifttt.event,
                               value1=charging_status_human,
                               value2=battery_level,
                               value3=time_remaining)

            # Define the icon before any conditional branch: the door-lock
            # and low-fuel notifications below also reference it
            slack_icon = ':electric_plug:'
            if battery_level < 50:
                slack_icon = ':warning:'

            if settings.actions.slack_notify_charging_status:
                try:
                    slack_message = "Your BMW is {}.".format(
                        charging_status_human)
                    slack_message = slack_message + \
                        " Its battery is {}% full.".format(battery_level)
                    if battery_level < 95:
                        slack_message = slack_message + \
                            " {} remains till fully charged.".format(
                                time_remaining)
                    slack_message = slack_message + " Its maximum drivable range is {} mi".format(
                        range_miles)

                    to_slack(settings.slack, slack_message, icon=slack_icon)

                except Exception as e:
                    logger.warning("Failed to post to Slack: {}".format(e))

            if door_locks_status not in (LockState.LOCKED, LockState.SECURED):
                if settings.actions.slack_notify_door_unlocked:
                    try:
                        slack_message = ':unlock: Your BMW i3 was found to be *UNLOCKED*!'
                        to_slack(settings.slack,
                                 slack_message,
                                 icon=slack_icon)
                    except Exception as e:
                        logger.warning("Failed to post to Slack: {}".format(e))

                if settings.actions.bmw_trigger_remote_door_lock:
                    try:
                        logger.info('Remotely locking doors!')
                        v.remote_services.trigger_remote_door_lock()
                    except Exception as e:
                        logger.error(
                            'Failed to remotely lock doors: {}'.format(e))

            if fuel_level_pct <= settings.low_fuel_pct:
                if settings.actions.slack_notify_low_fuel:
                    try:
                        slack_message = ':fuelpump: Your BMW i3 is low on gas ({}%)'.format(
                            fuel_level_pct)
                        to_slack(settings.slack,
                                 slack_message,
                                 icon=slack_icon)
                    except Exception as e:
                        logger.warning("Failed to post to Slack: {}".format(e))

    except Exception as e:
        logger.error("Failed to iterate over vehicle state", e)
        sys.exit(1)

    sys.exit(0)
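
send_event and to_slack are imported from elsewhere and not shown. For IFTTT's Webhooks ("Maker") service, a minimal send_event could look like the sketch below; the helper itself is hypothetical, though the trigger URL format is IFTTT's documented one:

import requests


def send_event(api_key, event, value1=None, value2=None, value3=None):
    """POST up to three values to an IFTTT Webhooks trigger."""
    url = 'https://maker.ifttt.com/trigger/{}/with/key/{}'.format(
        event, api_key)
    resp = requests.post(url,
                         json={'value1': value1,
                               'value2': value2,
                               'value3': value3},
                         timeout=10)
    resp.raise_for_status()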