def get_multiple_vm_details(conn, uuids, count):
    vm_list = []

    try:
        multi_vm_results = conn.get_supplychains(
            uuids,
            types=['VirtualMachine'],
            detail='aspects',
            aspects=['virtualMachineAspect'])[0]
        for vm in multi_vm_results['seMap']['VirtualMachine'][
                'instances'].values():
            try:
                vm_details = get_vm_info(vm)
                vm_list.append({
                    'uuid': vm_details[0],
                    'name': vm_details[1],
                    'ip_address': vm_details[2]
                })
            except KeyError:
                continue

    except vc.HTTP500Error as e:
        umsg.log(
            'Problem retrieving bulk VM information, trying individual VMs',
            level='error')
        umsg.log(e)
        vm_list.extend(get_individual_vm_details(conn, uuids, count))

    return vm_list
def get_turbo_vms(conn, start=None, end=None, step=100):
    vm_list = []
    uuids = [
        x['uuid']
        for x in conn.search(types=['VirtualMachine'], detail_type='compact')
    ]
    if start is None:
        start = 0
    if end is None:
        end = len(uuids)

    # Page through the UUID list in [start:end] windows of size `step`.
    while end < len(uuids):
        umsg.log(
            f'Getting VMs between {start} and {end}, out of a total {len(uuids)}',
            level='debug')
        uuid_subset = uuids[start:end]

        vm_details = get_multiple_vm_details(conn, uuid_subset, start)
        vm_list.extend(vm_details)
        start += step
        end += step

    # Fetch the final partial window once `end` has run past the list.
    if start < len(uuids):
        umsg.log(
            f'Getting VMs between {start} and {len(uuids)}, out of a total {len(uuids)}',
            level='debug')
        uuid_subset = uuids[start:]
        vm_details = get_multiple_vm_details(conn, uuid_subset, start)
        vm_list.extend(vm_details)

    return vm_list
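# Usage sketch (illustrative, not part of the original example): assuming
# `conn` is an authenticated vmt-connect Connection, this pulls every VM in
# windows of 500 UUIDs per request, matching the call made in main() below:
#
#   turbo_vms = get_turbo_vms(conn, start=0, end=500, step=500)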
def get_individual_vm_details(conn, uuids, count):
    vm_list = []

    for vm_uuid in uuids:
        umsg.log(f'Processing VM {count}')
        count += 1

        try:
            single_vm_results = conn.get_entities(uuid=vm_uuid, detail=True)[0]

        except vc.HTTP500Error:
            umsg.log(
                f'Error getting details for VM with UUID: {vm_uuid}, skipping',
                level='error')
            continue

        try:
            vm_details = get_vm_info(single_vm_results)

        except KeyError:
            continue

        vm_list.append({
            'uuid': vm_details[0],
            'name': vm_details[1],
            'ip_address': vm_details[2]
        })
    return vm_list
def parse_csv_into_apps(csv_data, prefix=''):
    """Parse input CSV into dictionary of UserDefinedApps
    Parameters:
        csv_data - list - List of dicts from read_csv
        prefix - str - Optional prefix for user-defined app name
    """
    app_dict = {}
    row_count = 1  # row 1 of the input CSV is the header

    umsg.log('Looking for apps and associated VMs...')

    for row in csv_data:
        row_count += 1
        if not row['app_name']:
            umsg.log(
                f'No application defined on row {row_count} of input CSV, skipping',
                level='warn')
            continue

        app_name = f"{prefix}{row['app_name']}"

        if app_name in app_dict:
            app = app_dict[app_name]

        else:
            app = UserDefinedApp(app_name)
            app_dict[app_name] = app

        app.add_member(member_name=row['entity_name'],
                       member_ip=row.get('entity_ip'))

    return app_dict
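# Worked example (assumed data, shaped like the rows this function expects):
# two rows naming the same app collapse into a single UserDefinedApp.
#
#   csv_data = [
#       {'app_name': 'billing', 'entity_name': 'bill-vm-01', 'entity_ip': '10.0.0.5'},
#       {'app_name': 'billing', 'entity_name': 'bill-vm-02', 'entity_ip': None},
#   ]
#   apps = parse_csv_into_apps(csv_data, prefix='prod-')
#   # apps -> {'prod-billing': <UserDefinedApp with two members>}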
    def read_csv(self, csv_str_io):
        """Parse CSV StringIO to dict
        Parameters:
            filename - StringIO - IO data from CSV file

        Returns:
            List of dicts, where each dict is a row in the input CSV
        """
        data = []
        csv_data = csv.DictReader(csv_str_io)

        row_count = 1
        for row in csv_data:
            row_count += 1

            if not row[self.entity_headers['app_name']]:
                umsg.log(
                    f'No application defined on row {row_count} of input CSV, skipping',
                    level='warn')
                continue

            try:
                data.append((self._process_entity_headers(row),
                             self._process_metric_headers(row)))

            except KeyError:
                umsg.log(
                    f'Something went wrong on line {row_count} while processing CSV',
                    level='error')
                raise

        return data
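    # Return shape (illustrative): each element pairs the entity columns with
    # the metric columns for one CSV row, e.g.
    #
    #   ({'app_name': 'billing', 'entity_name': 'bill-vm-01', ...},
    #    {'cpu_used': '1200', 'memory_used': '2048', ...})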
    def create_topology(self):
        data = self.get_csv_data()
        topology = Topology()
        apps = parse_data_into_apps(data, topology, self.app_prefix)
        umsg.log('Building DIF topology...')
        build_dif_topology(apps)

        return topology
    def test_prefix_inline(self, caplog):
        prefix = 'default'
        msg = 'txt'
        umsg.log(msg, prefix=prefix)

        expected = '[{}] {}'.format(prefix, msg)

        assert_that(caplog.record_tuples[0][2]).is_equal_to(expected)
def read_config_file(config_file):
    """Read JSON config file"""
    with open(config_file, 'r') as fp:
        try:
            return json.loads(fp.read())

        except json.JSONDecodeError:
            umsg.log(f'{config_file} must be JSON format', level='error')
    def test_verbose_end(self, capsys):
        msg = 'print this'
        end = '.'
        umsg.log(msg, 'verbose', end=end)

        expected = '{}{}'.format(msg, end)
        captured = capsys.readouterr()

        assert_that(captured.out).is_equal_to(expected)
    def test_prefix_setting(self, caplog):
        prefix = 'default'
        msg = 'text'
        umsg.set_attr('msg_prefix', prefix)
        umsg.log(msg)

        expected = '[{}] {}'.format(prefix, msg)

        assert_that(caplog.record_tuples[0][2]).is_equal_to(expected)
    def test_prefix_formatting(self, caplog):
        prefix = 'inline'
        msg = 'text'
        umsg.set_attr('msg_prefix_format', '<{prefix}> ')
        umsg.log(msg, prefix=prefix)

        expected = '<{}> {}'.format(prefix, msg)

        assert_that(caplog.record_tuples[0][2]).is_equal_to(expected)
    def _msg(self, *args, **kwargs):
        """Inteface for module level :py:func:`umsg._msg` function."""
        if 'prefix' not in kwargs:
            kwargs['prefix'] = self._umsg_log_prefix

        if 'logger' not in kwargs:
            kwargs['logger'] = self._umsg_logger

        umsg.log(*args, **kwargs)
    def create_dif_entities(self):
        for member_info in self.members.values():
            try:
                self.add_member_to_dif_topo(member_info)

            except ParentEntityNotFoundError:
                msg = f"No parent found for entity named: {member_info['name']} with type: {member_info['type']}, skipping"
                umsg.log(msg, level='error')

        return self.dif_topology
    def test_verbose_prefix_ignored(self, capsys):
        prefix = 'inline'
        msg = 'print this'
        end = '\n'
        umsg.log(msg, 'verbose', prefix=prefix)

        expected = '{}{}'.format(msg, end)
        captured = capsys.readouterr()

        assert_that(captured.out).is_equal_to(expected)
    def _make_dummy_levels(self):
        umsg.log(f'Creating dummy application and service for {self.name}',
                 level='debug')
        service_name = f'{self.name}_service'
        app_name = f'{self.name}_application'
        dummy_service = self._make_dummy_info('service', self.name,
                                              'businessApplication')
        dummy_app = self._make_dummy_info('application', service_name,
                                          'service')
        self.members[(service_name, 'service')] = dummy_service
        self.members[(app_name, 'application')] = dummy_app
def get_csv_data(filename, csv_location, entity_headers, match_ip):
    reader = DifCsvReader(filename, csv_location, entity_headers, match_ip)
    while not reader.file_downloaded:
        try:
            data = reader.download_csv_data()

        except CsvFileNotFoundError:
            umsg.log('No CSV found, waiting and then retrying')
            time.sleep(60)

    return reader.read_csv(data)
def actions(source, config, logger):
    umsg.log(f"Retrieving data from {config['resource']}", logger=logger)

    # source.connect() gives us the vmt-connect.Connection instance returned by
    # vmt-report; here we use the get_actions() method inline.
    # Another use would be the standard vmt-connect idiom:
    #   vmt = source.connect()
    #   res = vmt.get_actions()
    res = source.connect().get_actions()

    # get_actions() returns a list of action dicts; keep only the whitelisted
    # fields from each one.
    fields = ['createTime', 'actionType', 'details']
    return [{k: v for k, v in action.items() if k in fields}
            for action in res]
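# Return shape (sketch): one dict per action, trimmed to the whitelisted
# fields, e.g. [{'createTime': '...', 'actionType': 'MOVE', 'details': '...'}]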
    def test_formatter(self, capsys):
        prefix = 'fmt'
        msg = 'stream it'
        umsg.add_handler(logging.StreamHandler)
        umsg.log(msg, prefix=prefix)
        date = datetime.date.today().strftime('%Y-%m-%d')

        expected = '{} ([\\d]{{2}}:?){{3}} - INFO - <{}> {}'.format(
            date, prefix, msg)
        captured = capsys.readouterr()

        assert (re.match(expected, captured.err))
    def _process_entity_headers(self, row):
        entities = {}
        for k, v in self.entity_headers.items():
            try:
                entities[k] = row[v]

            except KeyError:
                umsg.log(
                    f'Incorrect entity field map entry: key: {k}, value: {v}',
                    level='error')
                raise

        return entities
def read_config_file(config_file):
    """Read JSON config file
    Parameters:
        config_file - str - Name of JSON config file
    Returns:
        Dict - Dict representation of JSON config file
    """
    with open(config_file, 'r') as fp:
        try:
            return json.loads(fp.read())

        except json.JSONDecodeError:
            umsg.log(f'{config_file} must be JSON format', level='error')
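# Example config file (illustrative; keys taken from the main() functions in
# this listing, values are placeholders):
#
#   {
#       "LOG_LEVEL": "info",
#       "LOG_DIR": "/var/log",
#       "LOG_FILE": "dif.log",
#       "INPUT_CSV_NAME": "apps.csv",
#       "CSV_LOCATION": "AWS",
#       "ENTITY_FIELD_MAP": {"app_name": "Application", "entity_name": "VM Name"},
#       "MATCH_IP": false,
#       "APP_PREFIX": "prod-"
#   }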
    def _process_metric_headers(self, row):
        metrics = {}
        for k, v in self.metric_headers.items():
            try:
                metrics[k] = row[v]

            except KeyError:
                if v:
                    umsg.log(
                        f'Incorrect metric field map entry: key: {k}, value: {v}',
                        level='error')
                    raise

        return metrics
def main(config_file):
    args = read_config_file(config_file)

    requests.packages.urllib3.disable_warnings(
        requests.packages.urllib3.exceptions.InsecureRequestWarning)
    umsg.init(level=args.get('LOG_LEVEL', 'info'))
    log_file = os.path.join(args.get('LOG_DIR', ''), args.get('LOG_FILE', ''))

    if log_file:
        handler = logging.handlers.RotatingFileHandler(log_file,
                                                       mode='a',
                                                       maxBytes=10 * 1024 *
                                                       1024,
                                                       backupCount=1,
                                                       encoding=None,
                                                       delay=0)
        umsg.add_handler(handler)

    else:
        umsg.add_handler(logging.StreamHandler())

    umsg.log('Starting script')
    csv_data = get_csv_data(filename=args['INPUT_CSV_NAME'],
                            csv_location=args['CSV_LOCATION'],
                            entity_headers=args.get('ENTITY_FIELD_MAP'),
                            match_ip=args.get('MATCH_IP', False))
    apps = parse_csv_into_apps(csv_data, args.get('APP_PREFIX', ''))

    if args.get('IGNORE_TURBO_VERSION'):
        spec = vc.VersionSpec(versions=[], required=False)
        vmt_conn = vc.Connection(os.environ['TURBO_ADDRESS'],
                                 os.environ['TURBO_USERNAME'],
                                 os.environ['TURBO_PASSWORD'],
                                 req_versions=spec)

    else:
        vmt_conn = vc.Connection(os.environ['TURBO_ADDRESS'],
                                 os.environ['TURBO_USERNAME'],
                                 os.environ['TURBO_PASSWORD'])
    turbo_vms = get_turbo_vms(vmt_conn, start=0, end=500, step=500)
    apps = match_apps_to_turbo_vms(apps, turbo_vms,
                                   args.get('MATCH_IP', False))
    make_apps_thru_atm(vmt_conn, apps)

    umsg.log('Finished script')
        def do_GET(self):
            if self.path == '/dif_metrics':
                try:
                    topology = self.create_topology()
                    umsg.log('Serving DIF topology JSON')
                    self.respond({'status': 200, 'content': topology.ToJSON()})

                except CsvDownloadError as e:
                    self.respond({
                        'status': e.status_code,
                        'content': e.message
                    })

                except Exception as e:
                    self.respond({
                        'status': 500,
                        'content': f'Something went wrong: {e}'
                    })
def parse_data_into_apps(csv_data, dif_topology, prefix):
    """Parse input CSV into dictionary of UserDefinedApps
    Parameters:
        csv_data - list - List of (entity, metric) dict tuples from read_csv
        dif_topology - dif.Topology - DIF Topology object
        prefix - str - Optional prefix for user-defined app name
    """
    app_dict = {}
    row_count = 1

    umsg.log('Looking for apps and associated VMs...')

    for entity, metric in csv_data:
        row_count += 1
        app_name = entity['app_name']

        if app_name in app_dict:
            app = app_dict[app_name]

        else:
            app = UserDefinedApp(app_name, dif_topology, prefix)
            app_dict[app_name] = app

        try:
            app.add_member(member_name=entity['entity_name'],
                           member_type=entity['entity_type'],
                           parent_name=entity['parent_name'],
                           parent_type=entity['parent_type'],
                           member_ip=entity['entity_ip'])

        except InvalidParentTypeError:
            msg = f'Invalid parent_type found on row {row_count} of input CSV'
            raise InvalidParentTypeError(msg)

        app.add_metrics(member_name=entity['entity_name'],
                        member_type=entity['entity_type'],
                        metrics=metric)

    return app_dict
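# Usage sketch (assumed objects): feed the (entity, metric) tuples from
# read_csv plus a fresh DIF Topology into the parser, as create_topology()
# does above:
#
#   topology = Topology()
#   apps = parse_data_into_apps(csv_data, topology, 'prod-')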
    def add_metrics(self, member_name, member_type, metrics):
        for metric_type, metric_val in metrics.items():
            metric_type, cap_or_used = metric_type.split('_')

            if metric_val:
                try:
                    self.is_valid_metric(member_type, metric_type)

                except InvalidMetricTypeError:
                    msg = f'{member_name} has an invalid metric type, skipping metric named: {metric_type}'
                    dbug_msg = (
                        f'Member type: [ {member_type} ], provided metric type: [ {metric_type} ], '
                        +
                        f'valid metric types: [ {", ".join(self.acceptable_metric_types[member_type])} ]'
                    )
                    umsg.log(msg, level='warning')
                    umsg.log(dbug_msg, level='debug')
                    continue  # actually skip the invalid metric, as logged

                self.member_metrics[(member_name, member_type)][(
                    metric_type, cap_or_used)] = float(metric_val)
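    # Metric key convention (inferred from the split('_') above): each CSV
    # metric column joins a metric type with a capacity/used suffix, so a
    # value like {'cpu_used': '1200'} is stored as
    #   self.member_metrics[(member_name, member_type)][('cpu', 'used')] = 1200.0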
def make_apps_thru_atm(conn, apps):
    current_apps = {
        app['displayName']: app['uuid']
        for app in conn.request('topologydefinitions')
    }

    for app in apps.values():
        if not app.member_uuids:
            umsg.log(
                f'No matching VMs found for app named: {app.name}, skipping')
            continue

        app.remove_members_without_matches()

        try:
            app.update_appl_topo(conn, current_apps[app.name])

        except KeyError:
            app.create_appl_topo(conn)

    return True
    def process_csv_location(self, provider):
        if provider not in {'AZURE', 'AWS', 'FTP'}:
            umsg.log(
                'Value for CSV_LOCATION is invalid. It must be one of: [ AZURE, AWS, FTP ]',
                level='error')
            raise InvalidConfigError()

        if provider == 'AZURE':
            self.provider = 'AZURE'
            self.connect_str = os.environ['AZURE_CONNECTION_STRING']
            self.container_name = os.environ['AZURE_CONTAINER_NAME']

        elif provider == 'AWS':
            self.provider = 'AWS'
            self.access_key_id = os.environ['AWS_ACCESS_KEY_ID']
            self.secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
            self.region_name = os.environ['AWS_REGION_NAME']
            self.bucket_name = os.environ['AWS_BUCKET_NAME']

        elif provider == 'FTP':
            self.provider = 'FTP'
            self.path = '/opt/turbonomic/data'
    def add_member(self, member_name, member_ip):
        member_info = {
            'name': member_name,
            'ip_address': '',
            'turbo_oid': None
        }

        try:
            member_info['ip_address'] = self._process_ips(member_ip)

        except IpAddressNotFoundError:
            umsg.log(f'No IP address defined for VM {member_name}',
                     level='debug')

        if member_info in self.members:
            umsg.log(
                f'Member {member_info["name"]} already exists in {self.name} application group',
                level='warn')

        else:
            self.members.append(member_info)
def main(config_file):
    args = read_config_file(config_file)
    host_name = '0.0.0.0'
    port_number = 8081
    umsg.init(level=args.get('LOG_LEVEL', 'INFO'))
    log_file = os.path.join(args.get('LOG_DIR', ''), args.get('LOG_FILE', ''))

    if log_file:
        handler = logging.handlers.RotatingFileHandler(log_file,
                                                       mode='a',
                                                       maxBytes=10 * 1024 *
                                                       1024,
                                                       backupCount=1,
                                                       encoding=None,
                                                       delay=0)
        umsg.add_handler(handler)

    else:
        umsg.add_handler(logging.StreamHandler())

    topology_handler = MakeHandlerClassFromArgs(args['INPUT_CSV_NAME'],
                                                args['CSV_LOCATION'],
                                                args.get('ENTITY_FIELD_MAP'),
                                                args.get('METRIC_FIELD_MAP'),
                                                args.get('APP_PREFIX'))

    httpd = HTTPServer((host_name, port_number), topology_handler)

    umsg.log(f'Server Starts - {host_name}:{port_number}')
    try:
        httpd.serve_forever()

    except KeyboardInterrupt:
        pass

    httpd.server_close()
    umsg.log(f'Server Stops - {host_name}:{port_number}')
    def create_appl_topo(self, conn):
        umsg.log(
            f'Creating application for BusinessApplication named: {self.name}')
        dto = self._prep_app_topo_dto()
        res = conn.request('topologydefinitions', method='POST', dto=dto)
        umsg.log(f'Successfully created app {self.name}.')
        umsg.log(f'Response Details: {res}', level='debug')

        return True