def test_detect_existing(self, mock_subproc_glob):
    """A backstore directory already present in configfs selects that type."""
    content.Common.config = {
        "pools": [{
            "pool": "rbd",
            "gateways": [{
                "host": "igw1",
                "tpg": [{"image": "archive"}],
            }],
        }],
    }
    # Simulate an existing configfs entry for a created backstore.
    mock_subproc_glob.return_value = ["/s/k/c/t/c/BACKSTORE_0/archive"]
    self.b = host.Backstores(None)
    assert self.b.selected == "BACKSTORE"
def test_create(self, mock_subproc_popen):
    """Creating a backstore invokes an external command via Popen."""
    content.Common.config = {
        "pools": [{
            "pool": "rbd",
            "gateways": [{
                "host": "igw1",
                "tpg": [{"image": "archive"}],
            }],
        }],
    }
    self.b = host.Backstores("iblock")
    self.b.create()
    # The subprocess mock must have been called to issue the command.
    assert mock_subproc_popen.called
def test_iblock_does_nothing(self, mock_subproc_glob):
    """When the backstore path already exists, no commands are queued."""
    content.Common.config = {
        "pools": [{
            "pool": "rbd",
            "gateways": [{
                "host": "igw1",
                "tpg": [{"image": "archive"}],
            }],
        }],
    }
    # glob finds a matching path, so there is nothing left to create.
    mock_subproc_glob.return_value = "globbed/path/name"
    self.b = host.Backstores("iblock")
    assert not self.b.cmds
def test_iblock(self):
    """An iblock backstore queues the expected targetcli create command."""
    content.Common.config = {
        "pools": [{
            "pool": "rbd",
            "gateways": [{
                "host": "igw1",
                "tpg": [{"image": "archive"}],
            }],
        }],
    }
    self.b = host.Backstores("iblock")
    expected = [[
        'targetcli', '/backstores/iblock', 'create',
        'name=rbd-archive', 'dev=/dev/rbd/rbd/archive'
    ]]
    assert self.b.cmds == expected
def main(args=None):
    """LRDB main app.

    Apply stored configuration by default.  Otherwise, execute the
    alternate path from the specified options.

    args - expects parse_args() result from argparse
    """
    # Merge any options stored in sysconfig before parsing.
    sys.argv.extend(sysconfig_options())
    if args is None:
        args = parser.parse_args()
    # Using an explicit editor implies edit mode.
    if args.editor is not None:
        args.edit = True
    disable_check()
    configs = content.Configs(args.config, args.ceph, args.host,
                              args.name, args.pools)

    logging.basicConfig(format='%(levelname)s: %(message)s')
    root = logging.getLogger()
    if args.verbose or args.wipe or args.host:
        root.level = logging.INFO
    if args.debug:
        root.level = logging.DEBUG
    logging.info("Executing {}".format(" ".join(sys.argv)))

    if args.wipe:
        configs.wipe(content.Cluster())
    elif args.clear:
        try:
            configs.clear()
        except RuntimeError:
            # Kernel modules are already unloaded
            pass
        # --unmap combined with --clear: also unmap the rbd images.
        if args.unmap:
            images = host.Images()
            images.unmap()
    elif args.unmap:
        images = host.Images()
        images.unmap()
    elif args.file:
        # Replace the stored configuration with the file contents.
        conn = content.Cluster()
        contents = content.Content()
        contents.read(args.file)
        configs.wipe(conn)
        contents.save(conn)
    elif args.add:
        # Merge the file contents into the stored configuration.
        contents = content.Content()
        contents.read(args.add)
        contents.save(content.Cluster())
    else:
        sections = {
            "pools": content.Pools(),
            "portals": content.PortalSection(),
            "targets": content.Targets(),
            "authentications": content.Authentications()
        }
        gateways = content.Gateways(sections)
        if args.output:
            configs.retrieve(content.Cluster(), sections, gateways)
            configs.display()
        elif args.migrate:
            configs.retrieve(content.Cluster(), sections, gateways)
            configs.migrate(args.migrate)
            configs.display()
        elif args.edit:
            conn = content.Cluster()
            configs.retrieve(conn, sections, gateways)
            contents = content.Content()
            contents.edit(args.editor)
            contents.save(conn)
        elif args.local:
            # Display only this host's portion of the configuration.
            gateways.hostonly()
            configs.retrieve(content.Cluster(), sections, gateways)
            configs.display()
        else:
            # Default path: apply the stored configuration to this host.
            gateways.hostonly()
            configs.retrieve(content.Cluster(), sections, gateways)

            images = host.Images()
            images.map()

            backstores = host.Backstores(args.backstore)
            backstores.create()
            backstore_attrs = host.BackstoreAttributes()
            backstore_attrs.assign()

            iscsi = host.Iscsi()
            iscsi.create()

            lun_assignment = host.LunAssignment()
            tpgs = host.TPGs(host.TPGCounter(), host.PortalIndex(),
                             lun_assignment)
            tpgs.create()
            # Keep TPGs disabled until fully configured.
            tpgs.disable_all()
            tpg_attrs = host.TPGattributes()
            tpg_attrs.assign()

            luns = host.Luns(lun_assignment)
            luns.create()
            portals = host.Portals()
            portals.create()
            acls = host.Acls()
            acls.create()
            maps = host.Map()
            maps.map()
            auth = host.Auth()
            auth.create()

            # Enable only the TPG belonging to this host.
            tpgs.enable_local()