Example #1
    def test_film_version_03(self):
        "Update schema to version 0.3"
        self.execute_script(__file__, 'film-schema-0.3a.sql')
        self.execute_script(__file__, 'film-schema-0.3b.sql')

        # Run pg_dump against source database
        srcdump = self.tempfile_path('film-0.3-src.dump')
        self.run_pg_dump(srcdump, True, True)

        # Create source YAML file
        usercfg = self.tempfile_path("usercfg.yaml")
        with open(usercfg, 'w') as f:
            f.write(yamldump({'repository': {'path': self.tempfile_path('')}}))
        os.environ["PYRSEAS_USER_CONFIG"] = usercfg
        with open(self.tempfile_path("config.yaml"), 'w') as f:
            f.write(yamldump({'datacopy': {'schema sd': ['genre']}}))
        srcyaml = self.tempfile_path('film-0.3-src.yaml')
        self.create_yaml(srcyaml, True)

        # Migrate the target database
        self.migrate_target(srcyaml, self.tempfile_path('film-0.3.sql'))

        # Run pg_dump against target database
        targdump = self.tempfile_path('film-0.3.dump')
        self.run_pg_dump(targdump, False, True)

        # Create target YAML file
        targyaml = self.tempfile_path('film-0.3.yaml')
        self.create_yaml(targyaml)

        # diff film-0.3-src.dump against film-0.3.dump
        assert self.lines(srcdump) == self.lines(targdump)
        # diff film-0.3-src.yaml against film-0.3.yaml
        assert self.lines(srcyaml) == self.lines(targyaml)
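
For orientation, here is a minimal sketch of the two YAML documents the test writes before extracting the source database. It assumes yamldump wraps PyYAML's safe_dump; the repository path is a hypothetical stand-in for tempfile_path('').

import yaml

user_cfg = {'repository': {'path': '/tmp/test-area/'}}  # hypothetical path
repo_cfg = {'datacopy': {'schema sd': ['genre']}}

print(yaml.safe_dump(user_cfg, default_flow_style=False))
# repository:
#   path: /tmp/test-area/
print(yaml.safe_dump(repo_cfg, default_flow_style=False))
# datacopy:
#   schema sd:
#   - genre
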
Example #2
def test_repo_config(tmpdir):
    "Test a repository configuration file"
    ucfg = tmpdir.join(CFG_FILE)
    ucfg.write(yamldump({'repository': {'path': tmpdir.strpath}}))
    f = tmpdir.join("config.yaml")
    f.write(yamldump(CFG_DATA))
    os.environ["PYRSEAS_USER_CONFIG"] = ucfg.strpath
    cfg = Config()
    assert cfg['datacopy'] == CFG_TABLE_DATA
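
CFG_DATA and CFG_TABLE_DATA are defined elsewhere in the test module; the assertion only implies how they relate. A hypothetical reconstruction, with placeholder values:

# Inferred from: assert cfg['datacopy'] == CFG_TABLE_DATA
CFG_TABLE_DATA = {'schema public': ['t1']}   # placeholder contents
CFG_DATA = {'datacopy': CFG_TABLE_DATA}
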
Example #3
def test_repo_user_config(tmpdir):
    "Test a repository path specified in the user config"
    usercfg = {'repository': {'path': tmpdir.strpath}}
    userf = tmpdir.join("usercfg.yaml")
    userf.write(yamldump(usercfg))
    os.environ["PYRSEAS_USER_CONFIG"] = userf.strpath
    repof = tmpdir.join("config.yaml")
    repof.write(yamldump(CFG_DATA))
    cfg = Config()
    assert cfg['datacopy'] == CFG_TABLE_DATA
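
Both repository tests exercise the same resolution path: Config() reads the file named by PYRSEAS_USER_CONFIG, follows its repository path, and merges in the config.yaml found there. A hedged sketch of that lookup order, written for illustration rather than as the Pyrseas implementation:

import os
import yaml

def load_config_sketch():
    """Illustrative approximation of the lookup the tests rely on."""
    cfg = {}
    user_path = os.environ.get("PYRSEAS_USER_CONFIG")
    if user_path:
        with open(user_path) as f:
            cfg = yaml.safe_load(f) or {}
    repo_path = cfg.get('repository', {}).get('path')
    if repo_path:
        repo_file = os.path.join(repo_path, 'config.yaml')
        if os.path.exists(repo_file):
            with open(repo_file) as f:
                cfg.update(yaml.safe_load(f) or {})
    return cfg
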
Example #4
def main():
    """Augment database specifications"""
    parser = cmd_parser("Generate a modified schema for a PostgreSQL "
                        "database, in YAML format, augmented with specified "
                        "attributes and procedures", __version__)
    # TODO: processing of multiple files, owner and privileges
    parser.add_argument('-m', '--multiple-files', action='store_true',
                        help='multiple files (metadata directory)')
    parser.add_argument('-O', '--no-owner', action='store_true',
                        help='exclude object ownership information')
    parser.add_argument('-x', '--no-privileges', action='store_true',
                        dest='no_privs',
                        help='exclude privilege (GRANT/REVOKE) information')
    parser.add_argument('spec', nargs='?', type=FileType('r'),
                        default=sys.stdin, help='YAML augmenter specification')
    cfg = parse_args(parser)
    output = cfg['files']['output']
    options = cfg['options']
    augdb = AugmentDatabase(cfg)
    augmap = yaml.safe_load(options.spec)
    try:
        outmap = augdb.apply(augmap)
    except KeyError as exc:
        sys.exit("ERROR: %s" % str(exc))
    print(yamldump(outmap), file=output or sys.stdout)
    if output:
        output.close()
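
The spec argument uses argparse's optional-positional-with-stdin-default pattern, so an augmenter specification can be piped in instead of named on the command line. A self-contained sketch of just that pattern:

import sys
from argparse import ArgumentParser, FileType

p = ArgumentParser()
p.add_argument('spec', nargs='?', type=FileType('r'), default=sys.stdin)
args = p.parse_args([])        # no positional argument supplied
assert args.spec is sys.stdin  # the spec falls back to standard input
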
Example #5
    def to_map(self, db, opts):
        """Convert the object dictionary to a regular dictionary

        :param db: db used to tie the objects together
        :param opts: options to include/exclude information, etc.
        :return: dictionary

        Invokes the `to_map` method of each object to construct the
        dictionary.  If `opts` specifies a directory, the objects are
        written to files in that directory.
        """
        objdict = {}
        for objkey in sorted(self.keys()):
            obj = self[objkey]
            objmap = obj.to_map(db, opts.no_owner, opts.no_privs)
            if objmap is not None:
                extkey = obj.extern_key()
                outobj = {extkey: objmap}
                if opts.multiple_files:
                    filepath = obj.extern_filename()
                    with open(os.path.join(opts.metadata_dir, filepath),
                              'a') as f:
                        f.write(yamldump(outobj))
                    outobj = {extkey: filepath}
                objdict.update(outobj)
        return objdict
Example #6
    def to_map(self, opts):
        """Convert the object dictionary to a regular dictionary

        :param opts: options to include/exclude information, etc.
        :return: dictionary

        Invokes the `to_map` method of each object to construct the
        dictionary.  If `opts` specifies a directory, the objects are
        written to files in that directory.
        """
        objdict = {}
        for objkey in sorted(self.keys()):
            obj = self[objkey]
            objmap = obj.to_map(opts.no_owner, opts.no_privs)
            if objmap is not None:
                extkey = obj.extern_key()
                outobj = {extkey: objmap}
                if opts.multiple_files:
                    filepath = os.path.join(opts.metadata_dir,
                                            obj.extern_filename())
                    with open(filepath, 'a') as f:
                        f.write(yamldump(outobj))
                    outobj = {extkey: filepath}
                objdict.update(outobj)
        return objdict
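
Besides dropping the db parameter, the second version changes what gets recorded in the returned map when writing multiple files: the first version stores the bare file name, the second the name joined with the metadata directory. In isolation, with hypothetical values:

import os

metadata_dir = 'metadata'         # hypothetical opts.metadata_dir
filename = 'table.film.yaml'      # hypothetical obj.extern_filename()

first_value = filename                               # 'table.film.yaml'
second_value = os.path.join(metadata_dir, filename)  # 'metadata/table.film.yaml'
assert first_value != second_value
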
Example #7
def test_user_config(tmpdir):
    "Test a user configuration file"
    f = tmpdir.join(CFG_FILE)
    f.write(yamldump(USER_CFG_DATA))
    os.environ["PYRSEAS_USER_CONFIG"] = f.strpath
    cfg = Config()
    assert cfg['database'] == {'port': 5433}
    assert cfg['output'] == {'version_comment': True}
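
USER_CFG_DATA is not shown in this excerpt; the two assertions imply it carries at least these sections. A hypothetical reconstruction:

USER_CFG_DATA = {
    'database': {'port': 5433},
    'output': {'version_comment': True},
}
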
Example #8
    def to_map(self):
        """Convert the db maps to a single hierarchy suitable for YAML

        :return: a YAML-suitable dictionary (without Python objects)
        """
        if not self.db:
            self.from_catalog(True)

        opts = self.config['options']

        def mkdir_parents(dir):
            head, tail = os.path.split(dir)
            if head and not os.path.isdir(head):
                mkdir_parents(head)
            if tail:
                os.mkdir(dir)

        if opts.multiple_files:
            opts.metadata_dir = self.config['files']['metadata_path']
            if not os.path.exists(opts.metadata_dir):
                mkdir_parents(opts.metadata_dir)
            dbfilepath = os.path.join(opts.metadata_dir, 'database.%s.yaml' %
                                      self.dbconn.dbname)
            if os.path.exists(dbfilepath):
                with open(dbfilepath, 'r') as f:
                    objmap = yaml.safe_load(f)
                for obj, val in list(objmap.items()):
                    if isinstance(val, dict):
                        dirpath = ''
                        for schobj, fpath in list(val.items()):
                            filepath = os.path.join(opts.metadata_dir, fpath)
                            if os.path.exists(filepath):
                                os.remove(filepath)
                                if schobj == 'schema':
                                    (dirpath, ext) = os.path.splitext(filepath)
                        if os.path.exists(dirpath):
                            os.rmdir(dirpath)
                    else:
                        filepath = os.path.join(opts.metadata_dir, val)
                        if os.path.exists(filepath):
                            os.remove(filepath)

        dbmap = self.db.extensions.to_map(self.db, opts)
        dbmap.update(self.db.languages.to_map(self.db, opts))
        dbmap.update(self.db.casts.to_map(self.db, opts))
        dbmap.update(self.db.fdwrappers.to_map(self.db, opts))
        dbmap.update(self.db.eventtrigs.to_map(self.db, opts))
        if 'datacopy' in self.config:
            opts.data_dir = self.config['files']['data_path']
            if not os.path.exists(opts.data_dir):
                mkdir_parents(opts.data_dir)
        dbmap.update(self.db.schemas.to_map(self.db, opts))

        if opts.multiple_files:
            with open(dbfilepath, 'w') as f:
                f.write(yamldump(dbmap))

        return dbmap
Example #9
    def to_map(self):
        """Convert the db maps to a single hierarchy suitable for YAML

        :return: a YAML-suitable dictionary (without Python objects)
        """
        if not self.db:
            self.from_catalog()

        opts = self.config['options']

        def mkdir_parents(dir):
            head, tail = os.path.split(dir)
            if head and not os.path.isdir(head):
                mkdir_parents(head)
            if tail:
                os.mkdir(dir)

        if opts.multiple_files:
            opts.metadata_dir = self.config['files']['metadata_path']
            if not os.path.exists(opts.metadata_dir):
                mkdir_parents(opts.metadata_dir)
            dbfilepath = os.path.join(opts.metadata_dir, 'database.%s.yaml' %
                                      self.dbconn.dbname)
            if os.path.exists(dbfilepath):
                with open(dbfilepath, 'r') as f:
                    objmap = yaml.safe_load(f)
                for obj, val in objmap.items():
                    if isinstance(val, dict):
                        dirpath = ''
                        for schobj, fpath in val.items():
                            filepath = os.path.join(opts.metadata_dir, fpath)
                            if os.path.exists(filepath):
                                os.remove(filepath)
                                if schobj == 'schema':
                                    (dirpath, ext) = os.path.splitext(filepath)
                        if os.path.exists(dirpath):
                            os.rmdir(dirpath)
                    else:
                        filepath = os.path.join(opts.metadata_dir, val)
                        if os.path.exists(filepath):
                            os.remove(filepath)

        dbmap = self.db.extensions.to_map(opts)
        dbmap.update(self.db.languages.to_map(opts))
        dbmap.update(self.db.casts.to_map(opts))
        dbmap.update(self.db.fdwrappers.to_map(opts))
        dbmap.update(self.db.eventtrigs.to_map(opts))
        if 'datacopy' in self.config:
            opts.data_dir = self.config['files']['data_path']
            if not os.path.exists(opts.data_dir):
                mkdir_parents(opts.data_dir)
        dbmap.update(self.db.schemas.to_map(opts))

        if opts.multiple_files:
            with open(dbfilepath, 'w') as f:
                f.write(yamldump(dbmap))

        return dbmap
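
The nested mkdir_parents helper is, in effect, a hand-rolled os.makedirs. On Python 3 the same guard-and-create collapses to a single standard-library call:

import os

def mkdir_parents(path):
    # Creates intermediate directories as needed; no error if path exists.
    os.makedirs(path, exist_ok=True)
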
Example #10
def test_cmd_parser(tmpdir):
    "Test parsing a configuration file specified on the command line"
    f = tmpdir.join(CFG_FILE)
    f.write(yamldump(CFG_DATA))
    sys.argv = ['testprog', 'testdb', '--config', f.strpath]
    os.environ["PYRSEAS_USER_CONFIG"] = ''
    parser = cmd_parser("Test description", '0.0.1')
    cfg = parse_args(parser)
    assert cfg['datacopy'] == CFG_TABLE_DATA
Example #11
def test_parse_repo_config(tmpdir):
    "Test parsing a repository configuration file in the current directory"
    f = tmpdir.join('config.yaml')
    f.write(yamldump(CFG_DATA))
    os.chdir(tmpdir.strpath)
    sys.argv = ['testprog', 'testdb']
    os.environ["PYRSEAS_USER_CONFIG"] = ''
    parser = cmd_parser("Test description", '0.0.1')
    cfg = parse_args(parser)
    assert cfg['datacopy'] == CFG_TABLE_DATA
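
This test mutates process-global state (sys.argv, os.environ and the working directory) without restoring it. A hedged variant using pytest's monkeypatch fixture, which undoes all three automatically after the test (module-level names come from the same test file):

def test_parse_repo_config_isolated(tmpdir, monkeypatch):
    "Same scenario, with automatic cleanup of argv, env and cwd"
    f = tmpdir.join('config.yaml')
    f.write(yamldump(CFG_DATA))
    monkeypatch.chdir(tmpdir.strpath)
    monkeypatch.setattr(sys, 'argv', ['testprog', 'testdb'])
    monkeypatch.setenv("PYRSEAS_USER_CONFIG", '')
    parser = cmd_parser("Test description", '0.0.1')
    cfg = parse_args(parser)
    assert cfg['datacopy'] == CFG_TABLE_DATA
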
Example #12
def main(schema=None):
    """Convert database table specifications to YAML."""
    parser = cmd_parser("Extract the schema of a PostgreSQL database in "
                        "YAML format", __version__)
    parser.add_argument('-m', '--multiple-files', action='store_true',
                        help='output to multiple files (metadata directory)')
    parser.add_argument('-O', '--no-owner', action='store_true',
                        help='exclude object ownership information')
    parser.add_argument('-x', '--no-privileges', action='store_true',
                        dest='no_privs',
                        help='exclude privilege (GRANT/REVOKE) information')
    group = parser.add_argument_group("Object inclusion/exclusion options",
                                      "(each can be given multiple times)")
    group.add_argument('-n', '--schema', metavar='SCHEMA', dest='schemas',
                       action='append', default=[],
                       help="extract the named schema(s) (default all)")
    group.add_argument('-N', '--exclude-schema', metavar='SCHEMA',
                       dest='excl_schemas', action='append', default=[],
                       help="do NOT extract the named schema(s) "
                       "(default none)")
    group.add_argument('-t', '--table', metavar='TABLE', dest='tables',
                       action='append', default=[],
                       help="extract the named table(s) (default all)")
    group.add_argument('-T', '--exclude-table', metavar='TABLE',
                       dest='excl_tables', action='append', default=[],
                       help="do NOT extract the named table(s) "
                       "(default none)")
    parser.set_defaults(schema=schema)
    cfg = parse_args(parser)
    output = cfg['files']['output']
    options = cfg['options']
    if options.multiple_files and output:
        parser.error("Cannot specify both --multiple-files and --output")

    db = Database(cfg)
    dbmap = db.to_map()

    if not options.multiple_files:
        print(yamldump(dbmap), file=output or sys.stdout)
        if output:
            output.close()
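
A hedged in-process driver for this entry point, faking the argv the command-line tool would receive (the program and database names are placeholders):

import sys

sys.argv = ['dbtoyaml', 'moviesdb', '-n', 'sd', '--no-owner']
main()  # prints the YAML map for schema "sd", without ownership info
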
Example #13
    def to_map(self, dbschemas, opts):
        """Convert tables, etc., dictionaries to a YAML-suitable format

        :param dbschemas: dictionary of schemas
        :param opts: options to include/exclude schemas/tables, etc.
        :return: dictionary
        """
        if self.name == 'pyrseas':
            return {}
        no_owner = opts.no_owner
        no_privs = opts.no_privs
        schbase = {} if no_owner else {'owner': self.owner}
        if not no_privs and self.privileges:
            schbase.update({'privileges': self.map_privs()})
        if self.description is not None:
            schbase.update(description=self.description)

        schobjs = []
        seltbls = getattr(opts, 'tables', [])
        if hasattr(self, 'tables'):
            for objkey in self.tables:
                if not seltbls or objkey in seltbls:
                    obj = self.tables[objkey]
                    schobjs.append((obj, obj.to_map(dbschemas, opts)))

        def mapper(objtypes):
            if hasattr(self, objtypes):
                schemadict = getattr(self, objtypes)
                for objkey in schemadict:
                    if objtypes == 'sequences' or (
                            not seltbls or objkey in seltbls):
                        obj = schemadict[objkey]
                        schobjs.append((obj, obj.to_map(opts)))

        for objtypes in ['ftables', 'sequences', 'views', 'matviews']:
            mapper(objtypes)

        def mapper2(objtypes):
            if hasattr(self, objtypes):
                schemadict = getattr(self, objtypes)
                for objkey in schemadict:
                    obj = schemadict[objkey]
                    schobjs.append((obj, obj.to_map(no_owner)))

        if not seltbls:
            for objtypes in ['conversions', 'domains',
                             'operators', 'operclasses', 'operfams',
                             'tsconfigs', 'tsdicts', 'tsparsers', 'tstempls',
                             'types', 'collations']:
                mapper2(objtypes)
            if hasattr(self, 'functions'):
                for objkey in self.functions:
                    obj = self.functions[objkey]
                    schobjs.append((obj, obj.to_map(no_owner, no_privs)))

        # special case for pg_catalog schema
        if self.name == 'pg_catalog' and not schobjs:
            return {}

        if hasattr(self, 'datacopy') and self.datacopy:
            dir = self.extern_dir(opts.data_dir)
            if not os.path.exists(dir):
                os.mkdir(dir)
            for tbl in self.datacopy:
                self.tables[tbl].data_export(dbschemas.dbconn, dir)

        if opts.multiple_files:
            dir = self.extern_dir(opts.metadata_dir)
            if not os.path.exists(dir):
                os.mkdir(dir)
            filemap = {}
            for obj, objmap in schobjs:
                if objmap is not None:
                    extkey = obj.extern_key()
                    filepath = os.path.join(dir, obj.extern_filename())
                    with open(filepath, 'a') as f:
                        f.write(yamldump({extkey: objmap}))
                    outobj = {extkey:
                              os.path.relpath(filepath, opts.metadata_dir)}
                    filemap.update(outobj)
            # always write the schema YAML file
            filepath = self.extern_filename()
            extkey = self.extern_key()
            with open(os.path.join(opts.metadata_dir, filepath), 'a') as f:
                f.write(yamldump({extkey: schbase}))
            filemap.update(schema=filepath)
            return {extkey: filemap}

        schmap = dict((obj.extern_key(), objmap) for obj, objmap in schobjs
                      if objmap is not None)
        schmap.update(schbase)
        return {self.extern_key(): schmap}
Example #14
    def to_map(self, db, dbschemas, opts):
        """Convert tables, etc., dictionaries to a YAML-suitable format

        :param db: db used to tie the objects together
        :param dbschemas: dictionary of schemas
        :param opts: options to include/exclude schemas/tables, etc.
        :return: dictionary
        """
        if self.name == 'pyrseas':
            return {}
        no_owner = opts.no_owner
        no_privs = opts.no_privs
        schbase = {} if no_owner else {'owner': self.owner}
        if not no_privs:
            schbase.update({'privileges': self.map_privs()})
        if self.description is not None:
            schbase.update(description=self.description)

        schobjs = []
        seltbls = getattr(opts, 'tables', [])
        if hasattr(self, 'tables'):
            for objkey in self.tables:
                if not seltbls or objkey in seltbls:
                    obj = self.tables[objkey]
                    schobjs.append((obj, obj.to_map(db, dbschemas, opts)))

        def mapper(objtypes):
            if hasattr(self, objtypes):
                schemadict = getattr(self, objtypes)
                for objkey in schemadict:
                    if objtypes == 'sequences' or (
                            not seltbls or objkey in seltbls):
                        obj = schemadict[objkey]
                        schobjs.append((obj, obj.to_map(db, opts)))

        for objtypes in ['ftables', 'sequences', 'views', 'matviews']:
            mapper(objtypes)

        def mapper2(objtypes, privs=False):
            if hasattr(self, objtypes):
                schemadict = getattr(self, objtypes)
                for objkey in schemadict:
                    obj = schemadict[objkey]
                    if privs:
                        dct = obj.to_map(db, no_owner, no_privs)
                    else:
                        dct = obj.to_map(db, no_owner)
                    schobjs.append((obj, dct))

        if not seltbls:
            for objtypes in ('conversions', 'collations', 'operators',
                             'operclasses', 'operfams', 'tsconfigs',
                             'tsdicts', 'tsparsers', 'tstempls'):
                mapper2(objtypes)
            for objtypes in ('types', 'domains'):
                mapper2(objtypes, True)
            if hasattr(self, 'functions'):
                for objkey in self.functions:
                    obj = self.functions[objkey]
                    schobjs.append((obj, obj.to_map(db, no_owner, no_privs)))

        # special case for pg_catalog schema
        if self.name == 'pg_catalog' and not schobjs:
            return {}

        if hasattr(self, 'datacopy') and self.datacopy:
            dir = self.extern_dir(opts.data_dir)
            if not os.path.exists(dir):
                os.mkdir(dir)
            for tbl in self.datacopy:
                self.tables[tbl].data_export(dbschemas.dbconn, dir)

        if opts.multiple_files:
            dir = self.extern_dir(opts.metadata_dir)
            if not os.path.exists(dir):
                os.mkdir(dir)
            filemap = {}
            for obj, objmap in schobjs:
                if objmap is not None:
                    extkey = obj.extern_key()
                    filepath = os.path.join(dir, obj.extern_filename())
                    with open(filepath, 'a') as f:
                        f.write(yamldump({extkey: objmap}))
                    outobj = {extkey:
                              os.path.relpath(filepath, opts.metadata_dir)}
                    filemap.update(outobj)
            # always write the schema YAML file
            filepath = self.extern_filename()
            extkey = self.extern_key()
            with open(os.path.join(opts.metadata_dir, filepath), 'a') as f:
                f.write(yamldump({extkey: schbase}))
            filemap.update(schema=filepath)
            return {extkey: filemap}

        schmap = dict((obj.extern_key(), objmap) for obj, objmap in schobjs
                      if objmap is not None)
        schmap.update(schbase)
        return {self.extern_key(): schmap}
Example #15
def main(schema=None):
    """Convert database table specifications to YAML."""
    parser = cmd_parser(
        "Extract the schema of a PostgreSQL database in "
        "YAML format", __version__)
    parser.add_argument('-m',
                        '--multiple-files',
                        action='store_true',
                        help='output to multiple files (metadata directory)')
    parser.add_argument('-O',
                        '--no-owner',
                        action='store_true',
                        help='exclude object ownership information')
    parser.add_argument('-x',
                        '--no-privileges',
                        action='store_true',
                        dest='no_privs',
                        help='exclude privilege (GRANT/REVOKE) information')
    group = parser.add_argument_group("Object inclusion/exclusion options",
                                      "(each can be given multiple times)")
    group.add_argument('-n',
                       '--schema',
                       metavar='SCHEMA',
                       dest='schemas',
                       action='append',
                       default=[],
                       help="extract the named schema(s) (default all)")
    group.add_argument('-N',
                       '--exclude-schema',
                       metavar='SCHEMA',
                       dest='excl_schemas',
                       action='append',
                       default=[],
                       help="do NOT extract the named schema(s) "
                       "(default none)")
    group.add_argument('-t',
                       '--table',
                       metavar='TABLE',
                       dest='tables',
                       action='append',
                       default=[],
                       help="extract the named table(s) (default all)")
    group.add_argument('-T',
                       '--exclude-table',
                       metavar='TABLE',
                       dest='excl_tables',
                       action='append',
                       default=[],
                       help="do NOT extract the named table(s) "
                       "(default none)")
    parser.set_defaults(schema=schema)
    cfg = parse_args(parser)
    output = cfg['files']['output']
    options = cfg['options']
    if options.multiple_files and output:
        parser.error("Cannot specify both --multiple-files and --output")

    db = Database(cfg)
    dbmap = db.to_map()

    if not options.multiple_files:
        print(yamldump(dbmap), file=output or sys.stdout)
        if output:
            output.close()