def main(argv=None):
    """Entry point for the ERD generator command-line tool.

    Parses arguments with docopt, fetches the model JSON (from --url or
    the default/environment-derived service URL), builds a SQLAlchemy
    MetaData model from it, and renders the diagram to <outfile>.
    """
    # NOTE: docopt parses the option table out of this string, so its
    # multi-line layout is significant.
    usage = """Data Model ERD Generator

Generates an entity relationship diagram for the data model specified.
If passing a custom URL, the data model returned must be in the JSON
format defined by the chop-dbhi/data-models package.

The generated diagram is saved to the named output file.

Usage: erd.py [options] <model> <version> <outfile>

Options:
    -h --help         Show this screen.
    -u URL --url=URL  Retrieve model JSON from this URL instead of the
                      default or environment-variable-passed URL.

"""  # noqa

    from docopt import docopt

    # Ignore command name if called from command line.
    argv = argv or sys.argv[1:]

    args = docopt(usage, argv=argv, version=__version__)

    # An explicit --url overrides the service/environment-derived URL.
    url = args['--url'] or get_url(args['<model>'], args['<version>'])
    model_json = requests.get(url).json()

    metadata = MetaData()
    make_model(model_json, metadata)

    render_er(metadata, args['<outfile>'])
def test_pk():
    """The 'pk' column of test_table_1 should be part of its primary key."""
    metadata = make_model(model_json, MetaData())
    table = metadata.tables['test_table_1']
    ok_('pk' in table.primary_key.columns)
def test_not_null():
    """The 'string' column of test_table_1 should be NOT NULL."""
    metadata = make_model(model_json, MetaData())
    column = metadata.tables['test_table_1'].columns['string']
    ok_(not column.nullable)
def test_index():
    """test_table_2 should carry an index covering its 'string' column.

    Table.indexes is an unordered set, so asserting on whichever index
    happens to iterate first is nondeterministic when a table has more
    than one index. Instead, search all indexes for one that contains
    the column (mirrors the filtering style of test_unique).
    """
    metadata = MetaData()
    metadata = make_model(model_json, metadata)
    tbl2 = metadata.tables['test_table_2']
    for idx in tbl2.indexes:
        if 'string' in idx.columns:
            break
    else:
        raise AssertionError('Index not found.')
def write(model, model_version, output, service):
    """Render an entity relationship diagram for the data model specified.

    Arguments:

        model          Model to generate the ERD for.
        model_version  Model version to generate the ERD for.
        output         Output file the diagram is written to.
        service        Base URL of the data models service to use.

    Raises:

        ImportError, if erlalchemy cannot be imported
    """  # noqa
    metadata = MetaData()
    make_model(get_model_json(model, model_version, service), metadata)

    # Imported lazily so the rest of the package works without eralchemy
    # installed; this is the documented ImportError site.
    from eralchemy import render_er
    render_er(metadata, output)
def test_unique():
    """test_table_2 should have a UniqueConstraint covering 'integer'."""
    metadata = make_model(model_json, MetaData())
    table = metadata.tables['test_table_2']
    uniques = [c for c in table.constraints
               if isinstance(c, UniqueConstraint)]
    if not uniques:
        raise AssertionError('UniqueConstraint not found.')
    ok_('integer' in uniques[0].columns)
def test_foreign_key():
    """test_table_1.integer should reference test_table_2.integer."""
    metadata = make_model(model_json, MetaData())
    table = metadata.tables['test_table_1']
    for constraint in table.constraints:
        if not isinstance(constraint, ForeignKeyConstraint):
            continue
        ok_('integer' in constraint.columns)
        fk = list(constraint.columns['integer'].foreign_keys)[0]
        eq_(fk.target_fullname, 'test_table_2.integer')
        break
    else:
        raise AssertionError('ForeignKeyConstraint not found.')
def test_foreign_key():
    """test_table_1 should hold a foreign key from its 'integer' column
    to test_table_2.integer."""
    metadata = MetaData()
    metadata = make_model(model_json, metadata)
    all_constraints = metadata.tables['test_table_1'].constraints
    fk_constraints = [c for c in all_constraints
                      if isinstance(c, ForeignKeyConstraint)]
    if not fk_constraints:
        raise AssertionError('ForeignKeyConstraint not found.')
    constraint = fk_constraints[0]
    ok_('integer' in constraint.columns)
    foreign_keys = list(constraint.columns['integer'].foreign_keys)
    eq_(foreign_keys[0].target_fullname, 'test_table_2.integer')
import requests
from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base

from dmsa.settings import get_url
from dmsa.makers import make_model

# Fetch and build the i2b2_pedsnet v2 model from the data models service
# at import time.
url = get_url('i2b2_pedsnet', 'v2')
model_json = requests.get(url).json()

metadata = MetaData()
make_model(model_json, metadata)

Base = declarative_base(metadata=metadata)

# Expose each table at module level under a CamelCase alias, e.g.
# 'visit_occurrence' -> VisitOccurrence.
# NOTE(review): these module attributes are sqlalchemy Table objects, not
# declarative classes; Base is only holding the metadata here — confirm
# that is the intent.
for table in metadata.tables.values():
    class_name = ''.join(word.capitalize() for word in table.name.split('_'))
    globals()[class_name] = table
def main(argv=None):
    """Entry point for the DDL generator command-line tool.

    Parses arguments with docopt, fetches the model JSON, builds a
    SQLAlchemy MetaData model, and emits CREATE/DROP/DELETE statements
    for the requested dialect to stdout (or returns them with --return).
    """
    # NOTE: docopt parses the option table out of this string, so its
    # multi-line layout is significant.
    usage = """Data Model DDL Generator

Generates data definition language for the data model specified in the
given DBMS dialect. If passing a custom URL, the data model returned
must be in the JSON format defined by the chop-dbhi/data-models package.

See http://docs.sqlalchemy.org/en/rel_1_0/dialects/index.html for
available dialects.

The generated DDL is written to stdout.

Usage: ddl.py [options] <model> <version> <dialect>

Options:
    -h --help           Show this screen.
    -t --xtables        Exclude tables from the generated DDL.
    -c --xconstraints   Exclude constraints from the generated DDL.
    -i --xindexes       Exclude indexes from the generated DDL.
    -d --drop           Generate DDL to drop, instead of create, objects.
    -x --delete-data    Generate DML to delete data.
    -u URL --url=URL    Retrieve model JSON from this URL instead of the
                        default or environment-variable-passed URL.
    -r --return         Return DDL as python string object instead of
                        printing it to stdout.

"""  # noqa

    from docopt import docopt

    # Ignore command name if called from command line.
    argv = argv or sys.argv[1:]

    args = docopt(usage, argv=argv, version=__version__)

    url = args['--url'] or get_url(args['<model>'], args['<version>'])
    model_json = requests.get(url).json()

    metadata = MetaData()
    make_model(model_json, metadata)

    # A dialect-only engine (no real connection) is enough to compile DDL.
    engine = create_engine(args['<dialect>'] + '://')

    output = []

    # --delete-data short-circuits: emit only the deletion DML and exit.
    if args['--delete-data']:
        tables = reversed(metadata.sorted_tables)
        output.extend(delete_data(tables, engine))

        output = ''.join(output)

        if args['--return']:
            return output
        else:
            sys.stdout.write(output)
            return

    if not args['--xtables']:
        if not args['--drop']:
            output.extend(table_ddl(metadata.sorted_tables, engine, False))
        else:
            # Drop in reverse dependency order.
            output.extend(table_ddl(reversed(metadata.sorted_tables),
                                    engine, True))

    # Constraint/index statements are appended after CREATEs but prepended
    # before DROPs, so objects are always removed in dependency order.
    if not args['--xconstraints']:
        if not args['--drop']:
            output.append('\n')
            output.extend(constraint_ddl(metadata.sorted_tables, engine,
                                         False))
        else:
            output.insert(0, '\n')
            output[0:0] = constraint_ddl(reversed(metadata.sorted_tables),
                                        engine, True)

    if not args['--xindexes']:
        if not args['--drop']:
            output.append('\n')
            output.extend(index_ddl(metadata.sorted_tables, engine, False))
        else:
            output.insert(0, '\n')
            output[0:0] = index_ddl(reversed(metadata.sorted_tables),
                                   engine, True)

    output = ''.join(output)

    if args['--return']:
        return output
    else:
        sys.stdout.write(output)
def generate(model, model_version, dialect, tables=True, constraints=True,
             indexes=True, drop=False, delete_data=False, nologging=False,
             logging=False,
             service='https://data-models-service.research.chop.edu/'):
    """Generate data definition language for the data model specified in the
    given DBMS dialect.

    Arguments:

        model          Model to generate DDL for.
        model_version  Model version to generate DDL for.
        dialect        DBMS dialect to generate DDL in.
        tables         Include tables when generating DDL.
        constraints    Include constraints when generating DDL.
        indexes        Include indexes when generating DDL.
        drop           Generate DDL to drop, instead of create, objects.
        delete_data    Generate DML to delete data from the model.
        nologging      Generate Oracle DDL to make objects "nologging".
        logging        Generate Oracle DDL to make objects "logging".
        service        Base URL of the data models service to use.

    Returns the generated DDL/DML as a single string, always prefixed by
    an idempotent CREATE for the version_history audit table.
    """  # noqa
    metadata = MetaData()
    model_json = get_model_json(model, model_version, service)
    make_model(model_json, metadata)
    service_version = get_service_version(service)

    # A dialect-only engine (no real connection) is enough to compile DDL.
    engine = create_engine(dialect + '://')

    output = []

    # Audit-trail row recorded in version_history ahead of each batch of
    # generated statements.
    INSERT = ("INSERT INTO version_history (operation, model, model_version, "
              "dms_version, dmsa_version) VALUES ('{operation}', '" + model +
              "', '" + model_version + "', '" + service_version + "', '" +
              __version__ + "');\n\n")

    # Oracle does not autocommit DML from scripts.
    if dialect.startswith('oracle'):
        INSERT = INSERT + "COMMIT;\n\n"

    version_history = Table(
        'version_history', MetaData(),
        Column('datetime', DateTime(), primary_key=True,
               server_default=text('CURRENT_TIMESTAMP')),
        Column('operation', String(100)),
        Column('model', String(16)),
        Column('model_version', String(50)),
        Column('dms_version', String(50)),
        Column('dmsa_version', String(50))
    )

    version_tbl_ddl = str(CreateTable(version_history).
                          compile(dialect=engine.dialect)).strip()

    # Wrap the CREATE in the dialect's flavor of "create if not exists".
    if dialect.startswith('mssql'):
        version_tbl_ddl = ("IF OBJECT_ID ('version_history', 'U') IS NULL " +
                           version_tbl_ddl + ";")
    elif dialect.startswith('oracle'):
        # ORA-00955: "name is already used by an existing object".
        version_tbl_ddl = ("BEGIN\n" +
                           "EXECUTE IMMEDIATE '" + version_tbl_ddl + "';\n" +
                           "EXCEPTION\n" +
                           "WHEN OTHERS THEN\n" +
                           "IF SQLCODE = -955 THEN NULL;\n" +
                           "ELSE RAISE;\n" +
                           "END IF;\nEND;\n/")
    else:
        version_tbl_ddl = version_tbl_ddl.replace(
            'CREATE TABLE', 'CREATE TABLE IF NOT EXISTS')
        version_tbl_ddl = version_tbl_ddl + ";"

    # delete_data short-circuits: emit only the deletion DML (plus audit).
    if delete_data:
        output.append(INSERT.format(operation='delete data'))
        output.extend(delete_ddl(reversed(metadata.sorted_tables), engine))
        output.insert(0, version_tbl_ddl + '\n\n')
        return ''.join(output)

    LOGGING = 'ALTER {type} {name} LOGGING;\n'
    NOLOGGING = 'ALTER {type} {name} NOLOGGING;\n'

    if tables:
        if drop:
            # Drop in reverse dependency order; audit row goes first.
            output.extend(table_ddl(reversed(metadata.sorted_tables),
                                    engine, True))
            output.insert(0, INSERT.format(operation='drop tables'))
        elif logging and dialect.startswith('oracle'):
            output.append(INSERT.format(operation='table logging'))
            for table in metadata.sorted_tables:
                output.append(LOGGING.format(type='TABLE', name=table.name))
            output.append('\n')
        elif nologging and dialect.startswith('oracle'):
            output.append(INSERT.format(operation='table nologging'))
            for table in metadata.sorted_tables:
                output.append(NOLOGGING.format(type='TABLE', name=table.name))
            output.append('\n')
        else:
            output.append(INSERT.format(operation='create tables'))
            output.extend(table_ddl(metadata.sorted_tables, engine, False))

    if constraints:
        # SQLite cannot ALTER TABLE to add or drop constraints.
        if drop and not dialect.startswith('sqlite'):
            # Prepend so constraints drop before the tables they reference.
            output.insert(0, '\n')
            output[0:0] = constraint_ddl(reversed(metadata.sorted_tables),
                                        engine, True)
            output.insert(0, INSERT.format(operation='drop constraints'))
        elif logging:
            pass
        elif nologging:
            pass
        elif not dialect.startswith('sqlite'):
            output.append('\n')
            output.append(INSERT.format(operation='create constraints'))
            output.extend(constraint_ddl(metadata.sorted_tables, engine,
                                         False))

    if indexes:
        if drop:
            output.insert(0, '\n')
            output[0:0] = index_ddl(reversed(metadata.sorted_tables), engine,
                                   True)
            output.insert(0, INSERT.format(operation='drop indexes'))
        elif logging and dialect.startswith('oracle'):
            output.append(INSERT.format(operation='index logging'))
            # NOTE(review): the ALTER INDEX statements use the *table*
            # name, not an index name — confirm this matches the model's
            # index naming convention.
            for table in metadata.sorted_tables:
                output.append(LOGGING.format(type='INDEX', name=table.name))
            output.append('\n')
        elif nologging and dialect.startswith('oracle'):
            output.append(INSERT.format(operation='index nologging'))
            for table in metadata.sorted_tables:
                output.append(NOLOGGING.format(type='INDEX', name=table.name))
            output.append('\n')
        else:
            output.append('\n')
            output.append(INSERT.format(operation='create indexes'))
            output.extend(index_ddl(metadata.sorted_tables, engine, False))

    output.insert(0, version_tbl_ddl + '\n\n')
    return ''.join(output)