def create_custom_attribute(conn, name, definition_type, for_object,
                            helptext=None, placeholder=None,
                            is_mandatory=False):
    """Create a custom attribute definition for the given object type.

    Note the column mapping: the ``for_object`` argument is stored in the
    ``definition_type`` column and ``definition_type`` in ``attribute_type``.

    Args:
        conn: database connection to run statements on.
        name: title of the new custom attribute.
        definition_type: attribute type (e.g. "Text").
        for_object: object type the attribute is defined for.
        helptext: optional help text shown next to the attribute.
        placeholder: optional placeholder value.
        is_mandatory: whether the attribute must be filled in.
    """
    migrator_id = migrator.get_migration_user_id(conn)
    conn.execute(
        sa.text("""
            INSERT INTO custom_attribute_definitions(
                modified_by_id,
                created_at,
                updated_at,
                title,
                helptext,
                placeholder,
                definition_type,
                attribute_type,
                mandatory
            )
            VALUES(
                :modified_by_id,
                NOW(),
                NOW(),
                :title,
                :helptext,
                :placeholder,
                :definition_type,
                :attribute_type,
                :mandatory
            );
        """),
        modified_by_id=migrator_id,
        title=name,
        helptext=helptext,
        placeholder=placeholder,
        definition_type=for_object,
        attribute_type=definition_type,
        mandatory=is_mandatory,
    )
    # Record the new CAD so its missing revision can be generated later.
    new_cad_id = utils.last_insert_id(conn)
    utils.add_to_objects_without_revisions(
        conn, new_cad_id, "CustomAttributeDefinition")
def create_acrs():
    """Create access control roles for the KeyReport model.

    Every created ACR is recorded in objects_without_revisions so its
    revision can be built afterwards.
    """
    connection = op.get_bind()
    migrator_id = migrator.get_migration_user_id(connection)
    for name, read, update, delete, mandatory in KEY_REPORT_ACRS:
        insert = acr_propagation.ACR_TABLE.insert().values(
            name=name,
            object_type="KeyReport",
            created_at=datetime.datetime.utcnow(),
            updated_at=datetime.datetime.utcnow(),
            modified_by_id=migrator_id,
            internal=False,
            non_editable=True,
            mandatory=mandatory,
            # Mandatory roles are default to user
            default_to_current_user=mandatory,
            my_work=1,
            read=read,
            update=update,
            delete=delete,
        )
        result = connection.execute(insert)
        utils.add_to_objects_without_revisions(
            connection, result.lastrowid, "AccessControlRole")
def run(): """Run data migration""" connection = op.get_bind() migrator_id = get_migration_user_id(connection) for table, object_type in SCOPING_TABLES: print "Migrate {} objects roles with external data".format(object_type) object_ids = get_object_ids(connection, table) print "Found {} {} objects in DB".format(len(object_ids), object_type) process_table(connection, object_ids, object_type, migrator_id) # Systems print "Migrate System objects roles with external data" object_ids = get_system_ids(connection) print "Found {} System objects in DB".format(len(object_ids)) process_table(connection, object_ids, "System", migrator_id) # Process print "Migrate Process objects roles with external data" object_ids = get_process_ids(connection) print "Found {} Process objects in DB".format(len(object_ids)) process_table(connection, object_ids, "Process", migrator_id) if is_external_service_data_available(connection): sql = """SELECT t.status AS status, count(t.status) AS cnt FROM temp_external_app_roles AS t GROUP BY status;""" print "Summary status of external data:" result = connection.execute(text(sql)) for row in result: print "{} : {}".format(row.cnt, row.status)
def add_to_objects_without_revisions_bulk(
        connection, obj_ids, obj_type, action='created',
        modified_by_id=_USE_DEFAULT_MIGRATOR):
    """Add objects to the objects_without_revisions table in bulk.

    Args:
        connection: database connection.
        obj_ids: iterable of object ids to record.
        obj_type: type name of the objects.
        action: revision action to record ('created' by default).
        modified_by_id: user id to store; defaults to the migration user.
    """
    rev_table = table(
        'objects_without_revisions',
        column('obj_id', Integer),
        column('obj_type', String),
        column('action', String),
    )
    base_row = {'obj_type': obj_type, 'action': action}
    if _check_modified_by_id_column_exists(connection):
        rev_table.append_column(column('modified_by_id', Integer))
        if modified_by_id is _USE_DEFAULT_MIGRATOR:
            modified_by_id = get_migration_user_id(connection)
        base_row['modified_by_id'] = modified_by_id
    # else: older migrations call this helper before the modified_by_id
    # column exists, so the inserted rows must not mention it.
    data = [dict(base_row, obj_id=obj_id) for obj_id in obj_ids]
    connection.execute(rev_table.insert().prefix_with('IGNORE'), data)
def upgrade():
    """Upgrade database schema and/or data"""
    connection = op.get_bind()
    # Step 1: make the proposal roles non-internal so corresponding ACL
    # records can be created when a proposal is created.
    connection.execute(
        sa.text("""UPDATE access_control_roles SET internal = 0
                   WHERE object_type = 'Proposal' AND
                   name in ('ProposalReader', 'ProposalEditor');"""))
    # Step 2: create missing records in access_control_list.
    role_ids = [
        connection.execute(
            sa.text("""
                SELECT id FROM access_control_roles
                WHERE object_type = 'Proposal' AND
                name = '{}'""".format(role_name))).scalar()
        for role_name in ('ProposalEditor', 'ProposalReader')
    ]
    migrator_id = get_migration_user_id(connection)
    for role_id in role_ids:
        fix_acl.create_missing_acl(connection, migrator_id, role_id,
                                   'proposals', 'Proposal', 'created')
    # Step 3: grant permissions to creators of existing proposals; the
    # creator's id is modified_by_id of the first revision.
    fix_acl.create_missing_acp(connection, migrator_id, 'ProposalEditor',
                               'created')
def _add_roles_for_objects(objects, new_roles):
    """Create new roles in access_control_roles for the given objects.

    :param objects: object names for which new roles should be added
    :param new_roles: list of roles to add into the acr
    """
    connection = op.get_bind()
    user_id = get_migration_user_id(connection)
    # Build one VALUES tuple per (object, role) pair; mandatory and
    # default_to_current_user come from the module-level role tables.
    value_rows = [
        "('{}', '{}', NOW(), NOW(), {}, 1, {}, {})".format(
            role_name, object_name, user_id,
            MANDATORY[role_name], DEFAULT_TO_CURRENT_USER[role_name])
        for object_name in objects
        for role_name in new_roles
    ]
    insert_sql = """
        INSERT INTO access_control_roles (
            name, object_type, created_at, updated_at, modified_by_id,
            non_editable, mandatory, default_to_current_user
        ) values """ + ", ".join(value_rows)
    connection.execute(insert_sql)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    connection = op.get_bind()
    user_id = migrator.get_migration_user_id(connection)
    # Add the new "Other Contacts" role for Control objects.
    acr_insert = acr_propagation.ACR_TABLE.insert().values(
        name="Other Contacts",
        object_type="Control",
        created_at=datetime.datetime.utcnow(),
        updated_at=datetime.datetime.utcnow(),
        modified_by_id=user_id,
        internal=False,
        non_editable=True,
        mandatory=False,
        read=True,
        update=True,
        delete=True,
    )
    result = connection.execute(acr_insert)
    utils.add_to_objects_without_revisions(
        connection, result.lastrowid, "AccessControlRole")
    acr_propagation.propagate_roles(
        propagation_rule.GGRC_NEW_ROLES_PROPAGATION, with_update=True)
    update_control_recipients(connection)
def fill_programs_recipients(connection):
    """Fill program recipients with the default value.

    Overwrites recipients/send_by_default on every program row and stamps
    updated_at/modified_by_id with the migration user.

    Returns:
        list of ids of all programs in the table.
    """
    migrator_id = migrator.get_migration_user_id(connection)
    program_table = sa.sql.table(
        "programs",
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('recipients', sa.String(length=250), nullable=True),
        sa.Column('send_by_default', mysql.TINYINT(display_width=1),
                  nullable=True),
        sa.Column('updated_at', sa.DateTime, nullable=False),
        sa.Column('modified_by_id', sa.Integer, nullable=True),
    )
    connection.execute(
        program_table.update().values(
            recipients=default_recipients,
            updated_at=datetime.datetime.utcnow(),
            modified_by_id=migrator_id,
            send_by_default=True,
        )
    )
    rows = connection.execute(program_table.select()).fetchall()
    return [row.id for row in rows]
def upgrade():
    """Primary upgrade function for upgrading assessments and issues.

    Primarily used for building various caches et al.
    """
    # pylint: disable=too-many-locals
    connection = op.get_bind()
    programs = connection.execute(select([programs_table]))
    program_contexts = {program.id: program.context_id
                        for program in programs}
    audits = connection.execute(select([audits_table])).fetchall()
    if audits:
        audit_contexts = {audit.id: audit.context_id for audit in audits}
        audit_programs = {audit.id: audit.program_id for audit in audits}
        program_cache = get_relationship_cache(
            connection, "Program", Types.all)
        audit_cache = get_relationship_cache(connection, "Audit", Types.all)
        parent_snapshot_cache = get_scope_snapshots(connection)
        assessments_cache = get_relationship_cache(
            connection, "Assessment", Types.all | {"Audit"})
        issues_cache = get_relationship_cache(
            connection, "Issue", Types.all | {"Audit"})
        # Union of every related-object set across all caches.
        all_objects = (program_cache.values() + audit_cache.values() +
                       assessments_cache.values() + issues_cache.values())
        revisionable_objects = set().union(*all_objects)
        revision_cache = get_revisions(connection, revisionable_objects)
        caches = {
            "program_rels": program_cache,
            "audit_rels": audit_cache,
            "snapshots": parent_snapshot_cache,
            "program_contexts": program_contexts,
            "audit_programs": audit_programs,
            "audit_contexts": audit_contexts,
            "revisions": revision_cache,
        }
        objects = [
            {
                "type": "Assessment",
                "select_all": assessments_table.select(),
                "object_relationships": assessments_cache,
            },
            {
                "type": "Issue",
                "select_all": issues_table.select(),
                "object_relationships": issues_cache,
            },
        ]
        if assessments_cache or issues_cache:
            user_id = get_migration_user_id(connection)
            for object_settings in objects:
                process_objects(connection, user_id, caches, object_settings)
def _add_roles_for_objects(objects, new_roles):
    """Create new roles in the acr table for the given list of objects.

    Args:
        objects: object names for which new roles should be added
        new_roles: list of roles to add into the acr
    """
    connection = op.get_bind()
    user_id = migrator.get_migration_user_id(connection)
    entries = [
        {
            'name': role_name,
            'object_type': object_name,
            'mandatory': False,
            'non_editable': True,
            'created_at': datetime.datetime.now(),
            'updated_at': datetime.datetime.now(),
            'default_to_current_user': False,
            'modified_by_id': user_id,
        }
        for object_name in objects
        for role_name in new_roles
    ]
    op.bulk_insert(acr_propagation.ACR_TABLE, entries)
def add_to_objects_without_revisions_bulk(
        connection, obj_ids, obj_type, action='created',
        modified_by_id=_USE_DEFAULT_MIGRATOR
):
    """Add objects to the objects_without_revisions table in bulk.

    Args:
        connection: database connection.
        obj_ids: iterable of object ids to record.
        obj_type: type name of the objects.
        action: revision action ('created' by default).
        modified_by_id: user id to store; defaults to the migration user.
    """
    rev_table = table('objects_without_revisions',
                      column('obj_id', Integer),
                      column('obj_type', String),
                      column('action', String))
    if not _check_modified_by_id_column_exists(connection):
        # Older migrations call this helper before the modified_by_id
        # column exists, so the inserted rows must not mention it.
        rows = [dict(obj_id=obj_id, obj_type=obj_type, action=action)
                for obj_id in obj_ids]
    else:
        rev_table.append_column(column('modified_by_id', Integer))
        if modified_by_id is _USE_DEFAULT_MIGRATOR:
            modified_by_id = get_migration_user_id(connection)
        rows = [dict(obj_id=obj_id, obj_type=obj_type, action=action,
                     modified_by_id=modified_by_id)
                for obj_id in obj_ids]
    connection.execute(rev_table.insert().prefix_with('IGNORE'), rows)
def _add_roles_for_objects(objects, new_roles):
    """Create new roles in the acr table for a given list of objects.

    Args:
        objects: object names for which new roles should be added
        new_roles: list of roles to add into the acr
    """
    connection = op.get_bind()
    user_id = migrator.get_migration_user_id(connection)
    rows = []
    for object_name in objects:
        for role_name in new_roles:
            rows.append({
                'name': role_name,
                'object_type': object_name,
                'mandatory': False,
                'non_editable': True,
                'created_at': datetime.datetime.now(),
                'updated_at': datetime.datetime.now(),
                'default_to_current_user': False,
                'modified_by_id': user_id,
            })
    op.bulk_insert(acr_propagation.ACR_TABLE, rows)
def migrate_url_to_reference_url(connection):
    """Convert documents of kind URL to REFERENCE_URL.

    After the document epic a document object has 2 kinds:
    REFERENCE_URL - for urls, FILE - for gdrive files.
    """
    migration_user_id = migrator.get_migration_user_id(connection)
    url_doc_rows = connection.execute(
        text("SELECT d.id FROM documents d WHERE d.kind='URL'")).fetchall()
    url_doc_ids = [row.id for row in url_doc_rows]
    utils.add_to_objects_without_revisions_bulk(
        connection, url_doc_ids, "Document")
    connection.execute(
        text("""
            UPDATE documents SET
              kind='REFERENCE_URL',
              modified_by_id=:modified_by_id,
              updated_at=NOW()
            WHERE kind='URL'
        """),
        modified_by_id=migration_user_id)
    # Narrow the enum now that no 'URL' rows remain.
    connection.execute(
        text("""
            ALTER TABLE documents MODIFY kind
            enum('FILE','REFERENCE_URL') NOT NULL DEFAULT 'REFERENCE_URL';
        """))
def upgrade():
    """Upgrade database schema and/or data"""
    connection = op.get_bind()
    # First, allow creation of the corresponding acl records when a
    # proposal is created: the roles were marked internal before.
    connection.execute(
        sa.text("""UPDATE access_control_roles SET internal = 0
                   WHERE object_type = 'Proposal' AND
                   name in ('ProposalReader', 'ProposalEditor');"""))
    # Second, create missing records in the access_control_list table.
    role_ids = [
        connection.execute(
            sa.text("""
                SELECT id FROM access_control_roles
                WHERE object_type = 'Proposal' AND
                name = '{}'""".format(role_name))).scalar()
        for role_name in ('ProposalEditor', 'ProposalReader')
    ]
    migrator_id = get_migration_user_id(connection)
    for role_id in role_ids:
        fix_acl.create_missing_acl(connection, migrator_id, role_id,
                                   'proposals', 'Proposal', 'created')
    # As the last step, give permissions to creators of existing
    # proposals; the creator id is modified_by_id of the first revision.
    fix_acl.create_missing_acp(connection, migrator_id, 'ProposalEditor',
                               'created')
def create_custom_attribute(conn, name, definition_type, for_object,
                            helptext=None, placeholder=None,
                            is_mandatory=False):
    """Create a custom attribute definition for the given object.

    The ``for_object`` argument maps to the ``definition_type`` column and
    ``definition_type`` to the ``attribute_type`` column.

    Args:
        conn: database connection.
        name: title of the new custom attribute.
        definition_type: attribute type (e.g. "Text").
        for_object: object type the attribute is defined for.
        helptext: optional help text.
        placeholder: optional placeholder value.
        is_mandatory: whether the attribute is mandatory.
    """
    migrator_id = migrator.get_migration_user_id(conn)
    insert_params = {
        "modified_by_id": migrator_id,
        "title": name,
        "helptext": helptext,
        "placeholder": placeholder,
        "definition_type": for_object,
        "attribute_type": definition_type,
        "mandatory": is_mandatory,
    }
    conn.execute(
        sa.text("""
            INSERT INTO custom_attribute_definitions(
                modified_by_id,
                created_at,
                updated_at,
                title,
                helptext,
                placeholder,
                definition_type,
                attribute_type,
                mandatory
            )
            VALUES(
                :modified_by_id,
                NOW(),
                NOW(),
                :title,
                :helptext,
                :placeholder,
                :definition_type,
                :attribute_type,
                :mandatory
            );
        """),
        **insert_params
    )
    # Record the new CAD so its missing revision can be generated later.
    cad_id = utils.last_insert_id(conn)
    utils.add_to_objects_without_revisions(
        conn, cad_id, "CustomAttributeDefinition"
    )
def migrate_url_to_reference_url(connection):
    """Convert URL documents to REFERENCE_URL kind.

    After the document epic a document object has 2 kinds:
    REFERENCE_URL - for urls, FILE - for gdrive files.
    """
    migration_user_id = migrator.get_migration_user_id(connection)
    rows = connection.execute(
        text("SELECT d.id FROM documents d WHERE d.kind='URL'")).fetchall()
    doc_ids = [row.id for row in rows]
    utils.add_to_objects_without_revisions_bulk(connection, doc_ids,
                                                "Document")
    update_sql = """
        UPDATE documents SET
          kind='REFERENCE_URL',
          modified_by_id=:modified_by_id,
          updated_at=NOW()
        WHERE kind='URL'
    """
    connection.execute(text(update_sql), modified_by_id=migration_user_id)
    # With all 'URL' rows converted, shrink the enum to the two remaining
    # kinds.
    connection.execute(text("""
        ALTER TABLE documents MODIFY kind
        enum('FILE','REFERENCE_URL') NOT NULL DEFAULT 'REFERENCE_URL';
    """))
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    conn = op.get_bind()
    user_id = migrator.get_migration_user_id(conn)
    # Data fixes first, then role propagation, then template definitions.
    update_recipients(conn, user_id)
    update_role_names(conn, user_id)
    propagate_roles(CONTROL_PROPAGATION, with_update=True)
    update_templates_definitions(conn, user_id)
def run_migration():
    """Migration runner"""
    connection = op.get_bind()
    user_id = get_migration_user_id(connection)
    reviewer_acr_id = get_reviewer_acr_id(connection)
    # Handle each (migrated / non-migrated) x (reviewed / unreviewed)
    # combination separately.
    process_migrated_reviewed(connection, user_id, reviewer_acr_id)
    process_migrated_unreviewed(connection, user_id)
    process_non_migrated_unreviewed(connection, user_id)
    process_non_migrated_reviewed(connection, user_id)
def run_migration(): """Run migration""" connection = op.get_bind() migration_user_id = migrator.get_migration_user_id(connection) count = get_docs_to_migrate_count(connection) docs_to_migrate = get_docs_to_migrate(connection) doc_admin_role_id, evid_admin_role_id = build_acr_mapping(connection) for i, doc in enumerate(docs_to_migrate): print "Processing document {} of {}".format(i, count) process_doc(connection, doc, migration_user_id, doc_admin_role_id, evid_admin_role_id)
def run_migration():
    """Run main migration flow"""
    connection = op.get_bind()
    multi_rel_document_ids = list_docs_with_multiple_relationships(connection)
    if not multi_rel_document_ids:
        return
    migration_user_id = get_migration_user_id(connection)
    event_id = crete_event_for_revision(connection, migration_user_id)
    docs_to_process = get_docs_to_process(connection, multi_rel_document_ids)
    # Group documents by id. NOTE(review): groupby only groups adjacent
    # rows -- assumes get_docs_to_process returns rows ordered by doc_id.
    for _, doc_group in groupby(docs_to_process, lambda it: it.doc_id):
        process_group(connection, doc_group, event_id, migration_user_id)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    connection = op.get_bind()
    migrator_id = migrator.get_migration_user_id(connection)
    event_id = create_event(connection, migrator_id, resource_type="Document")
    # Stash documents without relationships in a temp table, create
    # "deprecated" revisions for them, then delete them and drop the
    # temp table.
    create_temporary_table()
    save_documents_no_relationships()
    create_deprecated_revisions(connection, event_id, migrator_id)
    remove_documents(connection, migrator_id)
    remove_tmp_table()
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    connection = op.get_bind()
    migration_user_id = get_migration_user_id(connection)
    evidence_admin_role_id = get_evidence_admin_role_id(connection)
    # Collect evidence rows lacking an Admin ACL into a temp table, then
    # create the missing ACLs and revision records.
    create_evid_temporary_table()
    save_evidence_no_admins()
    create_evidence_missing_admins(connection,
                                   migration_user_id,
                                   evidence_admin_role_id)
    add_evidence_to_missing_revisions(connection)
    op.execute("DROP TABLE evidence_wo_admins")
    op.execute("SET AUTOCOMMIT = 0")
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    op.add_column(
        'objects_without_revisions',
        sa.Column('modified_by_id', sa.Integer, nullable=True))
    # The table may already contain records added by previous migration
    # scripts; backfill modified_by_id with the default migrator for them.
    rev_table = sa.sql.table(
        'objects_without_revisions',
        sa.sql.column('modified_by_id', sa.Integer))
    migrator_id = get_migration_user_id(op.get_bind())
    op.execute(rev_table.update().values(modified_by_id=migrator_id))
def run_data_migration():
    """Migration runner"""
    conn = op.get_bind()
    migrator_id = migrator.get_migration_user_id(conn)
    old_comment_data = load_data(conn)
    comment_admin_role_id = get_comment_admin_role_id(conn)
    # Copy each legacy comment into the new storage, granting the original
    # author the Admin role and relating the comment to its task.
    for data in old_comment_data:
        comment_id = create_comment(conn, data)
        add_admin_acl(conn, comment_id, data.cte_modified_by_id,
                      comment_admin_role_id)
        create_relationship(conn, comment_id, data.cgot_id, migrator_id)
    # Drop the legacy records once everything is copied.
    remove_old_relationship(conn, old_comment_data)
    remove_old_rel_revisions(conn, old_comment_data)
    remove_cycle_task_entries(conn, old_comment_data)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    connection = op.get_bind()
    migrator_id = migrator.get_migration_user_id(connection)
    # Register the 'import' and 'export' background operation types.
    connection.execute(
        sa.text("""
            INSERT INTO background_operation_types(
              `name`, modified_by_id, created_at, updated_at
            )
            VALUES('import', :migrator_id, now(), now()),
                  ('export', :migrator_id, now(), now());
        """),
        migrator_id=migrator_id,
    )
def update_nullable_values():
    """Backfill NULL events.modified_by_id with the migration user id."""
    conn = op.get_bind()
    migrator_id = migrator.get_migration_user_id(conn)
    events_table = sa.sql.table(
        'events',
        sa.sql.column('modified_by_id', sa.Integer),
    )
    op.execute(
        events_table.update()
        .where(events_table.c.modified_by_id.is_(None))
        .values(modified_by_id=migrator_id))
def upgrade():
    """Upgrade database schema and/or data, creating a new revision.

    Creates default issuetracker_issues rows for every Assessment and
    Audit that doesn't have one yet, then records the newly inserted rows
    in objects_without_revisions.
    """
    connection = op.get_bind()
    migrator_id = migrator.get_migration_user_id(connection)
    # MAX(id) is NULL when the table is empty; fall back to 0 so the
    # "id > :max_issuetracker_id" filter below still matches the rows
    # inserted by this migration (a comparison with NULL is never true,
    # which would silently skip the revision bookkeeping).
    max_issuetracker_id = connection.execute("""
        SELECT max(id) FROM issuetracker_issues;
    """).fetchone()[0] or 0
    connection.execute(
        sa.text("""
            INSERT INTO issuetracker_issues(
                object_id, object_type, enabled, title, component_id,
                hotlist_id, cc_list, modified_by_id, created_at, updated_at
            )
            SELECT a.id, 'Assessment', :enabled, a.title, :component_id,
                   :hotlist_id, :cc_list, :modified_by_id, NOW(), NOW()
            FROM assessments a
            LEFT JOIN issuetracker_issues ii
              ON ii.object_id = a.id AND ii.object_type = 'Assessment'
            WHERE ii.id IS NULL
            UNION ALL
            SELECT a.id, 'Audit', :enabled, a.title, :component_id,
                   :hotlist_id, :cc_list, :modified_by_id, NOW(), NOW()
            FROM audits a
            LEFT JOIN issuetracker_issues ii
              ON ii.object_id = a.id AND ii.object_type = 'Audit'
            WHERE ii.id IS NULL;
        """),
        enabled=False,
        component_id=DEFAULT_COMPONENT_ID,
        hotlist_id=DEFAULT_HOTLIST_ID,
        cc_list="",
        modified_by_id=migrator_id,
    )
    # Every row inserted above has id > the previous maximum; queue those
    # for revision creation.
    connection.execute(
        sa.text("""
            INSERT IGNORE INTO objects_without_revisions(
              obj_id, obj_type, action
            )
            SELECT id, 'IssuetrackerIssue', 'created'
            FROM issuetracker_issues
            WHERE id > :max_issuetracker_id;
        """),
        max_issuetracker_id=max_issuetracker_id,
    )
def run_data_migration():
    """Migration runner"""
    conn = op.get_bind()
    migrator_id = migrator.get_migration_user_id(conn)
    old_comment_data = load_data(conn)
    comment_admin_role_id = get_comment_admin_role_id(conn)
    for data in old_comment_data:
        # Recreate each legacy comment, grant its author Admin, and relate
        # it to the cycle task.
        comment_id = create_comment(conn, data)
        add_admin_acl(
            conn,
            comment_id,
            data.cte_modified_by_id,
            comment_admin_role_id,
        )
        create_relationship(conn, comment_id, data.cgot_id, migrator_id)
    # Remove the migrated legacy data.
    remove_old_relationship(conn, old_comment_data)
    remove_old_rel_revisions(conn, old_comment_data)
    remove_cycle_task_entries(conn, old_comment_data)
def add_missing_slugs(connection):
    """Generate missing document slugs as DOCUMENT-<id>."""
    migration_user_id = migrator.get_migration_user_id(connection)
    rows = connection.execute(
        text("SELECT d.id FROM documents d WHERE d.slug=''")).fetchall()
    doc_ids = [row.id for row in rows]
    utils.add_to_objects_without_revisions_bulk(connection, doc_ids,
                                                "Document",
                                                action='modified')
    # The UPDATE below has no key column in its WHERE clause, so safe
    # updates must be disabled first.
    op.execute('SET SESSION SQL_SAFE_UPDATES = 0')
    update_sql = """
        UPDATE documents SET
          slug=CONCAT("DOCUMENT-",id),
          modified_by_id=:modified_by_id,
          updated_at=NOW()
        WHERE slug=''
    """
    connection.execute(text(update_sql), modified_by_id=migration_user_id)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    connection = op.get_bind()
    migration_user_id = get_migration_user_id(connection)
    current_datetime = datetime.now()
    # The UPDATE has no key column in its WHERE clause.
    op.execute("SET SESSION SQL_SAFE_UPDATES = 0")
    # Normalize empty Checkbox custom attribute values to 0.
    update_sql = """
        UPDATE custom_attribute_values AS cav
        JOIN custom_attribute_definitions AS cad
          ON cad.id = cav.custom_attribute_id
        SET cav.attribute_value = 0,
            cav.modified_by_id = :modified_by_id,
            cav.updated_at = :current_datetime
        WHERE cad.attribute_type = "Checkbox" and cav.attribute_value = "";
    """
    connection.execute(
        text(update_sql),
        modified_by_id=migration_user_id,
        current_datetime=current_datetime)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    connection = op.get_bind()
    user_id = get_migration_user_id(connection)
    # One VALUES tuple per (object, role) pair; non_editable is always 1.
    value_rows = [
        "('{}', '{}', NOW(), NOW(), {}, 1)".format(
            role_name, object_name, user_id)
        for object_name in SCOPING_OBJECTS
        for role_name in NEW_ROLES
    ]
    insert_sql = """
        INSERT INTO access_control_roles (
            name, object_type, created_at, updated_at, modified_by_id,
            non_editable
        ) values """ + ", ".join(value_rows)
    connection.execute(insert_sql)
def run_data_migration():
    """Migration runner"""
    conn = op.get_bind()
    migrator_id = migrator.get_migration_user_id(conn)
    data = load_data(conn)
    # Switch the ACR propagation tree to the workflow variant before
    # moving the task-group mappings.
    update_acr_propagation_tree(
        acr_propagation_constants.CURRENT_PROPAGATION,
        new_tree=acr_propagation_constants.WORKFLOW_PROPAGATION,
    )
    for tgo in data:
        create_relationship(conn,
                            tgo.tgo_task_group_id,
                            tgo.tgo_object_id,
                            tgo.tgo_object_type,
                            migrator_id,
                            tgo.tgo_context_id)
    # Clean up the replaced task_group_objects data.
    create_old_rel_del_revisions(conn, data)
    remove_old_relationship(conn, data)
    remove_task_group_objects(conn, data)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    # New tables tracking background operations and their types.
    op.create_table(
        "background_operation_types",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.String(length=250), nullable=False),
        sa.Column("modified_by_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime()),
        sa.Column("updated_at", sa.DateTime()),
    )
    op.create_table(
        "background_operations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("bg_operation_type_id", sa.Integer, nullable=False),
        sa.Column("bg_task_id", sa.Integer, nullable=False),
        sa.Column("object_type", sa.String(length=250), nullable=False),
        sa.Column("object_id", sa.Integer(), nullable=False),
        sa.Column("modified_by_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime()),
        sa.Column("updated_at", sa.DateTime()),
        sa.ForeignKeyConstraint(
            ["bg_operation_type_id"],
            ["background_operation_types.id"],
        ),
        sa.ForeignKeyConstraint(["bg_task_id"], ["background_tasks.id"]),
    )
    # Seed the one known operation type.
    connection = op.get_bind()
    migrator_id = migrator.get_migration_user_id(connection)
    connection.execute(
        sa.text("""
            INSERT INTO background_operation_types(
              `name`, modified_by_id, created_at, updated_at
            )
            SELECT 'generate_children_issues', :migrator_id, now(), now();
        """),
        migrator_id=migrator_id,
    )
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    bg_type_columns = [
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.String(length=250), nullable=False),
        sa.Column("modified_by_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime()),
        sa.Column("updated_at", sa.DateTime()),
    ]
    op.create_table("background_operation_types", *bg_type_columns)
    op.create_table(
        "background_operations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("bg_operation_type_id", sa.Integer, nullable=False),
        sa.Column("bg_task_id", sa.Integer, nullable=False),
        sa.Column("object_type", sa.String(length=250), nullable=False),
        sa.Column("object_id", sa.Integer(), nullable=False),
        sa.Column("modified_by_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime()),
        sa.Column("updated_at", sa.DateTime()),
        sa.ForeignKeyConstraint(
            ["bg_operation_type_id"],
            ["background_operation_types.id"],
        ),
        sa.ForeignKeyConstraint(["bg_task_id"], ["background_tasks.id"]))
    # Seed the one known operation type.
    connection = op.get_bind()
    migrator_id = migrator.get_migration_user_id(connection)
    connection.execute(
        sa.text("""
            INSERT INTO background_operation_types(
              `name`, modified_by_id, created_at, updated_at
            )
            SELECT 'generate_children_issues', :migrator_id, now(), now();
        """),
        migrator_id=migrator_id,
    )
def create_acrs():
    """Create ACRs for the KeyReport model.

    Each row of KEY_REPORT_ACRS describes one role; every created ACR is
    queued in objects_without_revisions.
    """
    conn = op.get_bind()
    user_id = migrator.get_migration_user_id(conn)
    for name, read, update, delete, mandatory in KEY_REPORT_ACRS:
        acr_values = dict(
            name=name,
            object_type="KeyReport",
            created_at=datetime.datetime.utcnow(),
            updated_at=datetime.datetime.utcnow(),
            modified_by_id=user_id,
            internal=False,
            non_editable=True,
            mandatory=mandatory,
            # Mandatory roles are default to user
            default_to_current_user=mandatory,
            my_work=1,
            read=read,
            update=update,
            delete=delete,
        )
        result = conn.execute(
            acr_propagation.ACR_TABLE.insert().values(**acr_values))
        utils.add_to_objects_without_revisions(conn, result.lastrowid,
                                               "AccessControlRole")
def upgrade():
    """Migrate audit-related data and concepts to audit snapshots"""
    # pylint: disable=too-many-locals
    connection = op.get_bind()
    audits_more, ghost_objects = (
        validate_assessment_issue_to_audit_relationships(connection))
    if audits_more or ghost_objects:
        # Iterating an empty dict is a no-op, so no inner guards needed.
        for klass_name, ids in audits_more.items():
            logger.warning(
                "The following %s have more than one Audit: %s",
                klass_name,
                ",".join(map(str, ids))  # pylint: disable=bad-builtin
            )
        for klass_name, ids in ghost_objects.items():
            logger.warning(
                "The following %s have no Audits mapped to them: %s",
                klass_name,
                ",".join(map(str, ids))  # pylint: disable=bad-builtin
            )
        raise Exception("Cannot perform migration. Check logger warnings.")
    audits = connection.execute(audits_table.select()).fetchall()
    if audits:
        program_ids = {audit.program_id for audit in audits}
        programs = connection.execute(
            select([programs_table]).where(
                programs_table.c.id.in_(program_ids)))
        program_contexts = {program.id: program.context_id
                            for program in programs}
        program_relationships = get_relationship_cache(
            connection, "Program", Types.all)
        audit_relationships = get_relationship_cache(
            connection, "Audit", Types.all)
        all_objects = (program_relationships.values() +
                       audit_relationships.values())
        revisionable_objects = set().union(*all_objects)
        revision_cache = get_revisions(connection, revisionable_objects)
        # Warn about mapped objects with no revision at all.
        objects_missing_revision = (revisionable_objects -
                                    set(revision_cache.keys()))
        if objects_missing_revision:
            missing = ",".join([
                "{obj.type}-{obj.id}".format(obj=obj)
                for obj in objects_missing_revision
            ])
            logger.warning("Phantom objects mapped to program or audit: %s",
                           missing)
        caches = {
            "program_contexts": program_contexts,
            "program_rels": program_relationships,
            "audit_rels": audit_relationships,
            "revisions": revision_cache,
        }
        user_id = get_migration_user_id(connection)
        process_audits(connection, user_id, caches, audits)
def run_migration(connection):
    """Run migration"""
    user_id = migrator.get_migration_user_id(connection)
    # Prepare the revisions table first, then migrate documents and rename
    # the document fields.
    create_new_revisions_table(connection)
    migrate_docs(connection, user_id)
    rename_doc_fields()
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    conn = op.get_bind()
    user_id = migrator.get_migration_user_id(conn)
    update_recipients(conn, user_id)
    update_role_names(conn, user_id)
def upgrade():
    """Migrate audit-related data and concepts to audit snapshots"""
    # pylint: disable=too-many-locals
    connection = op.get_bind()
    audits_more, ghost_objects = (
        validate_assessment_issue_to_audit_relationships(connection))
    if audits_more or ghost_objects:
        for klass_name, ids in audits_more.items():
            logger.warning(
                "The following %s have more than one Audit: %s",
                klass_name,
                ",".join(map(str, ids))  # pylint: disable=bad-builtin
            )
        for klass_name, ids in ghost_objects.items():
            logger.warning(
                "The following %s have no Audits mapped to them: %s",
                klass_name,
                ",".join(map(str, ids))  # pylint: disable=bad-builtin
            )
        raise Exception("Cannot perform migration. Check logger warnings.")
    audits = connection.execute(audits_table.select()).fetchall()
    if not audits:
        return
    program_ids = {audit.program_id for audit in audits}
    program_sql = select([programs_table]).where(
        programs_table.c.id.in_(program_ids)
    )
    programs = connection.execute(program_sql)
    program_contexts = {program.id: program.context_id
                        for program in programs}
    program_relationships = get_relationship_cache(
        connection, "Program", Types.all)
    audit_relationships = get_relationship_cache(
        connection, "Audit", Types.all)
    all_objects = (program_relationships.values() +
                   audit_relationships.values())
    revisionable_objects = set()
    revisionable_objects = revisionable_objects.union(*all_objects)
    revision_cache = get_revisions(connection, revisionable_objects)
    objects_missing_revision = (revisionable_objects -
                                set(revision_cache.keys()))
    if objects_missing_revision:
        missing = ",".join(
            ["{obj.type}-{obj.id}".format(obj=obj)
             for obj in objects_missing_revision])
        logger.warning(
            "Phantom objects mapped to program or audit: %s", missing)
    caches = {
        "program_contexts": program_contexts,
        "program_rels": program_relationships,
        "audit_rels": audit_relationships,
        "revisions": revision_cache,
    }
    user_id = get_migration_user_id(connection)
    process_audits(connection, user_id, caches, audits)