def migration26(tdset):
  """
  Add rawViewSectionRef column to _grist_Tables and new raw view sections for each 'normal'
  table.
  """
  doc_actions = [
    add_column('_grist_Tables', 'rawViewSectionRef', 'Ref:_grist_Views_section')
  ]

  # Materialize metadata rows as record objects for convenient attribute access.
  tables = list(actions.transpose_bulk_action(tdset.all_tables["_grist_Tables"]))
  columns = list(actions.transpose_bulk_action(tdset.all_tables["_grist_Tables_column"]))
  views = {view.id: view
           for view in actions.transpose_bulk_action(tdset.all_tables["_grist_Views"])}

  # First free row ID in _grist_Views_section; incremented by hand for each section we add.
  new_view_section_id = next_id(tdset, "_grist_Views_section")
  # Sort by tableId so the assignment of new section IDs is deterministic.
  for table in sorted(tables, key=lambda t: t.tableId):
    old_view = views.get(table.primaryViewId)
    # Only tables with an existing primary view get a raw section; others are skipped
    # (presumably these are not 'normal' tables -- see docstring).
    if not (table.primaryViewId and old_view):
      continue

    # Visible columns of this table, ordered by their stored position.
    table_columns = [col for col in columns
                     if table.id == col.parentId and is_visible_column(col.colId)]
    table_columns.sort(key=lambda c: c.parentPos)

    # Bulk column-values for the new _grist_Views_section_field records, one per column.
    fields = {
      "parentId": [new_view_section_id] * len(table_columns),
      "colRef": [col.id for col in table_columns],
      "parentPos": [col.parentPos for col in table_columns],
    }
    # None row IDs mean "assign fresh IDs" when the action is applied.
    field_ids = [None] * len(table_columns)

    doc_actions += [
      # The new raw view section itself, titled after the old primary view.
      actions.AddRecord(
        "_grist_Views_section", new_view_section_id, {
          "tableRef": table.id,
          "parentId": 0,
          "parentKey": "record",
          "title": old_view.name,
          "defaultWidth": 100,
          "borderWidth": 1,
        }),
      # Point the table at its raw section.
      actions.UpdateRecord("_grist_Tables", table.id, {
        "rawViewSectionRef": new_view_section_id,
      }),
      actions.BulkAddRecord("_grist_Views_section_field", field_ids, fields),
    ]

    new_view_section_id += 1

  return tdset.apply_doc_actions(doc_actions)
def BulkRemoveRecord(self, table_id, row_ids): table = self._engine.tables[table_id] # Ignore records that don't exist in the table. row_ids = [r for r in row_ids if r in table.row_ids] if not row_ids: return # Collect the undo values, and unset all values in the column (i.e. set to defaults), just to # make sure we don't have stale values hanging around. undo_values = {} for column in six.itervalues(table.all_columns): if not column.is_private() and column.col_id != "id": col_values = [column.raw_get(r) for r in row_ids] default = column.getdefault() # If this column had all default values, don't include it into the undo BulkAddRecord. if not all(strict_equal(val, default) for val in col_values): undo_values[column.col_id] = col_values for row_id in row_ids: column.unset(row_id) # Generate the undo action. self._engine.out_actions.undo.append( actions.BulkAddRecord(table_id, row_ids, undo_values).simplify()) self._engine.out_actions.summary.remove_records(table_id, row_ids) # Invalidate the deleted rows, so that anything that depends on them gets recomputed. self._engine.invalidate_records(table_id, row_ids)
def migration25(tdset):
  """
  Add the _grist_Filters table and seed it from the filters previously stored on
  _grist_Views_section_field records. From this version on, the 'filter' field of
  _grist_Views_section_field is deprecated.
  """
  doc_actions = [
    actions.AddTable('_grist_Filters', [
      schema.make_column("viewSectionRef", "Ref:_grist_Views_section"),
      schema.make_column("colRef", "Ref:_grist_Tables_column"),
      schema.make_column("filter", "Text"),
    ])
  ]

  # Carry over every field that has a non-empty filter into the new table.
  filtered_fields = [
    f for f in actions.transpose_bulk_action(tdset.all_tables['_grist_Views_section_field'])
    if f.filter
  ]
  if filtered_fields:
    col_info = {
      'filter': [f.filter for f in filtered_fields],
      'colRef': [f.colRef for f in filtered_fields],
      'viewSectionRef': [f.parentId for f in filtered_fields],
    }
    doc_actions.append(
      actions.BulkAddRecord('_grist_Filters', [None] * len(filtered_fields), col_info))

  return tdset.apply_doc_actions(doc_actions)
def RemoveTable(self, table_id):
  """
  Remove table_id from the engine: records undo actions to restore both its data and its
  schema, pops the table from the schema, and rebuilds generated user code.
  """
  assert table_id in self._engine.tables, "Table %s doesn't exist" % table_id
  engine = self._engine

  # Undo action that restores all the data records of this table (empty ones simplify away).
  restore_data = actions.BulkAddRecord(
    *engine.fetch_table(table_id, formulas=True)).simplify()
  if restore_data:
    engine.out_actions.undo.append(restore_data)

  # Update schema, and re-generate the module code.
  schema_table = engine.schema.pop(table_id)
  engine.rebuild_usercode()

  # Undo action that recreates the table's schema.
  engine.out_actions.undo.append(
    actions.AddTable(table_id, schema.cols_to_dict_list(schema_table.columns)))
  engine.out_actions.summary.remove_table(table_id)
def migration14(tdset):
  """
  Create the ACL tables plus the default ACL groups, the default resource, and the default
  rule. These match the actions applied to a new document by the 'InitNewDoc' useraction
  (as of v14).
  """
  def cols(*pairs):
    # Build a list of column definitions from (col_id, col_type) pairs.
    return [schema.make_column(col_id, col_type) for (col_id, col_type) in pairs]

  return tdset.apply_doc_actions([
    actions.AddTable('_grist_ACLMemberships', cols(
      ('parent', 'Ref:_grist_ACLPrincipals'),
      ('child', 'Ref:_grist_ACLPrincipals'),
    )),
    actions.AddTable('_grist_ACLPrincipals', cols(
      ('userName', 'Text'),
      ('groupName', 'Text'),
      ('userEmail', 'Text'),
      ('instanceId', 'Text'),
      ('type', 'Text'),
    )),
    actions.AddTable('_grist_ACLResources', cols(
      ('colIds', 'Text'),
      ('tableId', 'Text'),
    )),
    actions.AddTable('_grist_ACLRules', cols(
      ('aclFormula', 'Text'),
      ('principals', 'Text'),
      ('resource', 'Ref:_grist_ACLResources'),
      ('aclColumn', 'Ref:_grist_Tables_column'),
      ('permissions', 'Int'),
    )),
    # Set up initial ACL data: the four default groups, plus a default resource and rule.
    actions.BulkAddRecord('_grist_ACLPrincipals', [1, 2, 3, 4], {
      'type': ['group'] * 4,
      'groupName': ['Owners', 'Admins', 'Editors', 'Viewers'],
    }),
    actions.AddRecord('_grist_ACLResources', 1, {
      'tableId': '', 'colIds': ''
    }),
    actions.AddRecord('_grist_ACLRules', 1, {
      'resource': 1, 'permissions': 0x3F, 'principals': '[1]'
    }),
  ])
def alist():
  """
  Return a fresh sample list containing one instance of each DocAction type.

  A new list is built on each call because consumers (e.g. prune_actions) modify it
  in place.
  """
  # NOTE(review): BulkAddRecord and ReplaceTableData are given scalar row-ids/values here
  # rather than lists; presumably deliberate for these fixtures since the consumers only
  # inspect table/column identifiers -- confirm before reusing elsewhere.
  return [
    actions.BulkUpdateRecord("Table1", [1, 2, 3], {'Foo': [10, 20, 30]}),
    actions.BulkUpdateRecord("Table2", [1, 2, 3], {
      'Foo': [10, 20, 30],
      'Bar': ['a', 'b', 'c']
    }),
    actions.UpdateRecord("Table1", 17, {'Foo': 10}),
    actions.UpdateRecord("Table2", 18, {
      'Foo': 10,
      'Bar': 'a'
    }),
    actions.AddRecord("Table1", 17, {'Foo': 10}),
    actions.BulkAddRecord("Table2", 18, {
      'Foo': 10,
      'Bar': 'a'
    }),
    actions.ReplaceTableData("Table2", 18, {
      'Foo': 10,
      'Bar': 'a'
    }),
    actions.RemoveRecord("Table1", 17),
    actions.BulkRemoveRecord("Table2", [17, 18]),
    actions.AddColumn("Table1", "Foo", {"type": "Text"}),
    actions.RenameColumn("Table1", "Foo", "Bar"),
    actions.ModifyColumn("Table1", "Foo", {"type": "Text"}),
    actions.RemoveColumn("Table1", "Foo"),
    actions.AddTable("THello", [{
      "id": "Foo"
    }, {
      "id": "Bar"
    }]),
    actions.RemoveTable("THello"),
    actions.RenameTable("THello", "TWorld"),
  ]
def create_migrations(all_tables, metadata_only=False):
  """
  Creates and returns a list of DocActions needed to bring this document to
  schema.SCHEMA_VERSION.

  all_tables: all tables or just the metadata tables (those named with _grist_ prefix) as a
    dictionary mapping table name to TableData.
  metadata_only: should be set if only metadata tables are passed in. If ALL tables are
    required to process migrations, this method will raise a "need all tables..." exception.
  """
  try:
    doc_version = all_tables['_grist_DocInfo'].columns["schemaVersion"][0]
  except Exception:
    # Missing table, column, or row all mean a pre-versioning document; start from 0.
    doc_version = 0

  # We create a TableDataSet, and populate it with the subset of the current schema that matches
  # all_tables. For missing items, we make up tables and incomplete columns, which should be OK
  # since we would not be adding new records to deprecated columns.
  # Note that this approach makes it NOT OK to change column types.
  tdset = table_data_set.TableDataSet()

  # For each table in the provided metadata tables, create an AddTable action.
  user_schema = schema.build_schema(all_tables['_grist_Tables'],
                                    all_tables['_grist_Tables_column'],
                                    include_builtin=False)
  for t in six.itervalues(user_schema):
    tdset.apply_doc_action(actions.AddTable(t.tableId, schema.cols_to_dict_list(t.columns)))

  # For each old table/column, construct an AddTable action using the current schema.
  new_schema = {a.table_id: a for a in schema.schema_create_actions()}
  for table_id, data in sorted(six.iteritems(all_tables)):
    # User tables should already be in tdset; the rest must be metadata tables.
    # (If metadata_only is true, there is simply nothing to skip here.)
    if table_id not in tdset.all_tables:
      new_col_info = {}
      if table_id in new_schema:
        new_col_info = {c['id']: c for c in new_schema[table_id].columns}
      # Use an incomplete default for unknown (i.e. deprecated) columns; some uses of the column
      # would be invalid, such as adding a new record with missing values.
      col_info = sorted([new_col_info.get(col_id, {'id': col_id}) for col_id in data.columns],
                        key=lambda c: list(six.iteritems(c)))
      tdset.apply_doc_action(actions.AddTable(table_id, col_info))

    # And load in the original data, interpreting the TableData object as BulkAddRecord action.
    tdset.apply_doc_action(actions.BulkAddRecord(*data))

  migration_actions = []
  for version in xrange(doc_version + 1, schema.SCHEMA_VERSION + 1):
    migration_func = all_migrations.get(version, noop_migration)
    if migration_func.need_all_tables and metadata_only:
      raise Exception("need all tables for migration to %s" % version)
    # Reuse migration_func rather than repeating the all_migrations lookup (the original
    # performed the same .get() twice and ignored the bound variable).
    migration_actions.extend(migration_func(tdset))

  # Note that if we are downgrading versions (i.e. doc_version is higher), then the following is
  # the only action we include into the migration.
  migration_actions.append(actions.UpdateRecord('_grist_DocInfo', 1, {
    'schemaVersion': schema.SCHEMA_VERSION
  }))
  return migration_actions
def test_prune_actions(self):
  """
  Verify that prune_actions removes exactly the actions that touch only the given
  (table_id, col_id), and strips that column from multi-column data actions.
  """
  # prune_actions is in-place, so we make a new list every time.
  def alist():
    return [
      actions.BulkUpdateRecord("Table1", [1, 2, 3], {'Foo': [10, 20, 30]}),
      actions.BulkUpdateRecord("Table2", [1, 2, 3], {
        'Foo': [10, 20, 30],
        'Bar': ['a', 'b', 'c']
      }),
      actions.UpdateRecord("Table1", 17, {'Foo': 10}),
      actions.UpdateRecord("Table2", 18, {
        'Foo': 10,
        'Bar': 'a'
      }),
      actions.AddRecord("Table1", 17, {'Foo': 10}),
      actions.BulkAddRecord("Table2", 18, {
        'Foo': 10,
        'Bar': 'a'
      }),
      actions.ReplaceTableData("Table2", 18, {
        'Foo': 10,
        'Bar': 'a'
      }),
      actions.RemoveRecord("Table1", 17),
      actions.BulkRemoveRecord("Table2", [17, 18]),
      actions.AddColumn("Table1", "Foo", {"type": "Text"}),
      actions.RenameColumn("Table1", "Foo", "Bar"),
      actions.ModifyColumn("Table1", "Foo", {"type": "Text"}),
      actions.RemoveColumn("Table1", "Foo"),
      actions.AddTable("THello", [{
        "id": "Foo"
      }, {
        "id": "Bar"
      }]),
      actions.RemoveTable("THello"),
      actions.RenameTable("THello", "TWorld"),
    ]

  # Helper: prune a fresh copy and return the result for comparison.
  def prune(table_id, col_id):
    a = alist()
    actions.prune_actions(a, table_id, col_id)
    return a

  self.assertEqual(
    prune('Table1', 'Foo'),
    [
      actions.BulkUpdateRecord("Table2", [1, 2, 3], {
        'Foo': [10, 20, 30],
        'Bar': ['a', 'b', 'c']
      }),
      actions.UpdateRecord("Table2", 18, {
        'Foo': 10,
        'Bar': 'a'
      }),
      actions.BulkAddRecord("Table2", 18, {
        'Foo': 10,
        'Bar': 'a'
      }),
      actions.ReplaceTableData("Table2", 18, {
        'Foo': 10,
        'Bar': 'a'
      }),
      actions.RemoveRecord("Table1", 17),
      actions.BulkRemoveRecord("Table2", [17, 18]),
      # It doesn't do anything with column renames; it can be addressed if needed.
      actions.RenameColumn("Table1", "Foo", "Bar"),
      # It doesn't do anything with AddTable, which is expected.
      actions.AddTable("THello", [{
        "id": "Foo"
      }, {
        "id": "Bar"
      }]),
      actions.RemoveTable("THello"),
      actions.RenameTable("THello", "TWorld"),
    ])

  # Pruning Table2/Foo removes the 'Foo' key from Table2 data actions but leaves
  # Table1 actions untouched.
  self.assertEqual(prune('Table2', 'Foo'), [
    actions.BulkUpdateRecord("Table1", [1, 2, 3], {'Foo': [10, 20, 30]}),
    actions.BulkUpdateRecord("Table2", [1, 2, 3], {'Bar': ['a', 'b', 'c']}),
    actions.UpdateRecord("Table1", 17, {'Foo': 10}),
    actions.UpdateRecord("Table2", 18, {'Bar': 'a'}),
    actions.AddRecord("Table1", 17, {'Foo': 10}),
    actions.BulkAddRecord("Table2", 18, {'Bar': 'a'}),
    actions.ReplaceTableData("Table2", 18, {'Bar': 'a'}),
    actions.RemoveRecord("Table1", 17),
    actions.BulkRemoveRecord("Table2", [17, 18]),
    actions.AddColumn("Table1", "Foo", {"type": "Text"}),
    actions.RenameColumn("Table1", "Foo", "Bar"),
    actions.ModifyColumn("Table1", "Foo", {"type": "Text"}),
    actions.RemoveColumn("Table1", "Foo"),
    actions.AddTable("THello", [{
      "id": "Foo"
    }, {
      "id": "Bar"
    }]),
    actions.RemoveTable("THello"),
    actions.RenameTable("THello", "TWorld"),
  ])