class Migration(migrations.Migration):
    """Rename the built-in calculation plugins that were moved out of the `simpleplugins` namespace.

    The `arithmetic.add` and `templatereplacer` calculation plugins were moved and their entry
    points renamed, dropping the `simpleplugins` namespace. Existing rows are migrated in place:

    * `db_dbnode.type` strings of the two calculation classes,
    * `db_dbnode.process_type` entry point strings, and
    * the `input_plugin` attribute of `Code` nodes referencing the old entry point names.

    The `reverse_sql` restores the `simpleplugins.` prefix for all three.
    """

    dependencies = [
        ('db', '0018_django_1_11'),
    ]

    operations = [
        # The built in calculation plugins `arithmetic.add` and `templatereplacer` have been moved and their entry point
        # renamed. In the change the `simpleplugins` namespace was dropped so we migrate the existing nodes.
        migrations.RunSQL(
            sql="""
                UPDATE db_dbnode SET type = 'calculation.job.arithmetic.add.ArithmeticAddCalculation.'
                WHERE type = 'calculation.job.simpleplugins.arithmetic.add.ArithmeticAddCalculation.';

                UPDATE db_dbnode SET type = 'calculation.job.templatereplacer.TemplatereplacerCalculation.'
                WHERE type = 'calculation.job.simpleplugins.templatereplacer.TemplatereplacerCalculation.';

                UPDATE db_dbnode SET process_type = 'aiida.calculations:arithmetic.add'
                WHERE process_type = 'aiida.calculations:simpleplugins.arithmetic.add';

                UPDATE db_dbnode SET process_type = 'aiida.calculations:templatereplacer'
                WHERE process_type = 'aiida.calculations:simpleplugins.templatereplacer';

                UPDATE db_dbattribute AS a SET tval = 'arithmetic.add'
                FROM db_dbnode AS n WHERE a.dbnode_id = n.id
                AND a.key = 'input_plugin'
                AND a.tval = 'simpleplugins.arithmetic.add'
                AND n.type = 'data.code.Code.';

                UPDATE db_dbattribute AS a SET tval = 'templatereplacer'
                FROM db_dbnode AS n WHERE a.dbnode_id = n.id
                AND a.key = 'input_plugin'
                AND a.tval = 'simpleplugins.templatereplacer'
                AND n.type = 'data.code.Code.';
                """,
            # The reverse operation is lossless here: the renames are exact string swaps,
            # so the old entry point names can be fully restored.
            reverse_sql="""
                UPDATE db_dbnode SET type = 'calculation.job.simpleplugins.arithmetic.add.ArithmeticAddCalculation.'
                WHERE type = 'calculation.job.arithmetic.add.ArithmeticAddCalculation.';

                UPDATE db_dbnode SET type = 'calculation.job.simpleplugins.templatereplacer.TemplatereplacerCalculation.'
                WHERE type = 'calculation.job.templatereplacer.TemplatereplacerCalculation.';

                UPDATE db_dbnode SET process_type = 'aiida.calculations:simpleplugins.arithmetic.add'
                WHERE process_type = 'aiida.calculations:arithmetic.add';

                UPDATE db_dbnode SET process_type = 'aiida.calculations:simpleplugins.templatereplacer'
                WHERE process_type = 'aiida.calculations:templatereplacer';

                UPDATE db_dbattribute AS a SET tval = 'simpleplugins.arithmetic.add'
                FROM db_dbnode AS n WHERE a.dbnode_id = n.id
                AND a.key = 'input_plugin'
                AND a.tval = 'arithmetic.add'
                AND n.type = 'data.code.Code.';

                UPDATE db_dbattribute AS a SET tval = 'simpleplugins.templatereplacer'
                FROM db_dbnode AS n WHERE a.dbnode_id = n.id
                AND a.key = 'input_plugin'
                AND a.tval = 'templatereplacer'
                AND n.type = 'data.code.Code.';
                """),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
class Migration(migrations.Migration):
    """Migration of ProcessNode attributes for metadata options whose key changed.

    Renamed attribute keys:

    * `custom_environment_variables` -> `environment_variables` (CalcJobNode)
    * `jobresource_params` -> `resources` (CalcJobNode)
    * `_process_label` -> `process_label` (ProcessNode)
    * `parser` -> `parser_name` (CalcJobNode)

    Deleted attributes:
    * `linkname_retrieved` (We do not actually delete it just in case some relies on it)

    The dictionary-valued options (`environment_variables`, `resources`) are stored in the EAV
    `db_dbattribute` table as one row per nested key, so both the exact key and all
    dotted sub keys (`LIKE 'prefix.%'`) have to be renamed; `regexp_replace` with an
    anchored pattern rewrites only the leading component of each key.
    """

    dependencies = [
        ('db', '0022_dbgroup_type_string_change_content'),
    ]

    operations = [
        # Raw strings are required: the LIKE patterns escape underscores (`\_`) so they are
        # matched literally instead of as the single-character wildcard.
        migrations.RunSQL(
            sql=r"""
                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^custom_environment_variables', 'environment_variables')
                FROM db_dbnode AS node
                WHERE
                    (
                        attribute.key = 'custom_environment_variables' OR
                        attribute.key LIKE 'custom\_environment\_variables.%'
                    ) AND
                    node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND
                    node.id = attribute.dbnode_id;
                -- custom_environment_variables -> environment_variables

                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^jobresource_params', 'resources')
                FROM db_dbnode AS node
                WHERE
                    (
                        attribute.key = 'jobresource_params' OR
                        attribute.key LIKE 'jobresource\_params.%'
                    ) AND
                    node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND
                    node.id = attribute.dbnode_id;
                -- jobresource_params -> resources

                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^_process_label', 'process_label')
                FROM db_dbnode AS node
                WHERE
                    attribute.key = '_process_label' AND
                    node.type LIKE 'node.process.%' AND
                    node.id = attribute.dbnode_id;
                -- _process_label -> process_label

                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^parser', 'parser_name')
                FROM db_dbnode AS node
                WHERE
                    attribute.key = 'parser' AND
                    node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND
                    node.id = attribute.dbnode_id;
                -- parser -> parser_name
                """,
            # The reverse SQL is the exact mirror image of the forward renames.
            reverse_sql=r"""
                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^environment_variables', 'custom_environment_variables')
                FROM db_dbnode AS node
                WHERE
                    (
                        attribute.key = 'environment_variables' OR
                        attribute.key LIKE 'environment\_variables.%'
                    ) AND
                    node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND
                    node.id = attribute.dbnode_id;
                -- environment_variables -> custom_environment_variables

                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^resources', 'jobresource_params')
                FROM db_dbnode AS node
                WHERE
                    (
                        attribute.key = 'resources' OR
                        attribute.key LIKE 'resources.%'
                    ) AND
                    node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND
                    node.id = attribute.dbnode_id;
                -- resources -> jobresource_params

                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^process_label', '_process_label')
                FROM db_dbnode AS node
                WHERE
                    attribute.key = 'process_label' AND
                    node.type LIKE 'node.process.%' AND
                    node.id = attribute.dbnode_id;
                -- process_label -> _process_label

                UPDATE db_dbattribute AS attribute
                SET key = regexp_replace(attribute.key, '^parser_name', 'parser')
                FROM db_dbnode AS node
                WHERE
                    attribute.key = 'parser_name' AND
                    node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND
                    node.id = attribute.dbnode_id;
                -- parser_name -> parser
                """),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
class Migration(migrations.Migration):
    """
    This migration changes Django backend to support the JSONB fields.
    It is a schema migration that removes the DbAttribute and DbExtra tables and their reference
    to the DbNode tables and adds the corresponding JSONB columns to the DbNode table.
    It is also a data migration that transforms and adds the data of the DbAttribute and DbExtra
    tables to the JSONB columns to the DbNode table.

    NOTE(review): the data-migration steps (`transition_attributes_extras`, `transition_settings`)
    use `RunPython.noop` as reverse, so this migration is effectively irreversible once the EAV
    tables have been dropped.
    """

    dependencies = [
        ('db', '0036_drop_computer_transport_params'),
    ]

    # The order of operations is significant: the JSONB columns must exist before the data is
    # copied in, and the EAV tables/columns may only be dropped after the copy has run.
    operations = [
        # ############################################
        # Migration of the Attribute and Extras tables
        # ############################################

        # Create the DbNode.attributes JSONB and DbNode.extras JSONB fields
        migrations.AddField(
            model_name='dbnode',
            name='attributes',
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True),
        ),
        migrations.AddField(
            model_name='dbnode',
            name='extras',
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True),
        ),
        # Migrate the data from the DbAttribute table to the JSONB field
        migrations.RunPython(transition_attributes_extras, reverse_code=migrations.RunPython.noop),
        # Drop the unique constraint before the model can be deleted
        migrations.AlterUniqueTogether(
            name='dbattribute',
            unique_together=set([]),
        ),
        # Delete the DbAttribute table
        migrations.DeleteModel(name='DbAttribute', ),
        migrations.AlterUniqueTogether(
            name='dbextra',
            unique_together=set([]),
        ),
        # Delete the DbExtra table
        migrations.DeleteModel(name='DbExtra', ),

        # ###############################
        # Migration of the Settings table
        # ###############################

        # Create the DbSetting.val JSONB field
        migrations.AddField(
            model_name='dbsetting',
            name='val',
            field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True),
        ),
        # Migrate the data from the DbSetting EAV to the JSONB val field
        migrations.RunPython(transition_settings, reverse_code=migrations.RunPython.noop),

        # Delete the tval, fval, ival, bval, dval columns of the old EAV layout
        migrations.RemoveField(
            model_name='dbsetting',
            name='tval',
        ),
        migrations.RemoveField(
            model_name='dbsetting',
            name='fval',
        ),
        migrations.RemoveField(
            model_name='dbsetting',
            name='ival',
        ),
        migrations.RemoveField(
            model_name='dbsetting',
            name='bval',
        ),
        migrations.RemoveField(
            model_name='dbsetting',
            name='dval',
        ),
        migrations.RemoveField(
            model_name='dbsetting',
            name='datatype',
        ),
        # The key column is widened to TextField first, the `unique_together` constraint removed,
        # and the column then recreated as an indexed, unique CharField.
        migrations.AlterField(
            model_name='dbsetting',
            name='key',
            field=models.TextField(),
        ),
        migrations.AlterUniqueTogether(
            name='dbsetting',
            unique_together=set([]),
        ),
        migrations.AlterField(
            model_name='dbsetting',
            name='key',
            field=models.CharField(max_length=1024, db_index=True, unique=True),
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION),
    ]
class Migration(migrations.Migration):
    """Backfill the `type` column of `db_dblink` for links that have a wrong or missing type.

    Two classes of links are repaired:

    * return links wrongly recorded out of `InlineCalculation` nodes are turned into create
      links (see issue #628), and
    * links with an empty or NULL type — created either before link types were introduced or
      by export/import round trips (issues #685, #687) — are classified as input, create,
      return or call links based on the node classes at either end.

    FIX(review): the original statements tested ``db_dblink_1.type = null``, which in SQL never
    evaluates to true (comparison with NULL yields NULL), so links whose type column was
    actually NULL — the case the comments explicitly describe — were silently skipped. The
    predicate is corrected to ``db_dblink_1.type IS NULL``.
    """

    dependencies = [
        ('db', '0006_delete_dbpath'),
    ]

    operations = [
        # I am first migrating the wrongly declared returnlinks out of
        # the InlineCalculations.
        # This bug is reported #628 https://github.com/aiidateam/aiida-core/issues/628
        # There is an explicit check in the code of the inline calculation
        # ensuring that the calculation returns UNSTORED nodes.
        # Therefore, no cycle can be created with that migration!
        #
        # this command:
        # 1) selects all links that
        # - joins an InlineCalculation (or subclass) as input
        # - joins a Data (or subclass) as output
        # - is marked as a returnlink.
        # 2) set for these links the type to 'createlink'
        migrations.RunSQL("""
            UPDATE db_dblink set type='createlink' WHERE db_dblink.id IN (
                SELECT db_dblink_1.id
                FROM db_dbnode AS db_dbnode_1
                    JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id
                    JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id
                WHERE db_dbnode_1.type LIKE 'calculation.inline.%'
                    AND db_dbnode_2.type LIKE 'data.%'
                    AND db_dblink_1.type = 'returnlink'
            );
            """),
        # Now I am updating the link-types that are null because of either an export and subsequent import
        # https://github.com/aiidateam/aiida-core/issues/685
        # or because the link types don't exist because the links were added before the introduction of link types.
        # This is reported here: https://github.com/aiidateam/aiida-core/issues/687
        #
        # The following sql statement:
        # 1) selects all links that
        # - joins Data (or subclass) or Code as input
        # - joins Calculation (or subclass) as output. This includes WorkCalculation, InlineCalcuation, JobCalculations...
        # - has no type (null) or an empty type
        # 2) set for these links the type to 'inputlink'
        migrations.RunSQL("""
            UPDATE db_dblink set type='inputlink' where id in (
                SELECT db_dblink_1.id
                FROM db_dbnode AS db_dbnode_1
                    JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id
                    JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id
                WHERE ( db_dbnode_1.type LIKE 'data.%' or db_dbnode_1.type = 'code.Code.' )
                    AND db_dbnode_2.type LIKE 'calculation.%'
                    AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '')
            );
            """),
        #
        # The following sql statement:
        # 1) selects all links that
        # - join JobCalculation (or subclass) or InlineCalculation as input
        # - joins Data (or subclass) as output.
        # - has no type (null) or an empty type
        # 2) set for these links the type to 'createlink'
        migrations.RunSQL("""
            UPDATE db_dblink set type='createlink' where id in (
                SELECT db_dblink_1.id
                FROM db_dbnode AS db_dbnode_1
                    JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id
                    JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id
                WHERE db_dbnode_2.type LIKE 'data.%'
                    AND ( db_dbnode_1.type LIKE 'calculation.job.%' OR db_dbnode_1.type = 'calculation.inline.InlineCalculation.' )
                    AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '')
            );
            """),
        # The following sql statement:
        # 1) selects all links that
        # - join WorkCalculation as input. No subclassing was introduced so far, so only one type string is checked for.
        # - join Data (or subclass) as output.
        # - has no type (null) or an empty type
        # 2) set for these links the type to 'returnlink'
        migrations.RunSQL("""
            UPDATE db_dblink set type='returnlink' where id in (
                SELECT db_dblink_1.id
                FROM db_dbnode AS db_dbnode_1
                    JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id
                    JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id
                WHERE db_dbnode_2.type LIKE 'data.%'
                    AND db_dbnode_1.type = 'calculation.work.WorkCalculation.'
                    AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '')
            );
            """),
        # Now I update links that are CALLS:
        # The following sql statement:
        # 1) selects all links that
        # - join WorkCalculation as input. No subclassing was introduced so far, so only one type string is checked for.
        # - join Calculation (or subclass) as output. Includes JobCalculation and WorkCalculations and all subclasses.
        # - has no type (null) or an empty type
        # 2) set for these links the type to 'calllink'
        migrations.RunSQL("""
            UPDATE db_dblink set type='calllink' where id in (
                SELECT db_dblink_1.id
                FROM db_dbnode AS db_dbnode_1
                    JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id
                    JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id
                WHERE db_dbnode_1.type = 'calculation.work.WorkCalculation.'
                    AND db_dbnode_2.type LIKE 'calculation.%'
                    AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '')
            );
            """),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
class Migration(migrations.Migration):
    """Migration to effectuate changes introduced by the provenance redesign

    This includes in order:

    * Rename the type column of process nodes
    * Remove illegal links
    * Rename link types

    The exact reverse operation is not possible because the renaming of the type string of
    `JobCalculation` nodes is done in a lossy way. Originally this type string contained the
    exact sub class of the `JobCalculation` but in the migration this is changed to always
    be `node.process.calculation.calcjob.CalcJobNode.`. In the reverse operation, this can
    then only be reset to `calculation.job.JobCalculation.` but the information on the exact
    sub class is lost.

    FIX(review): three typos in `reverse_sql` corrected so the reverse actually undoes the
    forward migration: `calculatison.inline` -> `calculation.inline`, and the link types
    `input_call`/`call_call` -> `input_calc`/`call_calc` (the values the forward SQL writes).
    """

    dependencies = [
        ('db', '0019_migrate_builtin_calculations'),
    ]

    operations = [
        # Infer and store the process_type for existing calculation nodes, then verify no
        # links remain that are illegal under the new provenance rules.
        migrations.RunPython(migrate_infer_calculation_entry_point, reverse_code=reverse_code, atomic=True),
        migrations.RunPython(detect_unexpected_links, reverse_code=reverse_code, atomic=True),
        migrations.RunSQL(
            """
            DELETE FROM db_dblink WHERE db_dblink.id IN (
                SELECT db_dblink.id FROM db_dblink
                INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id
                WHERE
                    (db_dbnode.type LIKE 'calculation.job%' OR db_dbnode.type LIKE 'calculation.inline%')
                    AND db_dblink.type = 'returnlink'
            ); -- Delete all outgoing RETURN links from JobCalculation and InlineCalculation nodes

            DELETE FROM db_dblink WHERE db_dblink.id IN (
                SELECT db_dblink.id FROM db_dblink
                INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id
                WHERE
                    (db_dbnode.type LIKE 'calculation.job%' OR db_dbnode.type LIKE 'calculation.inline%')
                    AND db_dblink.type = 'calllink'
            ); -- Delete all outgoing CALL links from JobCalculation and InlineCalculation nodes

            DELETE FROM db_dblink WHERE db_dblink.id IN (
                SELECT db_dblink.id FROM db_dblink
                INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id
                WHERE
                    (db_dbnode.type LIKE 'calculation.function%' OR db_dbnode.type LIKE 'calculation.work%')
                    AND db_dblink.type = 'createlink'
            ); -- Delete all outgoing CREATE links from FunctionCalculation and WorkCalculation nodes

            UPDATE db_dbnode SET type = 'calculation.work.WorkCalculation.'
            WHERE type = 'calculation.process.ProcessCalculation.';
             -- First migrate very old `ProcessCalculation` to `WorkCalculation`

            UPDATE db_dbnode SET type = 'node.process.workflow.workfunction.WorkFunctionNode.' FROM db_dbattribute
            WHERE db_dbattribute.dbnode_id = db_dbnode.id
                AND type = 'calculation.work.WorkCalculation.'
                AND db_dbattribute.key = 'function_name';
             -- WorkCalculations that have a `function_name` attribute are FunctionCalculations

            UPDATE db_dbnode SET type = 'node.process.workflow.workchain.WorkChainNode.'
            WHERE type = 'calculation.work.WorkCalculation.';
             -- Update type for `WorkCalculation` nodes - all what is left should be `WorkChainNodes`

            UPDATE db_dbnode SET type = 'node.process.calculation.calcjob.CalcJobNode.'
            WHERE type LIKE 'calculation.job.%';
             -- Update type for JobCalculation nodes

            UPDATE db_dbnode SET type = 'node.process.calculation.calcfunction.CalcFunctionNode.'
            WHERE type = 'calculation.inline.InlineCalculation.';
             -- Update type for InlineCalculation nodes

            UPDATE db_dbnode SET type = 'node.process.workflow.workfunction.WorkFunctionNode.'
            WHERE type = 'calculation.function.FunctionCalculation.';
             -- Update type for FunctionCalculation nodes

            UPDATE db_dblink SET type = 'create' WHERE type = 'createlink'; -- Rename `createlink` to `create`
            UPDATE db_dblink SET type = 'return' WHERE type = 'returnlink'; -- Rename `returnlink` to `return`

            UPDATE db_dblink SET type = 'input_calc' FROM db_dbnode
            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.calculation%'
            AND db_dblink.type = 'inputlink';
             -- Rename `inputlink` to `input_calc` if the target node is a calculation type node

            UPDATE db_dblink SET type = 'input_work' FROM db_dbnode
            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.workflow%'
            AND db_dblink.type = 'inputlink';
             -- Rename `inputlink` to `input_work` if the target node is a workflow type node

            UPDATE db_dblink SET type = 'call_calc' FROM db_dbnode
            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.calculation%'
            AND db_dblink.type = 'calllink';
             -- Rename `calllink` to `call_calc` if the target node is a calculation type node

            UPDATE db_dblink SET type = 'call_work' FROM db_dbnode
            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.workflow%'
            AND db_dblink.type = 'calllink';
             -- Rename `calllink` to `call_work` if the target node is a workflow type node
            """,
            reverse_sql="""
            UPDATE db_dbnode SET type = 'calculation.job.JobCalculation.'
            WHERE type = 'node.process.calculation.calcjob.CalcJobNode.';

            UPDATE db_dbnode SET type = 'calculation.inline.InlineCalculation.'
            WHERE type = 'node.process.calculation.calcfunction.CalcFunctionNode.';

            UPDATE db_dbnode SET type = 'calculation.function.FunctionCalculation.'
            WHERE type = 'node.process.workflow.workfunction.WorkFunctionNode.';

            UPDATE db_dbnode SET type = 'calculation.work.WorkCalculation.'
            WHERE type = 'node.process.workflow.workchain.WorkChainNode.';

            UPDATE db_dblink SET type = 'inputlink'
            WHERE type = 'input_calc' OR type = 'input_work';

            UPDATE db_dblink SET type = 'calllink'
            WHERE type = 'call_calc' OR type = 'call_work';

            UPDATE db_dblink SET type = 'createlink' WHERE type = 'create';
            UPDATE db_dblink SET type = 'returnlink' WHERE type = 'return';
            """
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
class Migration(migrations.Migration):
    """
    This migration updates the DbLog schema and adds UUID for correct export of the DbLog entries.
    More specifically, it adds UUIDS, it exports to files the not needed log entries (that correspond
    to legacy workflows and unknown entities), it creates a foreign key to the dbnode table, it
    transfers there the objpk data to the new dbnode column (just altering the objpk column and
    making it a foreign key when containing data, raised problems) and in the end objpk and objname
    columns are removed.

    NOTE(review): the legacy-workflow log export (`export_and_clean_workflow_logs`) is not
    re-imported by the reverse migration, so a full round trip does not restore those rows.
    """

    dependencies = [
        ('db', '0023_calc_job_option_attribute_keys'),
    ]

    operations = [
        # Export of the logs of the old workflows to a JSON file, there is no re-import
        # for the reverse migrations
        migrations.RunPython(export_and_clean_workflow_logs, reverse_code=migrations.RunPython.noop),

        # Removing objname and objpk from the metadata. The reverse migration adds the
        # objname and objpk to the metadata
        migrations.RunPython(clean_dblog_metadata, reverse_code=enrich_dblog_metadata),

        # The forward migration will not do anything for the objname, the reverse
        # migration will populate it with correct values
        migrations.RunSQL(
            '',
            reverse_sql='UPDATE db_dblog SET objname=db_dbnode.type '
            'FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk'),

        # Removal of the column objname, the reverse migration will add it
        migrations.RemoveField(model_name='dblog', name='objname'),

        # Creation of a new column called dbnode which is a foreign key to the dbnode table
        # The reverse migration will remove this column
        migrations.AddField(
            model_name='dblog',
            name='dbnode',
            field=models.ForeignKey(
                on_delete=models.deletion.CASCADE, related_name='dblogs', to='db.DbNode', blank=True, null=True),
        ),

        # Transfer of the data from the objpk to the node field
        # The reverse migration will do the inverse transfer
        migrations.RunSQL('UPDATE db_dblog SET dbnode_id=objpk', reverse_sql='UPDATE db_dblog SET objpk=dbnode_id'),

        # Now that all the data have been migrated, make the column not nullable and not blank.
        # A log record should always correspond to a node record
        migrations.AlterField(
            model_name='dblog',
            name='dbnode',
            field=models.ForeignKey(on_delete=models.deletion.CASCADE, related_name='dblogs', to='db.DbNode'),
        ),

        # Since the new column is created correctly, drop the old objpk column
        # The reverse migration will add the field
        migrations.RemoveField(model_name='dblog', name='objpk'),

        # This is the correct pattern to generate unique fields, see
        # https://docs.djangoproject.com/en/1.11/howto/writing-migrations/#migrations-that-add-unique-fields
        # The reverse migration will remove it
        migrations.AddField(
            model_name='dblog',
            name='uuid',
            field=models.UUIDField(default=get_new_uuid, null=True),
        ),

        # Add unique UUIDs to the UUID field. There is no need for a reverse migration for a field
        # tha will be deleted
        migrations.RunPython(set_new_uuid, reverse_code=migrations.RunPython.noop),

        # Changing the column to unique
        migrations.AlterField(
            model_name='dblog',
            name='uuid',
            field=models.UUIDField(default=get_new_uuid, unique=True),
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
class Migration(migrations.Migration): dependencies = [ ('auth', '0001_initial'), ] operations = [ migrations.CreateModel( name='DbUser', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('password', models.CharField(max_length=128, verbose_name='password')), ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')), ( 'is_superuser', models.BooleanField( default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status' ) ), ('email', models.EmailField(unique=True, max_length=75, db_index=True)), ('first_name', models.CharField(max_length=254, blank=True)), ('last_name', models.CharField(max_length=254, blank=True)), ('institution', models.CharField(max_length=254, blank=True)), ( 'is_staff', models.BooleanField( default=False, help_text='Designates whether the user can log into this admin site.' ) ), ( 'is_active', models.BooleanField( default=True, help_text= 'Designates whether this user should be treated as active. Unselect this instead of deleting accounts.' ) ), ('date_joined', models.DateTimeField(default=django.utils.timezone.now)), ( 'groups', models.ManyToManyField( related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text= 'The groups this user belongs to. 
A user will get all permissions granted to each of his/her group.', verbose_name='groups' ) ), ( 'user_permissions', models.ManyToManyField( related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions' ) ), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='DbAttribute', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=1024, db_index=True)), ( 'datatype', models.CharField( default='none', max_length=10, db_index=True, choices=[('float', 'float'), ('int', 'int'), ('txt', 'txt'), ('bool', 'bool'), ('date', 'date'), ('json', 'json'), ('dict', 'dict'), ('list', 'list'), ('none', 'none')] ) ), ('tval', models.TextField(default='', blank=True)), ('fval', models.FloatField(default=None, null=True)), ('ival', models.IntegerField(default=None, null=True)), ('bval', models.NullBooleanField(default=None)), ('dval', models.DateTimeField(default=None, null=True)), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='DbAuthInfo', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('auth_params', models.TextField(default='{}')), ('metadata', models.TextField(default='{}')), ('enabled', models.BooleanField(default=True)), ('aiidauser', models.ForeignKey(to='db.DbUser', on_delete=models.CASCADE)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbCalcState', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ( 'state', models.CharField( db_index=True, max_length=25, choices=[('UNDETERMINED', 'UNDETERMINED'), ('NOTFOUND', 'NOTFOUND'), ('RETRIEVALFAILED', 'RETRIEVALFAILED'), ('COMPUTED', 'COMPUTED'), ('RETRIEVING', 'RETRIEVING'), ('WITHSCHEDULER', 'WITHSCHEDULER'), 
('SUBMISSIONFAILED', 'SUBMISSIONFAILED'), ('PARSING', 'PARSING'), ('FAILED', 'FAILED'), ('FINISHED', 'FINISHED'), ('TOSUBMIT', 'TOSUBMIT'), ('SUBMITTING', 'SUBMITTING'), ('IMPORTED', 'IMPORTED'), ('NEW', 'NEW'), ('PARSINGFAILED', 'PARSINGFAILED')] ) ), ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbComment', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('uuid', models.CharField(editable=False, blank=True, max_length=36)), ('ctime', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('mtime', models.DateTimeField(auto_now=True)), ('content', models.TextField(blank=True)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbComputer', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('uuid', models.CharField(max_length=36, editable=False, blank=True)), ('name', models.CharField(unique=True, max_length=255)), ('hostname', models.CharField(max_length=255)), ('description', models.TextField(blank=True)), ('enabled', models.BooleanField(default=True)), ('transport_type', models.CharField(max_length=255)), ('scheduler_type', models.CharField(max_length=255)), ('transport_params', models.TextField(default='{}')), ('metadata', models.TextField(default='{}')), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbExtra', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=1024, db_index=True)), ( 'datatype', models.CharField( default='none', max_length=10, db_index=True, choices=[('float', 'float'), ('int', 'int'), ('txt', 'txt'), ('bool', 'bool'), ('date', 'date'), ('json', 'json'), ('dict', 'dict'), ('list', 'list'), ('none', 'none')] ) ), ('tval', models.TextField(default='', blank=True)), 
('fval', models.FloatField(default=None, null=True)), ('ival', models.IntegerField(default=None, null=True)), ('bval', models.NullBooleanField(default=None)), ('dval', models.DateTimeField(default=None, null=True)), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='DbGroup', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('uuid', models.CharField(max_length=36, editable=False, blank=True)), ('name', models.CharField(max_length=255, db_index=True)), ('type', models.CharField(default='', max_length=255, db_index=True)), ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('description', models.TextField(blank=True)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbLink', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('label', models.CharField(max_length=255, db_index=True)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbLock', fields=[ ('key', models.CharField(max_length=255, serialize=False, primary_key=True)), ('creation', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('timeout', models.IntegerField(editable=False)), ('owner', models.CharField(max_length=255)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbLog', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('loggername', models.CharField(max_length=255, db_index=True)), ('levelname', models.CharField(max_length=50, db_index=True)), ('objname', models.CharField(db_index=True, max_length=255, blank=True)), ('objpk', models.IntegerField(null=True, db_index=True)), ('message', models.TextField(blank=True)), ('metadata', models.TextField(default='{}')), ], options={}, 
bases=(models.Model,), ), migrations.CreateModel( name='DbNode', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('uuid', models.CharField(max_length=36, editable=False, blank=True)), ('type', models.CharField(max_length=255, db_index=True)), ('label', models.CharField(db_index=True, max_length=255, blank=True)), ('description', models.TextField(blank=True)), ('ctime', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('mtime', models.DateTimeField(auto_now=True)), ('nodeversion', models.IntegerField(default=1, editable=False)), ('public', models.BooleanField(default=False)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbPath', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('depth', models.IntegerField(editable=False)), ('entry_edge_id', models.IntegerField(null=True, editable=False)), ('direct_edge_id', models.IntegerField(null=True, editable=False)), ('exit_edge_id', models.IntegerField(null=True, editable=False)), ( 'child', models.ForeignKey( related_name='parent_paths', editable=False, to='db.DbNode', on_delete=models.CASCADE ) ), ( 'parent', models.ForeignKey( related_name='child_paths', editable=False, to='db.DbNode', on_delete=models.CASCADE ) ), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbSetting', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=1024, db_index=True)), ( 'datatype', models.CharField( default='none', max_length=10, db_index=True, choices=[('float', 'float'), ('int', 'int'), ('txt', 'txt'), ('bool', 'bool'), ('date', 'date'), ('json', 'json'), ('dict', 'dict'), ('list', 'list'), ('none', 'none')] ) ), ('tval', models.TextField(default='', blank=True)), ('fval', models.FloatField(default=None, null=True)), ('ival', models.IntegerField(default=None, 
null=True)), ('bval', models.NullBooleanField(default=None)), ('dval', models.DateTimeField(default=None, null=True)), ('description', models.TextField(blank=True)), ('time', models.DateTimeField(auto_now=True)), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='DbWorkflow', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('uuid', models.CharField(max_length=36, editable=False, blank=True)), ('ctime', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('mtime', models.DateTimeField(auto_now=True)), ('label', models.CharField(db_index=True, max_length=255, blank=True)), ('description', models.TextField(blank=True)), ('nodeversion', models.IntegerField(default=1, editable=False)), ('lastsyncedversion', models.IntegerField(default=0, editable=False)), ( 'state', models.CharField( choices=[('CREATED', 'CREATED'), ('ERROR', 'ERROR'), ('FINISHED', 'FINISHED'), ('INITIALIZED', 'INITIALIZED'), ('RUNNING', 'RUNNING'), ('SLEEP', 'SLEEP')], default='INITIALIZED', max_length=255 ) ), ('report', models.TextField(blank=True)), ('module', models.TextField()), ('module_class', models.TextField()), ('script_path', models.TextField()), ('script_md5', models.CharField(max_length=255)), ('user', models.ForeignKey(to='db.DbUser', on_delete=django.db.models.deletion.PROTECT)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbWorkflowData', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=255)), ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('data_type', models.CharField(default='PARAMETER', max_length=255)), ('value_type', models.CharField(default='NONE', max_length=255)), ('json_value', models.TextField(blank=True)), ('aiida_obj', models.ForeignKey(blank=True, to='db.DbNode', null=True, 
on_delete=models.CASCADE)), ('parent', models.ForeignKey(related_name='data', to='db.DbWorkflow', on_delete=models.CASCADE)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name='DbWorkflowStep', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=255)), ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)), ('nextcall', models.CharField(default='none', max_length=255)), ( 'state', models.CharField( choices=[('CREATED', 'CREATED'), ('ERROR', 'ERROR'), ('FINISHED', 'FINISHED'), ('INITIALIZED', 'INITIALIZED'), ('RUNNING', 'RUNNING'), ('SLEEP', 'SLEEP')], default='CREATED', max_length=255 ) ), ('calculations', models.ManyToManyField(related_name='workflow_step', to='db.DbNode')), ('parent', models.ForeignKey(related_name='steps', to='db.DbWorkflow', on_delete=models.CASCADE)), ('sub_workflows', models.ManyToManyField(related_name='parent_workflow_step', to='db.DbWorkflow')), ('user', models.ForeignKey(to='db.DbUser', on_delete=django.db.models.deletion.PROTECT)), ], options={}, bases=(models.Model,), ), migrations.AlterUniqueTogether( name='dbworkflowstep', unique_together=set([('parent', 'name')]), ), migrations.AlterUniqueTogether( name='dbworkflowdata', unique_together=set([('parent', 'name', 'data_type')]), ), migrations.AlterUniqueTogether( name='dbsetting', unique_together=set([('key',)]), ), migrations.AddField( model_name='dbnode', name='children', field=models.ManyToManyField(related_name='parents', through='db.DbPath', to='db.DbNode'), preserve_default=True, ), migrations.AddField( model_name='dbnode', name='dbcomputer', field=models.ForeignKey( related_name='dbnodes', on_delete=django.db.models.deletion.PROTECT, to='db.DbComputer', null=True ), preserve_default=True, ), migrations.AddField( model_name='dbnode', name='outputs', field=models.ManyToManyField(related_name='inputs', through='db.DbLink', to='db.DbNode'), 
preserve_default=True, ), migrations.AddField( model_name='dbnode', name='user', field=models.ForeignKey( related_name='dbnodes', on_delete=django.db.models.deletion.PROTECT, to='db.DbUser' ), preserve_default=True, ), migrations.AddField( model_name='dblink', name='input', field=models.ForeignKey( related_name='output_links', on_delete=django.db.models.deletion.PROTECT, to='db.DbNode' ), preserve_default=True, ), migrations.AddField( model_name='dblink', name='output', field=models.ForeignKey(related_name='input_links', to='db.DbNode', on_delete=models.CASCADE), preserve_default=True, ), migrations.AlterUniqueTogether( name='dblink', unique_together=set([('input', 'output'), ('output', 'label')]), ), migrations.AddField( model_name='dbgroup', name='dbnodes', field=models.ManyToManyField(related_name='dbgroups', to='db.DbNode'), preserve_default=True, ), migrations.AddField( model_name='dbgroup', name='user', field=models.ForeignKey(related_name='dbgroups', to='db.DbUser', on_delete=models.CASCADE), preserve_default=True, ), migrations.AlterUniqueTogether( name='dbgroup', unique_together=set([('name', 'type')]), ), migrations.AddField( model_name='dbextra', name='dbnode', field=models.ForeignKey(related_name='dbextras', to='db.DbNode', on_delete=models.CASCADE), preserve_default=True, ), migrations.AlterUniqueTogether( name='dbextra', unique_together=set([('dbnode', 'key')]), ), migrations.AddField( model_name='dbcomment', name='dbnode', field=models.ForeignKey(related_name='dbcomments', to='db.DbNode', on_delete=models.CASCADE), preserve_default=True, ), migrations.AddField( model_name='dbcomment', name='user', field=models.ForeignKey(to='db.DbUser', on_delete=models.CASCADE), preserve_default=True, ), migrations.AddField( model_name='dbcalcstate', name='dbnode', field=models.ForeignKey(related_name='dbstates', to='db.DbNode', on_delete=models.CASCADE), preserve_default=True, ), migrations.AlterUniqueTogether( name='dbcalcstate', unique_together=set([('dbnode', 
'state')]), ), migrations.AddField( model_name='dbauthinfo', name='dbcomputer', field=models.ForeignKey(to='db.DbComputer', on_delete=models.CASCADE), preserve_default=True, ), migrations.AlterUniqueTogether( name='dbauthinfo', unique_together=set([('aiidauser', 'dbcomputer')]), ), migrations.AddField( model_name='dbattribute', name='dbnode', field=models.ForeignKey(related_name='dbattributes', to='db.DbNode', on_delete=models.CASCADE), preserve_default=True, ), migrations.AlterUniqueTogether( name='dbattribute', unique_together=set([('dbnode', 'key')]), ), upgrade_schema_version(REVISION, DOWN_REVISION) ]
class Migration(migrations.Migration):
    """Add a ``type`` column to ``db_dblink`` and repoint timestamp defaults.

    Every ``DateTimeField`` default is switched to ``aiida.common.timezone.now``
    and the unique constraint on ``db_dblink`` is dropped.
    """

    dependencies = [
        ('db', '0002_db_state_change'),
    ]

    # The repeated timestamp `AlterField` operations are generated from
    # (model, column) pairs; the pairs are listed in the order the operations
    # must be applied. `dbuser.date_joined` is the only timestamp column that
    # stays editable, so it is emitted separately between the two groups.
    operations = [
        migrations.AddField(
            model_name='dblink',
            name='type',
            field=models.CharField(db_index=True, max_length=255, blank=True),
            preserve_default=True,
        ),
    ] + [
        migrations.AlterField(
            model_name=model_name,
            name=column_name,
            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
            preserve_default=True,
        ) for model_name, column_name in (
            ('dbcalcstate', 'time'),
            ('dbcomment', 'ctime'),
            ('dbgroup', 'time'),
            ('dblock', 'creation'),
            ('dblog', 'time'),
            ('dbnode', 'ctime'),
        )
    ] + [
        migrations.AlterField(
            model_name='dbuser',
            name='date_joined',
            field=models.DateTimeField(default=aiida.common.timezone.now),
            preserve_default=True,
        ),
    ] + [
        migrations.AlterField(
            model_name=model_name,
            name=column_name,
            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
            preserve_default=True,
        ) for model_name, column_name in (
            ('dbworkflow', 'ctime'),
            ('dbworkflowdata', 'time'),
            ('dbworkflowstep', 'time'),
        )
    ] + [
        # Links no longer need to be unique per (input, output) pair.
        migrations.AlterUniqueTogether(
            name='dblink',
            unique_together=set(),
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION),
    ]
class Migration(migrations.Migration):
    """Migration for the upgrade to django 1.11.

    Switches from the django_extensions UUID field to the native ``UUIDField``
    of django 1.11 and introduces unique constraints on all uuid columns
    (previously one existed only on ``dbnode``).
    """

    dependencies = [
        ('db', '0017_drop_dbcalcstate'),
    ]

    operations = [
        # Run the UUID-uniqueness verification helper before touching the
        # columns; `reverse_code` supplies the reverse-direction callable.
        migrations.RunPython(_verify_uuid_uniqueness, reverse_code=reverse_code),
    ] + [
        # These `uuid` columns can be converted to a unique native UUIDField
        # in a single step.
        migrations.AlterField(
            model_name=model_name,
            name='uuid',
            field=models.UUIDField(unique=True, default=aiida.common.utils.get_new_uuid),
        ) for model_name in ('dbcomment', 'dbcomputer', 'dbgroup')
    ] + [
        # `dbnode.uuid` is converted in two steps: first remove the existing
        # index/uniqueness while it is still a CharField ...
        migrations.AlterField(
            model_name='dbnode',
            name='uuid',
            field=models.CharField(max_length=36, default=aiida.common.utils.get_new_uuid, unique=False),
        ),
        # ... then switch the column to a unique native UUIDField.
        migrations.AlterField(
            model_name='dbnode',
            name='uuid',
            field=models.UUIDField(default=aiida.common.utils.get_new_uuid, unique=True),
        ),
        migrations.AlterField(
            model_name='dbuser',
            name='email',
            field=models.EmailField(db_index=True, max_length=254, unique=True),
        ),
        migrations.AlterField(
            model_name='dbuser',
            name='groups',
            field=models.ManyToManyField(
                blank=True,
                help_text=
                'The groups this user belongs to. A user will get all permissions granted to each of their groups.',
                related_name='user_set',
                related_query_name='user',
                to='auth.Group',
                verbose_name='groups',
            ),
        ),
        migrations.AlterField(
            model_name='dbworkflow',
            name='uuid',
            field=models.UUIDField(unique=True, default=aiida.common.utils.get_new_uuid),
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION),
    ]