def test_serializer_view(self):
    """Test the view serialization.

    First attempts to create a view whose name collides with an existing
    one ("simple view"), which must raise IntegrityError. Then creates a
    view with a fresh name and verifies the workflow ends up with two
    views.
    """
    # Try to create a view with a name that already exists.
    try:
        views = ViewSerializer(
            data=[{
                "columns": [
                    {"name": "email"},
                    {"name": "one"},
                    {"name": "registered"},
                    {"name": "when"}],
                "name": "simple view",
                "description_text": "",
                "formula": {
                    "not": False,
                    "rules": [],
                    "valid": True,
                    "condition": "AND"},
            }],
            many=True,
            context={
                'workflow': self.workflow,
                'columns': self.workflow.columns.all()
            },
        )
        self.assertTrue(views.is_valid())
        views.save()
    except IntegrityError as exc:
        # assertIn reports a clearer diff than assertTrue('...' in str)
        self.assertIn('duplicate key value violates', str(exc))
    else:
        # self.fail marks this as a test failure; the original
        # `raise Exception(...)` would have been reported as an error.
        self.fail('Incorrect serializer operation.')

    # Try to create a view with a different name
    views = ViewSerializer(
        data=[{
            "columns": [
                {"name": "email"},
                {"name": "one"},
                {"name": "registered"},
                {"name": "when"}],
            "name": "simple view 2",
            "description_text": "",
            "formula": {
                "not": False,
                "rules": [],
                "valid": True,
                "condition": "AND"},
        }],
        many=True,
        context={
            'workflow': self.workflow,
            'columns': self.workflow.columns.all()
        },
    )
    self.assertTrue(views.is_valid())
    views.save()

    self.assertEqual(self.workflow.views.count(), 2)
def create(self, validated_data, **kwargs):
    """Create a workflow together with its columns, data, actions, views.

    On any failure the partially created artifacts (data frame table and
    workflow row) are removed before re-raising the exception.

    :param validated_data: Validated payload produced by the serializer.
    :return: The newly created Workflow object.
    """
    # Initial values
    workflow_obj = None
    try:
        workflow_obj = Workflow(
            user=self.context['user'],
            name=self.context['name'],
            description_text=validated_data['description_text'],
            nrows=0,
            ncols=0,
            attributes=validated_data['attributes'],
            query_builder_ops=validated_data.get('query_builder_ops', {}))
        workflow_obj.save()

        # Create the columns
        column_data = ColumnSerializer(
            data=validated_data.get('columns', []),
            many=True,
            context={'workflow': workflow_obj})
        # And save its content
        if column_data.is_valid():
            column_data.save()
        else:
            raise Exception('Unable to save column information')

        # If there is any column with position = 0, recompute (this is to
        # guarantee backward compatibility).
        if workflow_obj.columns.filter(position=0).exists():
            for idx, c in enumerate(workflow_obj.columns.all()):
                c.position = idx + 1
                c.save()

        # Load the data frame
        data_frame = validated_data.get('data_frame', None)
        if data_frame is not None:
            ops.store_dataframe_in_db(data_frame, workflow_obj.id)

        # Reconcile now the information in workflow and columns with the
        # one loaded
        workflow_obj.data_frame_table_name = \
            pandas_db.create_table_name(workflow_obj.pk)
        workflow_obj.ncols = validated_data['ncols']
        workflow_obj.nrows = validated_data['nrows']
        workflow_obj.save()

        # Create the actions pointing to the workflow
        action_data = ActionSerializer(
            data=validated_data.get('actions', []),
            many=True,
            context={'workflow': workflow_obj})
        if action_data.is_valid():
            action_data.save()
        else:
            # BUGFIX: message previously said "column" (copy/paste error)
            raise Exception('Unable to save action information')

        # Create the views pointing to the workflow
        view_data = ViewSerializer(
            data=validated_data.get('views', []),
            many=True,
            context={'workflow': workflow_obj})
        if view_data.is_valid():
            view_data.save()
        else:
            # BUGFIX: message previously said "column" (copy/paste error)
            raise Exception('Unable to save view information')
    except Exception:
        # Get rid of the objects created
        if workflow_obj:
            if workflow_obj.has_data_frame():
                pandas_db.delete_table(workflow_obj.id)
            if workflow_obj.id:
                workflow_obj.delete()
        raise

    return workflow_obj
class WorkflowExportSerializer(serializers.ModelSerializer):
    """
    This serializer is used to export Workflows selecting a subset of
    actions. Since the SerializerMethodField used for the selection is a
    read_only field, the import is managed by a different serializer that
    uses a regular one for the action field (see WorkflowImportSerializer)
    """

    # Only the actions selected in the context are exported.
    actions = serializers.SerializerMethodField('get_filtered_actions')

    data_frame = DataFramePandasField(
        required=False,
        help_text='This field must be the Base64 encoded '
                  'result of pandas.to_pickle() function')

    columns = ColumnSerializer(many=True, required=False)

    views = ViewSerializer(many=True, required=False)

    # Version stamp included in the exported payload.
    version = serializers.CharField(
        read_only=True,
        default='NO VERSION',
        allow_blank=True,
        label="OnTask Version",
        # BUGFIX: typo "compability" corrected in user-facing help text
        help_text="To guarantee compatibility")

    def get_filtered_actions(self, workflow):
        """Serialize only the actions listed in context['selected_actions']."""
        # Get the subset of actions specified in the context
        action_list = self.context.get('selected_actions', [])
        if not action_list:
            # No action needs to be included, no need to call the action
            # serializer
            return []

        # Execute the query set
        query_set = workflow.actions.filter(id__in=action_list)

        # Serialize the content and return data
        serializer = ActionSerializer(
            instance=query_set,
            many=True,
            required=False)
        return serializer.data

    def create(self, validated_data, **kwargs):
        """Create a workflow with its columns, data, actions and views.

        On any failure the partially created artifacts (data frame table
        and workflow row) are removed before re-raising the exception.
        """
        # Initial values
        workflow_obj = None
        try:
            workflow_obj = Workflow(
                user=self.context['user'],
                name=self.context['name'],
                description_text=validated_data['description_text'],
                nrows=0,
                ncols=0,
                attributes=validated_data['attributes'],
                query_builder_ops=validated_data.get(
                    'query_builder_ops', {}))
            workflow_obj.save()

            # Create the columns
            column_data = ColumnSerializer(
                data=validated_data.get('columns', []),
                many=True,
                context={'workflow': workflow_obj})
            # And save its content
            if column_data.is_valid():
                column_data.save()
            else:
                raise Exception('Unable to save column information')

            # If there is any column with position = 0, recompute (this is
            # to guarantee backward compatibility).
            if workflow_obj.columns.filter(position=0).exists():
                for idx, c in enumerate(workflow_obj.columns.all()):
                    c.position = idx + 1
                    c.save()

            # Load the data frame
            data_frame = validated_data.get('data_frame', None)
            if data_frame is not None:
                ops.store_dataframe_in_db(data_frame, workflow_obj.id)

            # Reconcile now the information in workflow and columns with
            # the one loaded
            workflow_obj.data_frame_table_name = \
                pandas_db.create_table_name(workflow_obj.pk)
            workflow_obj.ncols = validated_data['ncols']
            workflow_obj.nrows = validated_data['nrows']
            workflow_obj.save()

            # Create the actions pointing to the workflow
            action_data = ActionSerializer(
                data=validated_data.get('actions', []),
                many=True,
                context={'workflow': workflow_obj})
            if action_data.is_valid():
                action_data.save()
            else:
                # BUGFIX: message previously said "column" (copy/paste)
                raise Exception('Unable to save action information')

            # Create the views pointing to the workflow
            view_data = ViewSerializer(
                data=validated_data.get('views', []),
                many=True,
                context={'workflow': workflow_obj})
            if view_data.is_valid():
                view_data.save()
            else:
                # BUGFIX: message previously said "column" (copy/paste)
                raise Exception('Unable to save view information')
        except Exception:
            # Get rid of the objects created
            if workflow_obj:
                if workflow_obj.has_data_frame():
                    pandas_db.delete_table(workflow_obj.id)
                if workflow_obj.id:
                    workflow_obj.delete()
            raise

        return workflow_obj

    class Meta:
        model = Workflow

        # fields = ('description_text', 'nrows', 'ncols', 'attributes',
        #           'query_builder_ops', 'columns', 'data_frame', 'actions')

        exclude = ('id', 'user', 'created', 'modified',
                   'data_frame_table_name', 'session_key', 'shared')
class WorkflowExportSerializer(serializers.ModelSerializer):
    """
    This serializer is used to export Workflows selecting a subset of
    actions. Since the SerializerMethodField used for the selection is a
    read_only field, the import is managed by a different serializer that
    uses a regular one for the action field (see WorkflowImportSerializer)
    """

    # Only the actions selected in the context are exported.
    actions = serializers.SerializerMethodField('get_filtered_actions')

    data_frame = DataFramePandasField(
        required=False,
        help_text='This field must be the Base64 encoded '
                  'result of pandas.to_pickle() function'
    )

    columns = ColumnSerializer(many=True, required=False)

    views = ViewSerializer(many=True, required=False)

    def get_filtered_actions(self, workflow):
        """Serialize only the actions listed in context['selected_actions'].

        Returns an empty list when no actions are selected.
        """
        # Get the subset of actions specified in the context
        action_list = self.context.get('selected_actions', [])
        if not action_list:
            # No action needs to be included, no need to call the action
            # serializer
            return []

        # Execute the query set
        query_set = workflow.actions.filter(id__in=action_list)

        # Serialize the content and return data
        serializer = ActionSerializer(
            instance=query_set,
            many=True,
            required=False)
        return serializer.data

    def create(self, validated_data, **kwargs):
        """Create the workflow with columns, actions, views and data frame.

        NOTE(review): if any nested serializer is invalid, the workflow is
        deleted and None is returned — the caller receives no diagnostic
        and a data frame stored before the failure would not be reachable.
        Confirm callers handle the None return before changing this.
        """
        workflow_obj = Workflow(
            user=self.context['user'],
            name=self.context['name'],
            description_text=validated_data['description_text'],
            nrows=0,
            ncols=0,
            attributes=validated_data['attributes'],
            query_builder_ops=validated_data.get('query_builder_ops', {})
        )
        workflow_obj.save()

        # Create the columns
        column_data = ColumnSerializer(
            data=validated_data.get('columns', []),
            many=True,
            context={'workflow': workflow_obj})
        # And save its content
        if column_data.is_valid():
            column_data.save()
        else:
            workflow_obj.delete()
            return None

        # Create the actions pointing to the workflow
        action_data = ActionSerializer(
            data=validated_data.get('actions', []),
            many=True,
            context={'workflow': workflow_obj}
        )
        if action_data.is_valid():
            action_data.save()
        else:
            workflow_obj.delete()
            return None

        # Create the views pointing to the workflow
        view_data = ViewSerializer(
            data=validated_data.get('views', []),
            many=True,
            context={'workflow': workflow_obj}
        )
        if view_data.is_valid():
            view_data.save()
        else:
            workflow_obj.delete()
            return None

        # Load the data frame
        data_frame = validated_data.get('data_frame', None)
        if data_frame is not None:
            ops.store_dataframe_in_db(data_frame, workflow_obj.id)

        # Reconcile now the information in workflow and columns with the
        # one loaded
        workflow_obj.data_frame_table_name = \
            pandas_db.create_table_name(workflow_obj.pk)
        workflow_obj.ncols = validated_data['ncols']
        workflow_obj.nrows = validated_data['nrows']
        workflow_obj.save()

        return workflow_obj

    class Meta:
        model = Workflow

        # fields = ('description_text', 'nrows', 'ncols', 'attributes',
        #           'query_builder_ops', 'columns', 'data_frame', 'actions')

        exclude = ('id', 'user', 'created', 'modified',
                   'data_frame_table_name', 'session_key', 'shared')
def create(self, validated_data, **kwargs):
    """Create the workflow with its columns, actions, views and data frame.

    NOTE(review): if any nested serializer is invalid, the workflow is
    deleted and None is returned — the caller receives no diagnostic about
    which part failed. Confirm callers handle the None return before
    changing this behavior.
    """
    workflow_obj = Workflow(
        user=self.context['user'],
        name=self.context['name'],
        description_text=validated_data['description_text'],
        nrows=0,
        ncols=0,
        attributes=validated_data['attributes'],
        query_builder_ops=validated_data.get('query_builder_ops', {})
    )
    workflow_obj.save()

    # Create the columns
    column_data = ColumnSerializer(
        data=validated_data.get('columns', []),
        many=True,
        context={'workflow': workflow_obj})
    # And save its content
    if column_data.is_valid():
        column_data.save()
    else:
        workflow_obj.delete()
        return None

    # Create the actions pointing to the workflow
    action_data = ActionSerializer(
        data=validated_data.get('actions', []),
        many=True,
        context={'workflow': workflow_obj}
    )
    if action_data.is_valid():
        action_data.save()
    else:
        workflow_obj.delete()
        return None

    # Create the views pointing to the workflow
    view_data = ViewSerializer(
        data=validated_data.get('views', []),
        many=True,
        context={'workflow': workflow_obj}
    )
    if view_data.is_valid():
        view_data.save()
    else:
        workflow_obj.delete()
        return None

    # Load the data frame
    data_frame = validated_data.get('data_frame', None)
    if data_frame is not None:
        ops.store_dataframe_in_db(data_frame, workflow_obj.id)

    # Reconcile now the information in workflow and columns with the
    # one loaded
    workflow_obj.data_frame_table_name = \
        pandas_db.create_table_name(workflow_obj.pk)
    workflow_obj.ncols = validated_data['ncols']
    workflow_obj.nrows = validated_data['nrows']
    workflow_obj.save()

    return workflow_obj
class WorkflowExportSerializer(serializers.ModelSerializer):
    """Serializer to export the workflow.

    This serializer is used to export Workflows selecting a subset of
    actions. Since the SerializerMethodField used for the selection is a
    read_only field, the import is managed by a different serializer that
    uses a regular one for the action field (see WorkflowImportSerializer)
    """

    # Method name defaults to get_actions.
    actions = serializers.SerializerMethodField()

    data_frame = DataFramePandasField(
        required=False,
        allow_null=True,
        help_text=_('This field must be the Base64 encoded ' +
                    'result of pandas.to_pickle() function'),
    )

    columns = ColumnSerializer(many=True, required=False)

    views = ViewSerializer(many=True, required=False)

    # Version stamp included in the exported payload.
    version = serializers.CharField(
        read_only=True,
        default='NO VERSION',
        allow_blank=True,
        label='OnTask Version',
        # BUGFIX: typo "compability" corrected in user-facing help text
        help_text=_('To guarantee compatibility'))

    def get_actions(self, workflow):
        """Get the list of selected actions."""
        action_list = self.context.get('selected_actions', [])
        if not action_list:
            # No action needs to be included, no need to call the action
            # serializer
            return []

        # Execute the query set
        query_set = workflow.actions.filter(id__in=action_list)

        # Serialize the content and return data
        serializer = ActionSerializer(
            instance=query_set,
            many=True,
            required=False)
        return serializer.data

    @profile
    def create(self, validated_data, **kwargs):
        """Create the new workflow.

        The name is taken from the context (import page) or, failing that,
        from the validated data. A workflow with the same name owned by
        the same user must not already exist. On any failure the partially
        created workflow row is deleted before re-raising.
        """
        wflow_name = self.context.get('name')
        if not wflow_name:
            wflow_name = self.validated_data.get('name')
            if not wflow_name:
                raise Exception(_('Unexpected empty workflow name.'))

            if Workflow.objects.filter(
                name=wflow_name,
                user=self.context['user']
            ).exists():
                raise Exception(
                    _('There is a workflow with this name. ' +
                      'Please provide a workflow name in the import page.'))

        # Initial values
        workflow_obj = None
        try:
            workflow_obj = Workflow(
                user=self.context['user'],
                name=wflow_name,
                description_text=validated_data['description_text'],
                nrows=0,
                ncols=0,
                attributes=validated_data['attributes'],
                query_builder_ops=validated_data.get(
                    'query_builder_ops', {}),
            )
            workflow_obj.save()

            # Create the columns
            column_data = ColumnSerializer(
                data=validated_data.get('columns', []),
                many=True,
                context={'workflow': workflow_obj})
            # And save its content
            if column_data.is_valid():
                columns = column_data.save()
            else:
                raise Exception(_('Unable to save column information'))

            # If there is any column with position = 0, recompute (this is
            # to guarantee backward compatibility).
            if any(col.position == 0 for col in columns):
                for idx, col in enumerate(columns):
                    col.position = idx + 1
                    col.save()

            # Load the data frame
            data_frame = validated_data.get('data_frame')
            if data_frame is not None:
                # Store the table in the DB
                store_table(
                    data_frame,
                    workflow_obj.get_data_frame_table_name(),
                    dtype={
                        col.name: col.data_type
                        for col in workflow_obj.columns.all()
                    },
                )

                # Reconcile now the information in workflow and columns
                # with the one loaded
                workflow_obj.ncols = validated_data['ncols']
                workflow_obj.nrows = validated_data['nrows']

            workflow_obj.save()

            # Create the actions pointing to the workflow
            action_data = ActionSerializer(
                data=validated_data.get('actions', []),
                many=True,
                context={'workflow': workflow_obj, 'columns': columns})
            if action_data.is_valid():
                action_data.save()
            else:
                # BUGFIX: message previously said "column" (copy/paste)
                raise Exception(_('Unable to save action information'))

            # Create the views pointing to the workflow
            view_data = ViewSerializer(
                data=validated_data.get('views', []),
                many=True,
                context={'workflow': workflow_obj, 'columns': columns})
            if view_data.is_valid():
                view_data.save()
            else:
                # BUGFIX: message previously said "column" (copy/paste)
                raise Exception(_('Unable to save view information'))
        except Exception:
            # Get rid of the objects created
            if workflow_obj:
                if workflow_obj.id:
                    workflow_obj.delete()
            raise

        return workflow_obj

    class Meta(object):
        """Select model and fields to exclude."""

        model = Workflow

        exclude = (
            'id',
            'user',
            'created',
            'modified',
            'data_frame_table_name',
            'session_key',
            'shared',
            'star',
            'luser_email_column',
            'luser_email_column_md5',
            'lusers',
            'lusers_is_outdated',
        )
def create(self, validated_data, **kwargs):
    """Create the new workflow.

    The name is taken from the context (import page) or, failing that,
    from the validated data. A workflow with the same name owned by the
    same user must not already exist. On any failure the partially
    created workflow row is deleted before re-raising.
    """
    wflow_name = self.context.get('name')
    if not wflow_name:
        wflow_name = self.validated_data.get('name')
        if not wflow_name:
            raise Exception(_('Unexpected empty workflow name.'))

        if Workflow.objects.filter(
            name=wflow_name,
            user=self.context['user']
        ).exists():
            raise Exception(
                _('There is a workflow with this name. ' +
                  'Please provide a workflow name in the import page.'))

    # Initial values
    workflow_obj = None
    try:
        workflow_obj = Workflow(
            user=self.context['user'],
            name=wflow_name,
            description_text=validated_data['description_text'],
            nrows=0,
            ncols=0,
            attributes=validated_data['attributes'],
            query_builder_ops=validated_data.get('query_builder_ops', {}),
        )
        workflow_obj.save()

        # Create the columns
        column_data = ColumnSerializer(
            data=validated_data.get('columns', []),
            many=True,
            context={'workflow': workflow_obj})
        # And save its content
        if column_data.is_valid():
            columns = column_data.save()
        else:
            raise Exception(_('Unable to save column information'))

        # If there is any column with position = 0, recompute (this is to
        # guarantee backward compatibility).
        if any(col.position == 0 for col in columns):
            for idx, col in enumerate(columns):
                col.position = idx + 1
                col.save()

        # Load the data frame
        data_frame = validated_data.get('data_frame')
        if data_frame is not None:
            # Store the table in the DB
            store_table(
                data_frame,
                workflow_obj.get_data_frame_table_name(),
                dtype={
                    col.name: col.data_type
                    for col in workflow_obj.columns.all()
                },
            )

            # Reconcile now the information in workflow and columns with
            # the one loaded
            workflow_obj.ncols = validated_data['ncols']
            workflow_obj.nrows = validated_data['nrows']

        workflow_obj.save()

        # Create the actions pointing to the workflow
        action_data = ActionSerializer(
            data=validated_data.get('actions', []),
            many=True,
            context={'workflow': workflow_obj, 'columns': columns})
        if action_data.is_valid():
            action_data.save()
        else:
            # BUGFIX: message previously said "column" (copy/paste error)
            raise Exception(_('Unable to save action information'))

        # Create the views pointing to the workflow
        view_data = ViewSerializer(
            data=validated_data.get('views', []),
            many=True,
            context={'workflow': workflow_obj, 'columns': columns})
        if view_data.is_valid():
            view_data.save()
        else:
            # BUGFIX: message previously said "column" (copy/paste error)
            raise Exception(_('Unable to save view information'))
    except Exception:
        # Get rid of the objects created
        if workflow_obj:
            if workflow_obj.id:
                workflow_obj.delete()
        raise

    return workflow_obj