def workflow_update_lusers(user_id, workflow_id, log_id):
    """Recalculate the elements in field lusers of the workflow.

    The recalculation is based on the fields luser_email_column and
    luser_email_column_MD5.

    :param user_id: Id of User object that is executing the action
    :param workflow_id: Id of workflow being processed
    :param log_id: Id of the log object where the status has to be reflected
    :return: True if the recalculation finished without errors, False
        otherwise; the outcome is also stored in the log with log_id
    """
    # Fetch the log record first so diagnostics can always be written to it.
    log_item = get_log_item(log_id)
    if not log_item:
        return False

    try:
        __, wflow, __ = get_execution_items(
            user_id=user_id,
            workflow_id=workflow_id)

        # Delegate the actual recalculation to the workflow ops module.
        workflow.ops.do_workflow_update_lusers(wflow, log_item)

        # Reflect the successful outcome in the log event
        log_item.payload['status'] = 'Execution finished successfully'
        log_item.save()
    except Exception as exc:
        # Any failure is captured in the log so the user can inspect it
        log_item.payload['status'] = ugettext('Error: {0}').format(exc)
        log_item.save()
        return False

    return True
def run_plugin_task(user_id, workflow_id, plugin_id, input_column_names,
                    output_column_names, output_suffix, merge_key, parameters,
                    log_id):
    """Execute the run method in a plugin with the workflow's dataframe.

    :param user_id: Id of User object that is executing the action
    :param workflow_id: Id of workflow being processed
    :param plugin_id: Id of the plugin being executed
    :param input_column_names: List of input column names
    :param output_column_names: List of output column names
    :param output_suffix: Suffix that is added to the output column names
    :param merge_key: Key column to use in the merge
    :param parameters: Dictionary with the parameters to execute the plug in
    :param log_id: Id of the log object where the status has to be reflected
    :return: True on success, False otherwise; the result is stored in the
        log with log_id
    """
    # First get the log item to make sure we can record diagnostics
    log_item = get_log_item(log_id)
    if not log_item:
        return False

    to_return = True
    try:
        user, workflow, __ = get_execution_items(
            user_id=user_id,
            workflow_id=workflow_id)

        plugin_info = Plugin.objects.filter(pk=plugin_id).first()
        if not plugin_info:
            # BUG FIX: the placeholder {pid} needs a keyword argument; the
            # previous positional .format(plugin_id) raised KeyError('pid')
            # instead of producing the intended error message.
            raise Exception(
                ugettext('Unable to load plugin with id {pid}').format(
                    pid=plugin_id),
            )

        # Set the status to "executing" before calling the function
        log_item.payload['status'] = 'Executing'
        log_item.save()

        # Invoke plugin execution
        run_plugin(
            workflow,
            plugin_info,
            input_column_names,
            output_column_names,
            output_suffix,
            merge_key,
            parameters)

        # Reflect status in the log event
        log_item.payload['status'] = 'Execution finished successfully'
        log_item.save()
    except Exception as exc:
        # Capture the failure in the log so the user can inspect it
        log_item.payload['status'] = ugettext('Error: {0}').format(str(exc))
        log_item.save()
        to_return = False

    return to_return
def send_canvas_email_messages(user_id: int, log_id: int, action_info: CanvasEmailPayload) -> bool: """ This function invokes send_messages in action, gets the message that may be sent as a result, and records the appropriate events. :param user_id: Id of User object that is executing the action :param action_id: Id of Action object from where the messages are taken :param subject: String for the email subject :param email_column: Name of the column to extract email addresses :param exclude_values: List of values to exclude from the mailing :param target_url: The name of the server to use to send email :param log_id: Id of the log object where the status has to be reflected :return: bool stating if execution has been correct """ # First get the log item to make sure we can record diagnostics log_item = get_log_item(log_id) if not log_item: return False try: user, __, action = get_execution_items( user_id=user_id, action_id=action_info['action_id']) # Set the status to "executing" before calling the function log_item.payload['status'] = 'Executing' log_item.save() send_canvas_emails( user, action, log_item, action_info, ) # Reflect status in the log event log_item.payload['status'] = 'Execution finished successfully' log_item.save() except Exception as e: log_item.payload['status'] = \ ugettext('Error: {0}').format(e) log_item.save() return False return True
def send_json_objects( user_id: int, log_id: Log, action_info: JSONPayload, ) -> bool: """Invokes send_json in action Gets the JSON objects that may be sent as a result, and records the appropriate events. :param user_id: Id of User object that is executing the action :param action_id: Id of Action object from where the messages are taken :param token: String to include as authorisation token :param key_column: Key column name to use to exclude elements (if needed) :param exclude_values: List of values to exclude from the mailing :param log_id: Id of the log object where the status has to be reflected :return: Nothing """ # First get the log item to make sure we can record diagnostics log_item = get_log_item(log_id) if not log_item: return False to_return = True try: user, __, action = get_execution_items( user_id=user_id, action_id=action_info['action_id']) # Set the status to "executing" before calling the function log_item.payload['status'] = 'Executing' log_item.save() send_json(user, action, log_item, action_info) # Reflect status in the log event log_item.payload['status'] = 'Execution finished successfully' log_item.save() except Exception as e: log_item.payload['status'] = \ ugettext('Error: {0}').format(e) log_item.save() to_return = False return to_return
def athena_dataupload_task(user_id, workflow_id, conn_id, params, log_id):
    """Upload or merge data in the current workflow via an Athena connection.

    :param user_id: Id of User object that is executing the action
    :param workflow_id: Workflow to upload the data
    :param conn_id: Athena connection ID
    :param params: Dictionary with additional parameters or the operation
    :param log_id: Id of the log object where the status has to be reflected
    :return: Nothing, the result is stored in the log with log_id
    """
    # Without a log record there is nowhere to report diagnostics; bail out.
    log_item = get_log_item(log_id)
    if not log_item:
        return

    try:
        __, wflow, __ = get_execution_items(
            user_id=user_id,
            workflow_id=workflow_id)

        conn = AthenaConnection.objects.filter(enabled=True).filter(
            pk=conn_id).first()
        if conn is None:
            raise Exception(
                ugettext('Unable to find connection with id {0}').format(
                    conn_id))

        # Load (or merge) the dataframe obtained through the connection.
        ontask.dataops.forms.dataframeupload.batch_load_df_from_athenaconnection(
            wflow, conn, params, log_item)

        # Record success plus a completion timestamp in the log event.
        log_item.payload['status'] = 'Execution finished successfully'
        log_item.payload['datetime'] = str(
            datetime.datetime.now(pytz.timezone(settings.TIME_ZONE)))
        log_item.save()
    except Exception as exc:
        # Capture the failure in the log so the user can inspect it.
        log_item.payload['status'] = ugettext('Error: {0}').format(exc)
        log_item.save()