Example #1
def create_audit_context(audit):
  # Create an audit context
  context = audit.build_object_context(
      context=audit.context,
      name='Audit Context {timestamp}'.format(
          timestamp=datetime.datetime.now()),
      description='',
  )
  context.modified_by = get_current_user()
  db.session.add(context)
  db.session.flush()

  # Create the program -> audit implication
  db.session.add(ContextImplication(
      source_context=audit.context,
      context=context,
      source_context_scope='Program',
      context_scope='Audit',
      modified_by=get_current_user(),
  ))

  db.session.add(audit)

  # Create the role implication for Auditor from Audit for default context
  db.session.add(ContextImplication(
      source_context=context,
      context=None,
      source_context_scope='Audit',
      context_scope=None,
      modified_by=get_current_user(),
  ))
  db.session.flush()

  # Place the audit in the audit context
  audit.context = context
Example #2
def handle_program_post(sender, obj=None, src=None, service=None):
  if src.get('private', False):
    # get the personal context for this logged in user
    personal_context = service.personal_context()

    # create a context specific to the program
    context = Context(
        context=personal_context,
        name='{object_type} Context {timestamp}'.format(
          object_type=service.model.__name__,
          timestamp=datetime.datetime.now()),
        description='',
        )
    db.session.add(context)
    db.session.flush()
    obj.context = context

    # add a user_roles mapping assigning the user creating the program
    # the ProgramOwner role in the program's context.
    program_owner_role = db.session.query(Role)\
        .filter(Role.name == 'ProgramOwner').first()
    user_role = UserRole(
        person=get_current_user(),
        role=program_owner_role,
        context=context,
        )
    db.session.add(user_role)
    db.session.flush()

    assign_role_reader(get_current_user())
Example #3
  def copy(self, _other=None, **kwargs):
    columns = ['title',
               'description',
               'task_group',
               'start_date',
               'end_date',
               'access_control_list',
               'modified_by',
               'task_type',
               'response_options']

    if kwargs.get('clone_people', False):
      access_control_list = [
          {"ac_role_id": acl.ac_role_id, "person": {"id": person.id}}
          for person, acl in self.access_control_list
      ]
    else:
      role_id = {
          v: k for (k, v) in
          role.get_custom_roles_for(self.type).iteritems()
      }['Task Assignees']
      access_control_list = [
          {"ac_role_id": role_id, "person": {"id": get_current_user().id}}
      ]
    kwargs['modified_by'] = get_current_user()
    return self.copy_into(_other,
                          columns,
                          access_control_list=access_control_list,
                          **kwargs)
Example #4
def _add_task_acl(task):
  """Add ACL entry for the current users background task."""
  roles = role.get_ac_roles_for(task.type)
  admin_role = roles.get("Admin", None)
  if admin_role:
    acl.AccessControlList(
        person=get_current_user(),
        ac_role=admin_role,
        object=task,
    )
  db.session.add(task)
  db.session.commit()
  if admin_role:
    from ggrc.cache.utils import clear_users_permission_cache
    clear_users_permission_cache([get_current_user().id])
Example #5
def handle_export_post(**kwargs):
  """Handle export post"""
  check_import_export_headers()
  objects = request.json.get("objects")
  current_time = request.json.get("current_time")
  user = get_current_user()
  if user.system_wide_role == 'No Access':
    raise Forbidden()
  if not objects or not current_time:
    raise BadRequest("Export failed due incorrect request data")
  try:
    filename = get_export_filename(objects, current_time)
    ie = import_export.create_import_export_entry(
        job_type="Export",
        status="In Progress",
        title=filename,
        start_at=datetime.utcnow(),
    )
    deferred.defer(run_export,
                   objects,
                   ie.id,
                   user.id,
                   get_url_root(),
                   _queue="ggrcImport")
    return make_import_export_response(ie.log_json())
  except Exception as e:
    logger.exception("Export failed due incorrect request data: %s",
                     e.message)
    raise BadRequest("Export failed due incorrect request data")
Example #6
 def _update_new_reviewed_by(self):
   """When create new review with state REVIEWED set last_reviewed_by"""
   # pylint: disable=attribute-defined-outside-init
   from ggrc.models import all_models
   if self.status == all_models.Review.STATES.REVIEWED:
     self.last_reviewed_by = get_current_user()
     self.last_reviewed_at = datetime.datetime.utcnow()
Example #7
def handle_cycle_task_group_object_task_put(
        sender, obj=None, src=None, service=None):  # noqa pylint: disable=unused-argument

  if inspect(obj).attrs.contact.history.has_changes():
    ensure_assignee_is_workflow_member(obj.cycle.workflow, obj.contact)

  if any([inspect(obj).attrs.start_date.history.has_changes(),
          inspect(obj).attrs.end_date.history.has_changes()]):
    update_cycle_dates(obj.cycle)

  if inspect(obj).attrs.status.history.has_changes():
    # TODO: check why update_cycle_object_parent_state destroys object history
    # when accepting the only task in a cycle. The listener below is a
    # workaround because of that.
    Signals.status_change.send(
        obj.__class__,
        obj=obj,
        new_status=obj.status,
        old_status=inspect(obj).attrs.status.history.deleted.pop(),
    )
    update_cycle_task_object_task_parent_state(obj)

  # Doing this regardless of status.history.has_changes() is important in order
  # to update objects that have been declined. It updates the os_last_updated
  # date and last_updated_by.
  if getattr(obj.task_group_task, 'object_approval', None):
    for tgobj in obj.task_group_task.task_group.objects:
      if obj.status == 'Verified':
        tgobj.modified_by = get_current_user()
        tgobj.set_reviewed_state()
        db.session.add(tgobj)
    db.session.flush()
Example #8
 def check_permissions(self):
   if 'permissions' not in session:
     self.load_permissions()
   elif session['permissions'] is None\
       and 'permissions_header_asserted' not in session:
     self.load_permissions()
   elif session['permissions'] is not None\
       and '__header_override' in session['permissions']:
     pass
   elif session['permissions'] is None\
       or '__user' not in session['permissions']\
       or session['permissions']['__user'] != \
           self.get_email_for(get_current_user()):
     self.load_permissions()
   elif 'permissions__ts' in session and not get_current_user().is_anonymous():
     self.load_permissions()
Example #9
def create_task(name, url, queued_callback=None, parameters=None):

  # task name must be unique
  if not parameters:
    parameters = {}
  task = BackgroundTask(name=name + str(int(time())))
  task.parameters = parameters
  task.modified_by = get_current_user()
  db.session.add(task)
  db.session.commit()

  # schedule a task queue
  if getattr(settings, 'APP_ENGINE', False):
    from google.appengine.api import taskqueue
    headers = Headers(request.headers)
    headers.add('x-task-id', task.id)
    taskqueue.add(
        queue_name="ggrc",
        url=url,
        name="{}_{}".format(task.name, task.id),
        params={'task_id': task.id},
        method=request.method,
        headers=headers)
  elif queued_callback:
    queued_callback(task)
  return task
Example #10
def handle_workflow_post(sender, obj=None, src=None, service=None):
  _validate_post_workflow_fields(obj)

  source_workflow = None

  if src.get('clone'):
    source_workflow_id = src.get('clone')
    source_workflow = models.Workflow.query.filter_by(
        id=source_workflow_id
    ).first()
    source_workflow.copy(obj, clone_people=src.get('clone_people', False))
    obj.title = source_workflow.title + ' (copy ' + str(obj.id) + ')'

  # get the personal context for this logged in user
  user = get_current_user()
  personal_context = user.get_or_create_object_context(context=1)
  workflow_context = obj.get_or_create_object_context(personal_context)
  obj.context = workflow_context

  if src.get('clone'):
    source_workflow.copy_task_groups(
        obj,
        clone_people=src.get('clone_people', False),
        clone_tasks=src.get('clone_tasks', False),
        clone_objects=src.get('clone_objects', False)
    )
Example #11
 def copy(self, _other=None, **kwargs):
   """Create a partial copy of the current workflow.
   """
   columns = ['title',
              'description',
              'notify_on_change',
              'notify_custom_message',
              'end_date',
              'start_date',
              'repeat_every',
              'unit',
              'is_verification_needed']
   if kwargs.get('clone_people', False):
     access_control_list = [{"ac_role": acl.ac_role, "person": acl.person}
                            for acl in self.access_control_list]
   else:
     role_id = {
         name: ind
         for (ind, name) in role.get_custom_roles_for(self.type).iteritems()
     }['Admin']
     access_control_list = [{"ac_role_id": role_id,
                             "person": {"id": get_current_user().id}}]
   target = self.copy_into(_other, columns,
                           access_control_list=access_control_list, **kwargs)
   return target
Example #12
  def send_notification(self,
                        parent_type,
                        parent_id,
                        errors=None,
                        failed=False):
    """Send mail notification with information about errors."""
    parent_model = models.get_model(parent_type)
    parent = parent_model.query.get(parent_id)

    data = {"title": parent.title}
    if failed:
      body = settings.EMAIL_BULK_CHILD_SYNC_EXCEPTION.render()
    elif errors:
      data["assessments"] = [
          {
              "url": get_object_url(obj),
              "code": obj.slug,
              "title": obj.title,
          } for (obj, _) in errors
      ]
      body = settings.EMAIL_BULK_CHILD_SYNC_FAILED.render(sync_data=data)
    else:
      body = settings.EMAIL_BULK_CHILD_SYNC_SUCCEEDED.render(sync_data=data)

    receiver = login.get_current_user()
    common.send_email(receiver.email, self.ISSUETRACKER_SYNC_TITLE, body)
Example #13
def ensure_assignee_is_workflow_member(workflow, assignee):
  """Checks what role assignee has in the context of
  a workflow. If he has none he gets the Workflow Member role."""
  if not assignee:
    return

  if any(assignee == wp.person for wp in workflow.workflow_people):
    return

  # Check if assignee is mapped to the Workflow
  workflow_people = models.WorkflowPerson.query.filter(
      models.WorkflowPerson.workflow_id == workflow.id,
      models.WorkflowPerson.person_id == assignee.id).count()
  if not workflow_people:
    workflow_person = models.WorkflowPerson(
        person=assignee,
        workflow=workflow,
        context=workflow.context
    )
    db.session.add(workflow_person)

  # Check if assignee has a role assignment
  user_roles = UserRole.query.filter(
      UserRole.context_id == workflow.context_id,
      UserRole.person_id == assignee.id).count()
  if not user_roles:
    workflow_member_role = _find_role('WorkflowMember')
    user_role = UserRole(
        person=assignee,
        role=workflow_member_role,
        context=workflow.context,
        modified_by=get_current_user(),
    )
    db.session.add(user_role)
Пример #14
0
def handle_export_post(**kwargs):
  """Handle export post"""
  check_import_export_headers()
  request_json = request.json
  objects = request_json.get("objects")
  exportable_objects = request_json.get("exportable_objects", [])
  current_time = request.json.get("current_time")
  user = login.get_current_user()
  if user.system_wide_role == 'No Access':
    raise wzg_exceptions.Forbidden()
  if not objects or not current_time:
    raise wzg_exceptions.BadRequest(
        app_errors.INCORRECT_REQUEST_DATA.format(job_type="Export"))
  try:
    filename = import_helper.get_export_filename(objects,
                                                 current_time,
                                                 exportable_objects)
    ie = import_export.create_import_export_entry(
        job_type="Export",
        status="In Progress",
        title=filename,
        start_at=datetime.utcnow(),
    )
    run_background_export(ie.id, objects, exportable_objects)
    return make_import_export_response(ie.log_json())
  except Exception as e:
    logger.exception(e.message)
    raise wzg_exceptions.BadRequest(
        app_errors.INCORRECT_REQUEST_DATA.format(job_type="Export"))
Example #15
def create_task(name, url, queued_callback=None, parameters=None, method=None):
  """Create a enqueue a bacground task."""
  if not method:
    method = request.method

  # task name must be unique
  if not parameters:
    parameters = {}

  task = BackgroundTask(name=name + str(int(time())))
  task.parameters = parameters
  task.modified_by = get_current_user()
  _add_task_acl(task)

  # schedule a task queue
  if getattr(settings, 'APP_ENGINE', False):
    from google.appengine.api import taskqueue
    headers = collect_task_headers()
    headers.add('X-Task-Id', task.id)
    taskqueue.add(
        queue_name="ggrc",
        url=url,
        name="{}_{}".format(task.name, task.id),
        params={'task_id': task.id},
        method=method,
        headers=headers
    )
  elif queued_callback:
    queued_callback(task)
  return task
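
A hypothetical caller for the function above, assuming settings.APP_ENGINE is falsy so the task runs synchronously through queued_callback; the callback name and endpoint are illustrative, not part of the original code:

def run_export_job(task):
  # Illustrative callback, executed inline when no App Engine queue is present.
  print("running background task %s" % task.name)

task = create_task(
    name="export",
    url="/_background_tasks/run_export",  # illustrative endpoint
    queued_callback=run_export_job,
    method="POST",  # passed explicitly so request.method is not consulted
)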
Example #16
def handle_cycle_task_group_object_task_put(
        sender, obj=None, src=None, service=None):  # noqa pylint: disable=unused-argument
  if inspect(obj).attrs.status.history.has_changes():
    # TODO: check why update_cycle_object_parent_state destroys object history
    # when accepting the only task in a cycle. The listener below is a
    # workaround because of that.
    Signals.status_change.send(
        obj.__class__,
        objs=[
            Signals.StatusChangeSignalObjectContext(
                instance=obj,
                new_status=obj.status,
                old_status=inspect(obj).attrs.status.history.deleted[0],
            )
        ]
    )

  # Doing this regardless of status.history.has_changes() is important in order
  # to update objects that have been declined. It updates the os_last_updated
  # date and last_updated_by.
  with benchmark("handle CycleTask put"):
    if getattr(obj.task_group_task, 'object_approval', None):
      for tgobj in obj.task_group_task.task_group.objects:
        if obj.status == 'Verified':
          tgobj.modified_by = get_current_user()
          tgobj.set_reviewed_state()
      db.session.flush()
Example #17
def get_user_task_count():
  with benchmark("Get user task count RAW"):
    current_user = get_current_user()

    user_tasks = CycleTaskGroupObjectTask.query.with_entities(
        # prefetch tasks' finishing dates to avoid firing subsequent queries
        CycleTaskGroupObjectTask.end_date
    ).join(
        Cycle
    ).filter(
        CycleTaskGroupObjectTask.contact_id == current_user.id,
        CycleTaskGroupObjectTask.status.in_(
            ["Assigned", "InProgress", "Finished", "Declined"]),
        Cycle.is_current == True  # noqa # pylint: disable=singleton-comparison
    ).all()

    task_count = len(user_tasks)

    today = date.today()
    overdue_count = sum(
        1 for task in user_tasks if task.end_date and today > task.end_date)

    # NOTE: the return value must be a list so that the result can be
    # directly JSON-serialized to an Array in a HAML template
    return [task_count, overdue_count]
Example #18
def get_jobs(job_type, ids=None):
  """Get list of jobs by type and/or ids"""
  conditions = [ImportExport.created_by == get_current_user(),
                ImportExport.job_type == job_type]
  if ids:
    conditions.append(ImportExport.id.in_(ids))
  return [ie.log_json(is_default=True)
          for ie in ImportExport.query.filter(*conditions)]
Example #19
 def _start_compute_attributes_job(self, revision_ids):
   if revision_ids:
     cur_user = login.get_current_user()
     deferred.defer(
         import_helper.calculate_computed_attributes,
         revision_ids,
         cur_user.id
     )
Example #20
def get(ie_id):
  """Get import_exports entry by id if entry belongs to current user"""
  ie_job = ImportExport.query.get(ie_id)
  if not ie_job:
    raise NotFound()
  if ie_job.created_by == get_current_user():
    return ie_job
  raise Forbidden()
Example #21
def get_full_user_json():
  """Get the full current user"""
  with benchmark("Get full user JSON"):
    from ggrc.models.person import Person
    current_user = get_current_user()
    person = Person.eager_query().filter_by(id=current_user.id).one()
    result = publish_representation(publish(person, (), inclusion_filter))
    return as_json(result)
Example #22
 def load_permissions(self):
   user = get_current_user()
   email = self.get_email_for(user)
   session['permissions'] = {}
   session['permissions']['__user'] = email
   if user is None or user.is_anonymous():
     session['permissions'] = {}
     session['permissions__ts'] = None
   elif hasattr(settings, 'BOOTSTRAP_ADMIN_USERS') \
       and email in settings.BOOTSTRAP_ADMIN_USERS:
     session['permissions'] = {
         DefaultUserPermissions.ADMIN_PERMISSION.action: {
           DefaultUserPermissions.ADMIN_PERMISSION.resource_type: [
             DefaultUserPermissions.ADMIN_PERMISSION.context_id,
             ],
           },
         }
     session['permissions']['__user'] = email
   else:
     session['permissions'] = {}
     session['permissions']['__user'] = email
     user_roles = db.session.query(UserRole)\
         .options(
             sqlalchemy.orm.undefer_group('UserRole_complete'),
             sqlalchemy.orm.undefer_group('Role_complete'),
             sqlalchemy.orm.joinedload('role'))\
         .filter(UserRole.person_id==user.id)\
         .order_by(UserRole.updated_at.desc())\
         .all()
     if len(user_roles) > 0:
       session['permissions__ts'] = user_roles[0].updated_at
     else:
       session['permissions__ts'] = None
     for user_role in user_roles:
       if isinstance(user_role.role.permissions, dict):
         for action, resource_types in user_role.role.permissions.items():
           for resource_type in resource_types:
             session['permissions'].setdefault(action, {})\
                 .setdefault(resource_type, list())\
                 .append(user_role.context_id)
      # grab personal context
     personal_context = db.session.query(Context).filter(
         Context.related_object_id == user.id,
         Context.related_object_type == 'Person',
         ).first()
     if not personal_context:
       personal_context = Context(
           name='Personal Context for {0}'.format(user.id),
           description='',
           context_id=1,
           related_object_id=user.id,
           related_object_type='Person',
           )
       db.session.add(personal_context)
       db.session.commit()
     session['permissions']['__GGRC_ADMIN__'] = {
         '__GGRC_ALL__': [personal_context.id,],
         }
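
For reference, the permissions structure built above maps an action to a resource type to a list of context ids. An illustrative result for a regular user could look like this (the email, types, and ids are made up):

# Illustrative shape only; not produced verbatim by the code above.
example_permissions = {
    '__user': 'user@example.com',
    'read': {'Program': [3, 17]},
    'update': {'Program': [3]},
    '__GGRC_ADMIN__': {'__GGRC_ALL__': [42]},
}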
Example #23
def handle_program_post(sender, obj=None, src=None, service=None):
  db.session.flush()
  # get the personal context for this logged in user
  user = get_current_user()
  personal_context = _get_or_create_personal_context(user)
  context = obj.build_object_context(
      context=personal_context,
      name='{object_type} Context {timestamp}'.format(
          object_type=service.model.__name__,
          timestamp=datetime.datetime.now()),
      description='',
  )
  context.modified_by = get_current_user()

  db.session.add(obj)
  db.session.flush()
  db.session.add(context)
  db.session.flush()
  obj.contexts.append(context)
  obj.context = context

  # add a user_roles mapping assigning the user creating the program
  # the ProgramOwner role in the program's context.
  program_owner_role = basic_roles.program_owner()
  user_role = UserRole(
      person=get_current_user(),
      role=program_owner_role,
      context=context,
      modified_by=get_current_user())
  # pass along a temporary attribute for logging the events.
  user_role._display_related_title = obj.title
  db.session.add(user_role)
  db.session.flush()

  # Create the context implication for Program roles to default context
  db.session.add(ContextImplication(
      source_context=context,
      context=None,
      source_context_scope='Program',
      context_scope=None,
      modified_by=get_current_user()))

  if not src.get('private'):
    # Add role implication - all users can read a public program
    add_public_program_context_implication(context)
Example #24
def handle_workflow_person_post(sender, obj=None, src=None, service=None):
  # add a user_roles mapping assigning the given workflow person
  # the WorkflowMember role in the workflow's context.
  UserRole(
      person=obj.person,
      role=_find_role('WorkflowMember'),
      context=obj.context,
      modified_by=get_current_user(),
  )
Example #25
 def add_admin_role(self):
   """Add current user as Evidence admin"""
   from ggrc.models import all_models
   admin_role = db.session.query(all_models.AccessControlRole).filter_by(
       name="Admin", object_type=self.type).one()
   self.extend_access_control_list([{
       "ac_role": admin_role,
       "person": login.get_current_user()
   }])
Example #26
def decline_proposal(
    sender, obj=None, src=None, service=None,
    event=None, initial_state=None):  # noqa
  """Decline proposal procedure hook."""
  if not is_status_changed_to(obj.STATES.DECLINED, obj):
    return
  obj.declined_by = login.get_current_user()
  obj.decline_datetime = datetime.datetime.utcnow()
  add_comment_about(obj, obj.STATES.DECLINED, obj.decline_reason)
Example #27
def _get_or_create_personal_context(user):
  personal_context = user.get_or_create_object_context(
      context=1,
      name='Personal Context for {0}'.format(user.id),
      description='')
  personal_context.modified_by = get_current_user()
  db.session.add(personal_context)
  db.session.flush()
  return personal_context
Example #28
def get_user_task_count():
  """Optimized function for fetching current user task count."""
  with benchmark("Get user task count RAW"):
    current_user = get_current_user()

    user_tasks = CycleTaskGroupObjectTask.query.with_entities(
        # prefetch tasks' finishing dates to avoid firing subsequent queries
        CycleTaskGroupObjectTask.end_date
    ).join(
        Cycle
    ).join(
        all_models.AccessControlList,
        sa.and_(
            all_models.AccessControlList.object_type ==
            CycleTaskGroupObjectTask.__name__,
            all_models.AccessControlList.object_id ==
            CycleTaskGroupObjectTask.id,
        ),
    ).join(
        all_models.AccessControlRole,
        sa.and_(
            all_models.AccessControlRole.id ==
            all_models.AccessControlList.ac_role_id,
            all_models.AccessControlRole.object_type ==
            CycleTaskGroupObjectTask.__name__,
            all_models.AccessControlRole.name.in_(
                ("Task Assignees", "Task Secondary Assignees")),
        )
    ).join(
        all_models.AccessControlPerson,
        sa.and_(
            all_models.AccessControlList.id ==
            all_models.AccessControlPerson.ac_list_id,
            all_models.AccessControlPerson.person_id ==
            current_user.id,
        )
    ).filter(
        CycleTaskGroupObjectTask.status.in_(
            [
                CycleTaskGroupObjectTask.ASSIGNED,
                CycleTaskGroupObjectTask.IN_PROGRESS,
                CycleTaskGroupObjectTask.FINISHED,
                CycleTaskGroupObjectTask.DECLINED,
            ]),
        Cycle.is_current == True  # noqa # pylint: disable=singleton-comparison
    ).all()

    task_count = len(user_tasks)

    today = date.today()
    overdue_count = sum(
        1 for task in user_tasks if task.end_date and today > task.end_date)

    # NOTE: the return value must be a list so that the result can be
    # directly JSON-serialized to an Array in a HAML template
    return [task_count, overdue_count]
Example #29
  def copy(self, _other=None, **kwargs):
    columns = [
        'title', 'description',
        'task_group', 'sort_index',
        'start_date', 'end_date',
        'contact', 'modified_by',
        'task_type', 'response_options',
    ]

    contact = None
    if kwargs.get('clone_people', False):
      contact = self.contact
    else:
      contact = get_current_user()

    kwargs['modified_by'] = get_current_user()

    target = self.copy_into(_other, columns, contact=contact, **kwargs)
    return target
Example #30
 def load_permissions(self):
   user = get_current_user()
   email = self.get_email_for(user)
   self._request_permissions = {}
   self._request_permissions['__user'] = email
   if user is None or user.is_anonymous():
     self._request_permissions = {}
   else:
     with benchmark('load_permissions'):
       self._request_permissions = load_permissions_for(user)
Example #31
def delete_previous_imports():
    """Delete not finished imports"""

    imported_jobs = ImportExport.query.filter(
        ImportExport.created_by == get_current_user(),
        ImportExport.job_type == ImportExport.IMPORT_JOB_TYPE)

    active_jobs = db.session.query(
        imported_jobs.filter(
            ImportExport.status.in_([
                ImportExport.ANALYSIS_STATUS, ImportExport.IN_PROGRESS_STATUS
            ])).exists()).scalar()
    if active_jobs:
        raise BadRequest('Import in progress')

    imported_jobs.filter(
        ImportExport.status.in_(
            [ImportExport.NOT_STARTED_STATUS,
             ImportExport.BLOCKED_STATUS])).delete(synchronize_session=False)
    db.session.commit()
Example #32
    def copy_task_groups(self, target, **kwargs):
        """Copy all task groups and tasks mapped to this workflow.
    """
        for task_group in self.task_groups:
            obj = task_group.copy(
                workflow=target,
                context=target.context,
                clone_people=kwargs.get("clone_people", False),
                clone_objects=kwargs.get("clone_objects", False),
                modified_by=get_current_user(),
            )
            target.task_groups.append(obj)

            if kwargs.get("clone_tasks"):
                task_group.copy_tasks(
                    obj,
                    clone_people=kwargs.get("clone_people", False),
                    clone_objects=kwargs.get("clone_objects", True))

        return target
Example #33
def apply_proposal(sender,
                   obj=None,
                   src=None,
                   service=None,
                   event=None,
                   initial_state=None):  # noqa
    """Apply proposal procedure hook."""
    if not is_status_changed_to(obj.STATES.APPLIED, obj):
        return
    current_user = login.get_current_user()
    now = datetime.datetime.utcnow()
    obj.applied_by = current_user
    obj.apply_datetime = now
    if applier.apply_action(obj.instance, obj.content):
        obj.instance.modified_by = current_user
        obj.instance.updated_at = now
    add_comment_about(obj, obj.STATES.APPLIED, obj.apply_reason)
    # notify proposalable instance that proposal applied
    signals.Proposal.proposal_applied.send(obj.instance.__class__,
                                           instance=obj.instance)
Example #34
    def send_notification(parent_type, parent_id, errors=None, failed=False):
        """Send mail notification with information about errors."""
        parent_model = models.get_model(parent_type)
        parent = parent_model.query.get(parent_id)

        data = {"title": parent.title}
        if failed:
            body = settings.EMAIL_BULK_SYNC_EXCEPTION.render()
        elif errors:
            data["assessments"] = [{
                "url": get_object_url(obj),
                "code": obj.slug,
                "title": obj.title,
            } for (obj, _) in errors]
            body = settings.EMAIL_BULK_SYNC_FAILED.render(sync_data=data)
        else:
            body = settings.EMAIL_BULK_SYNC_SUCCEEDED.render(sync_data=data)

        receiver = login.get_current_user()
        common.send_email(receiver.email, ISSUETRACKER_SYNC_TITLE, body)
Example #35
    def copy(self, _other=None, **kwargs):
        columns = [
            'title', 'description', 'workflow', 'sort_index', 'modified_by',
            'context'
        ]

        if kwargs.get('clone_people', False) and getattr(self, "contact"):
            columns.append("contact")
        else:
            kwargs["contact"] = get_current_user()

        target = self.copy_into(_other, columns, **kwargs)

        if kwargs.get('clone_objects', False):
            self.copy_objects(target, **kwargs)

        if kwargs.get('clone_tasks', False):
            self.copy_tasks(target, **kwargs)

        return target
Example #36
    def json_create(self, obj, src):
        """For Parent and Snapshottable src and dst, fill in the Snapshot obj."""
        parent, child, is_snapshot = self._parse_snapshot_data(src)

        if is_snapshot:
            snapshot_data = {
                "parent": parent,
                "child_type": child["type"],
                "child_id": child["id"],
                "update_revision": "new",
            }
            json_builder.create(obj, snapshot_data)
            obj.modified_by = get_current_user()
            obj.context = obj.parent.context
            relationship.Relationship(
                source=obj.parent,
                destination=obj,
            )
            return None

        return super(RelationshipResource, self).json_create(obj, src)
Example #37
def handle_export_post(**kwargs):
    """Handle export post"""
    check_import_export_headers()
    objects = request.json.get("objects")
    current_time = request.json.get("current_time")
    user = get_current_user()
    if user.system_wide_role == 'No Access':
        raise Forbidden()
    if not objects or not current_time:
        raise BadRequest("Export failed due incorrect request data")
    try:
        filename = get_export_filename(objects, current_time)
        ie = import_export.create_import_export_entry(job_type="Export",
                                                      status="In Progress",
                                                      title=filename)
        deferred.defer(run_export, objects, ie.id, user.id, get_url_root())
        return make_import_export_response(ie.log_json())
    except Exception as e:
        logger.exception("Export failed due incorrect request data: %s",
                         e.message)
        raise BadRequest("Export failed due incorrect request data")
Example #38
    def _clone(self, target=None):
        """Clone Assessment Template.

    Args:
      target: Destination Audit object.

    Returns:
      Instance of assessment template copy.
    """
        data = {
            "title": self.title,
            "audit": target,
            "template_object_type": self.template_object_type,
            "test_plan_procedure": self.test_plan_procedure,
            "procedure_description": self.procedure_description,
            "default_people": self.default_people,
            "modified_by": login.get_current_user(),
        }
        assessment_template_copy = AssessmentTemplate(**data)
        db.session.add(assessment_template_copy)
        return assessment_template_copy
Example #39
def create_users_with_role(email_names, role_name="Creator"):
    """Create Person objects.

  Args:
      email_names(dict): Dictionary containing email and name of users.
        Format: {<email>:<name>}

  Returns:
      Set with created Person objects.
  """
    if not email_names:
        return {}

    now = datetime.datetime.now()
    current_user = login.get_current_user()
    from ggrc.models import all_models
    person_inserter = all_models.Person.__table__.insert().prefix_with(
        "IGNORE")
    db.session.execute(
        person_inserter.values([{
            "modified_by_id": current_user.id,
            "created_at": now,
            "updated_at": now,
            "email": email,
            "name": name,
        } for email, name in email_names.items()]))

    created_people = set(load_people_with_emails(email_names.keys()))

    role_id = basic_roles.find_basic(role_name).id
    ur_inserter = all_models.UserRole.__table__.insert().prefix_with("IGNORE")
    db.session.execute(
        ur_inserter.values([{
            "modified_by_id": current_user.id,
            "created_at": now,
            "updated_at": now,
            "role_id": role_id,
            "person_id": person.id,
        } for person in created_people]))
    return created_people
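
A hypothetical call illustrating the email_names format described in the docstring above; the addresses and names are made up:

created_people = create_users_with_role(
    {"alice@example.com": "Alice", "bob@example.com": "Bob"},
    role_name="Creator",
)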
Example #40
def create_task(name, url, queued_callback=None, parameters=None, method=None):
    """Create a enqueue a bacground task."""
    if not method:
        method = request.method

    # task name must be unique
    if not parameters:
        parameters = {}
    task = BackgroundTask(name=name + str(int(time())))
    task.parameters = parameters
    task.modified_by = get_current_user()
    db.session.add(task)
    db.session.commit()
    banned = {
        "X-Appengine-Country",
        "X-Appengine-Queuename",
        "X-Appengine-Current-Namespace",
        "X-Appengine-Taskname",
        "X-Appengine-Tasketa",
        "X-Appengine-Taskexecutioncount",
        "X-Appengine-Taskretrycount",
        "X-Task-Id",
    }

    # schedule a task queue
    if getattr(settings, 'APP_ENGINE', False):
        from google.appengine.api import taskqueue
        headers = Headers(
            {k: v
             for k, v in request.headers if k not in banned})
        headers.add('X-Task-Id', task.id)
        taskqueue.add(queue_name="ggrc",
                      url=url,
                      name="{}_{}".format(task.name, task.id),
                      params={'task_id': task.id},
                      method=method,
                      headers=headers)
    elif queued_callback:
        queued_callback(task)
    return task
Example #41
def get_saved_searches_by_type(search_type):
  """Get saved searches by type.

  Endpoint returning the JSON representation of saved searches of a given type.
  The request should contain the saved search type and can include offset and
  limit parameters. If there are no saved searches of the provided type, an
  empty response is returned.

  Args:
    search_type (str): Type of saved search.

  Returns:
    Flask Response object with object_name, count, total and values as payload.
  """
  user = login.get_current_user(use_external_user=False)
  all_objects = user.saved_searches.filter(
      SavedSearch.search_type == search_type
  )
  if search_type == SavedSearch.ADVANCED_SEARCH or \
      (search_type == SavedSearch.GLOBAL_SEARCH and
          "object_type" in request.args):
    all_objects = all_objects.filter(
        SavedSearch.object_type == request.args.get("object_type")
    )
  db_query_result = all_objects.order_by(
      SavedSearch.created_at.desc()
  ).offset(
      request.args.get("offset")
  ).limit(
      request.args.get("limit")
  ).all()

  response_data = {
      "object_name": SavedSearch.__name__,
      "count": len(db_query_result),
      "total": all_objects.count(),
      "values": db_query_result,
  }

  return json_success_response(response_data)
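
Based on the response_data dictionary above, the payload returned by json_success_response has the following shape; the counts and values here are illustrative only:

# Illustrative response payload, not produced verbatim by the code above.
example_response = {
    "object_name": "SavedSearch",
    "count": 2,    # number of saved searches returned in this page
    "total": 10,   # total saved searches matching the requested type
    "values": [],  # serialized SavedSearch objects go here
}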
Example #42
def handle_import_put(**kwargs):
    """Handle import put"""
    command = kwargs.get("command2")
    ie_id = kwargs.get("id2")
    user = get_current_user()
    if user.system_wide_role == 'No Access':
        raise Forbidden()
    if not ie_id or not command or command not in ("start", "stop"):
        raise BadRequest("Import failed due incorrect request data")
    try:
        ie_job = import_export.get(ie_id)
    except (Forbidden, NotFound):
        raise
    except Exception as e:
        logger.exception("Import failed due incorrect request data: %s",
                         e.message)
        raise BadRequest("Import failed due incorrect request data")
    if command == 'start':
        return handle_start(ie_job, user.id)
    elif command == "stop":
        return handle_import_stop(**kwargs)
    raise BadRequest("Bad params")
Example #43
def get_user_task_count():
    with benchmark("Get user task count RAW"):
        current_user = get_current_user()

        user_tasks = CycleTaskGroupObjectTask.query.with_entities(
            # prefetch tasks' finishing dates to avoid firing subsequent queries
            CycleTaskGroupObjectTask.end_date).join(Cycle).filter(
                CycleTaskGroupObjectTask.contact_id == current_user.id,
                CycleTaskGroupObjectTask.status.in_(
                    ["Assigned", "InProgress", "Finished", "Declined"]),
                Cycle.is_current == True  # noqa # pylint: disable=singleton-comparison
            ).all()

        task_count = len(user_tasks)

        today = date.today()
        overdue_count = sum(1 for task in user_tasks
                            if task.end_date and today > task.end_date)

        # NOTE: the return value must be a list so that the result can be
        # directly JSON-serialized to an Array in a HAML template
        return [task_count, overdue_count]
Example #44
def handle_import_put(**kwargs):
  """Handle import put"""
  command = kwargs.get("command2")
  ie_id = kwargs.get("id2")
  user = login.get_current_user()
  if user.system_wide_role == 'No Access':
    raise wzg_exceptions.Forbidden()
  if not ie_id or not command or command not in ("start", "stop"):
    raise wzg_exceptions.BadRequest(
        app_errors.INCORRECT_REQUEST_DATA.format(job_type="Import"))
  try:
    ie_job = import_export.get(ie_id)
  except (wzg_exceptions.Forbidden, wzg_exceptions.NotFound):
    raise
  except Exception as e:
    logger.exception(e.message)
    raise wzg_exceptions.BadRequest(
        app_errors.INCORRECT_REQUEST_DATA.format(job_type="Import"))
  if command == 'start':
    return handle_start(ie_job)
  elif command == "stop":
    return handle_import_stop(**kwargs)
  raise wzg_exceptions.BadRequest(app_errors.BAD_PARAMS)
Example #45
        def _create(self, parent, action):
            # get assignee type
            current_user = get_current_user()
            # pylint: disable=protected-access
            rel = parent._relationships_map.get(
                (current_user.type, current_user.id))
            if rel:
                assignee_type = rel.attrs["AssigneeType"]
            else:
                assignee_type = None
            # create object
            cad_id = action.custom_attribute_definition_id
            if not cad_id:
                obj = Comment(description=action.description,
                              assignee_type=assignee_type,
                              context=parent.context)
            else:
                obj = Comment(description=action.description,
                              custom_attribute_definition_id=cad_id,
                              assignee_type=assignee_type,
                              context=parent.context)

            return obj
Example #46
    def copy(self, _other=None, **kwargs):
        columns = [
            'title', 'description', 'parent_id', 'workflow', 'modified_by',
            'context'
        ]
        kwargs['parent_id'] = self.id

        if kwargs.get('clone_people', False) and getattr(self, "contact"):
            columns.append("contact")
        else:
            kwargs["contact"] = get_current_user()

        target = self.copy_into(_other, columns, **kwargs)

        target.ensure_assignee_is_workflow_member()

        if kwargs.get('clone_objects', False):
            self.copy_objects(target, **kwargs)

        if kwargs.get('clone_tasks', False):
            self.copy_tasks(target, **kwargs)

        return target
Example #47
  def _all_objects_count(self, **kwargs):
    """Get object counts for all objects page."""
    id_ = kwargs.get("id")
    user = login.get_current_user()
    if id_ != user.id:
      raise Forbidden()

    with benchmark("Make response"):

      response_object = self.ALL_OBJECTS.copy()
      for model_type in response_object:
        model = models.get_model(model_type)
        # pylint: disable=protected-access
        # We must move the type permissions query to a proper utility function
        # but we will not do that for a patch release
        permission_filter = builder.QueryHelper._get_type_query(model, "read")
        if permission_filter is not None:
          count = model.query.filter(permission_filter).count()
        else:
          count = model.query.count()
        response_object[model_type] = count

      return self.json_success_response(response_object, )
Example #48
def run_export(task):
    """Run export"""
    user = get_current_user()
    ie_id = task.parameters.get("ie_id")
    objects = task.parameters.get("objects")
    exportable_objects = task.parameters.get("exportable_objects")

    try:
        ie = import_export.get(ie_id)
        check_for_previous_run()

        content, _ = make_export(objects, exportable_objects)
        db.session.refresh(ie)
        if ie.status == "Stopped":
            return utils.make_simple_response()
        ie.status = "Finished"
        ie.end_at = datetime.utcnow()
        ie.content = content
        db.session.commit()

        job_emails.send_email(job_emails.EXPORT_COMPLETED, user.email,
                              ie.title, ie_id)

    except Exception as e:  # pylint: disable=broad-except
        logger.exception("Export failed: %s", e.message)
        ie = import_export.get(ie_id)
        try:
            ie.status = "Failed"
            ie.end_at = datetime.utcnow()
            db.session.commit()
            job_emails.send_email(job_emails.EXPORT_FAILED, user.email)
            return utils.make_simple_response(e.message)
        except Exception as e:  # pylint: disable=broad-except
            logger.exception("%s: %s", app_errors.STATUS_SET_FAILED, e.message)
            return utils.make_simple_response(e.message)

    return utils.make_simple_response()
Example #49
def handle_cycle_task_group_object_task_put(sender,
                                            obj=None,
                                            src=None,
                                            service=None):  # noqa pylint: disable=unused-argument

    if inspect(obj).attrs._access_control_list.history.has_changes():
        for person_id in obj.get_person_ids_for_rolename("Task Assignees"):
            ensure_assignee_is_workflow_member(obj.cycle.workflow, None,
                                               person_id)

    if any([
            inspect(obj).attrs.start_date.history.has_changes(),
            inspect(obj).attrs.end_date.history.has_changes()
    ]):
        update_cycle_dates(obj.cycle)

    if inspect(obj).attrs.status.history.has_changes():
        # TODO: check why update_cycle_object_parent_state destroys object history
        # when accepting the only task in a cycle. The listener below is a
        # workaround because of that.
        Signals.status_change.send(
            obj.__class__,
            obj=obj,
            new_status=obj.status,
            old_status=inspect(obj).attrs.status.history.deleted.pop(),
        )
        update_cycle_task_object_task_parent_state(obj, is_put=True)

    # Doing this regardless of status.history.has_changes() is important in order
    # to update objects that have been declined. It updates the os_last_updated
    # date and last_updated_by.
    if getattr(obj.task_group_task, 'object_approval', None):
        for tgobj in obj.task_group_task.task_group.objects:
            if obj.status == 'Verified':
                tgobj.modified_by = get_current_user()
                tgobj.set_reviewed_state()
        db.session.flush()
Example #50
def ensure_assignee_is_workflow_member(workflow, assignee, assignee_id=None):
  """Checks what role assignee has in the context of
  a workflow. If he has none he gets the Workflow Member role."""
  if not assignee and not assignee_id:
    return
  if assignee_id is None:
    assignee_id = assignee.id
  if assignee and assignee_id != assignee.id:
    raise ValueError("Conflict value assignee and assignee_id")
  if any(assignee_id == wp.person_id for wp in workflow.workflow_people):
    return

  # Check if assignee is mapped to the Workflow
  workflow_people = models.WorkflowPerson.query.filter(
      models.WorkflowPerson.workflow_id == workflow.id,
      models.WorkflowPerson.person_id == assignee_id).count()
  if not workflow_people:
    models.WorkflowPerson(
        person=assignee,
        person_id=assignee_id,
        workflow=workflow,
        context=workflow.context
    )

  # Check if assignee has a role assignment
  user_roles = UserRole.query.filter(
      UserRole.context_id == workflow.context_id,
      UserRole.person_id == assignee_id).count()
  if not user_roles:
    workflow_member_role = _find_role('WorkflowMember')
    UserRole(
        person=assignee,
        person_id=assignee_id,
        role=workflow_member_role,
        context=workflow.context,
        modified_by=get_current_user(),
    )
Example #51
def create_saved_search():
    """Create a saved search.

  Endpoint creating saved search with provided parameters. Request payload
  should contain saved search `search_type` and `object_type`. Also it
  could contain saved search `name`, `filters`, and `is_visible` parameters. If
  there will any error during saved search creation, 400 status code and
  corresponding error will be returned.

  If no `name` parameter is provided, it will be generated atimatically.
  If no `is_visible` parameter is provided, the default value will be used.
    Defaults to `True`.

  Returns:
    Flask Response object containing JSON representation of created saved
    search or error message if error occurred.
  """
    user = login.get_current_user(use_external_user=False)

    data = request.get_json()

    try:
        search = SavedSearch(
            data.get("name"),
            data.get("object_type"),
            user,
            data.get("search_type"),
            data.get("filters"),
            data.get("is_visible", True),
        )
    except ValidationError as error:
        return make_error_response(error.message, 400, force_json=True)

    user.saved_searches.append(search)
    db.session.commit()

    return json_success_response(search)
Example #52
def handle_workflow_post(sender, obj=None, src=None, service=None):
    _validate_post_workflow_fields(obj)

    source_workflow = None

    if src.get('clone'):
        source_workflow_id = src.get('clone')
        source_workflow = models.Workflow.query.filter_by(
            id=source_workflow_id).first()
        source_workflow.copy(obj, clone_people=src.get('clone_people', False))
        obj.title = source_workflow.title + ' (copy ' + str(obj.id) + ')'

    # get the personal context for this logged in user
    user = get_current_user(use_external_user=False)
    personal_context = user.get_or_create_object_context(context=1)
    workflow_context = obj.get_or_create_object_context(personal_context)
    obj.context = workflow_context

    if src.get('clone'):
        source_workflow.copy_task_groups(
            obj,
            clone_people=src.get('clone_people', False),
            clone_tasks=src.get('clone_tasks', False),
            clone_objects=src.get('clone_objects', False))
Example #53
  def set_obj_attr(self):
    """ Create comments """
    if self.dry_run or not self.value:
      return
    current_obj = self.row_converter.obj
    for description in self.value:
      if current_obj.access_control_list:
        current_user = get_current_user()
        assignee_types = [acl.ac_role.name
                          for person, acl in current_obj.access_control_list
                          if person == current_user]
        assignee_type = ','.join(assignee_types)
        comment = all_models.Comment(description=description,
                                     modified_by_id=get_current_user_id(),
                                     assignee_type=assignee_type)
      else:
        comment = all_models.Comment(description=description,
                                     modified_by_id=get_current_user_id())

      db.session.add(comment)
      mapping = all_models.Relationship(source=current_obj,
                                        destination=comment)
      db.session.add(mapping)
      self.row_converter.comments.append(comment)
Example #54
 def _missed_mandatory_person(self):
     """Create response for missing mandatory field"""
     self.add_warning(errors.OWNER_MISSING, column_name=self.display_name)
     return [get_current_user()]
Example #55
def handle_workflow_post(sender, obj=None, src=None, service=None):  # noqa pylint: disable=unused-argument
    source_workflow = None

    if src.get('clone'):
        source_workflow_id = src.get('clone')
        source_workflow = models.Workflow.query.filter_by(
            id=source_workflow_id).first()
        source_workflow.copy(obj)
        db.session.add(obj)
        db.session.flush()
        obj.title = source_workflow.title + ' (copy ' + str(obj.id) + ')'

    db.session.flush()
    # get the personal context for this logged in user
    user = get_current_user()
    personal_context = _get_or_create_personal_context(user)
    context = obj.build_object_context(
        context=personal_context,
        name='{object_type} Context {timestamp}'.format(
            object_type=service.model.__name__, timestamp=datetime.now()),
        description='',
    )
    context.modified_by = get_current_user()

    db.session.add(obj)
    db.session.flush()
    db.session.add(context)
    db.session.flush()
    obj.contexts.append(context)
    obj.context = context

    # add a user_roles mapping assigning the user creating the workflow
    # the WorkflowOwner role in the workflow's context.
    workflow_owner_role = _find_role('WorkflowOwner')
    user_role = UserRole(
        person=user,
        role=workflow_owner_role,
        context=context,
        modified_by=get_current_user(),
    )
    db.session.add(
        models.WorkflowPerson(
            person=user,
            workflow=obj,
            context=context,
            modified_by=get_current_user(),
        ))
    # pass along a temporary attribute for logging the events.
    user_role._display_related_title = obj.title
    db.session.add(user_role)
    db.session.flush()

    # Create the context implication for Workflow roles to default context
    db.session.add(
        ContextImplication(
            source_context=context,
            context=None,
            source_context_scope='Workflow',
            context_scope=None,
            modified_by=get_current_user(),
        ))

    if not src.get('private'):
        # Add role implication - all users can read a public workflow
        add_public_workflow_context_implication(context)

    if src.get('clone'):
        source_workflow.copy_task_groups(
            obj,
            clone_people=src.get('clone_people', False),
            clone_tasks=src.get('clone_tasks', False),
            clone_objects=src.get('clone_objects', False))

        if src.get('clone_people'):
            workflow_member_role = _find_role('WorkflowMember')
            for authorization in source_workflow.context.user_roles:
                # Current user has already been added as workflow owner
                if authorization.person != user:
                    db.session.add(
                        UserRole(person=authorization.person,
                                 role=workflow_member_role,
                                 context=context,
                                 modified_by=user))
            for person in source_workflow.people:
                if person != user:
                    db.session.add(
                        models.WorkflowPerson(person=person,
                                              workflow=obj,
                                              context=context))
Example #56
def has_system_wide_read():
    """Check if user has system wide read access to all objects."""
    user = login.get_current_user(use_external_user=False)
    system_wide_role = getattr(user, "system_wide_role",
                               SystemWideRoles.NO_ACCESS)
    return system_wide_role in SystemWideRoles.read_roles
Example #57
 def _start_compute_attributes_job(self, revision_ids):
     if revision_ids:
         cur_user = login.get_current_user()
         deferred.defer(import_helper.calculate_computed_attributes,
                        revision_ids, cur_user.id)
Example #58
 def _update_new_reviewed_by(self):
   """When create new review with state REVIEWED set last_reviewed_by"""
   # pylint: disable=attribute-defined-outside-init
   if self.status == Review.STATES.REVIEWED:
     self.last_reviewed_by = get_current_user()
     self.last_reviewed_at = datetime.datetime.utcnow()
Example #59
def _system_wide_read():
    """Check if user has system wide read access to all objects."""
    user = login.get_current_user()
    system_wide_role = getattr(user, "system_wide_role",
                               SystemWideRoles.NO_ACCESS)
    return system_wide_role in SYSTEM_WIDE_READ_ROLES
Example #60
def run_import_phases(task):
  """Execute import phases"""
  ie_id = task.parameters.get("ie_id")
  user = login.get_current_user()
  try:
    ie_job = import_export.get(ie_id)

    csv_data = import_helper.read_csv_file(
        StringIO(ie_job.content.encode("utf-8"))
    )

    if ie_job.status == "Analysis":
      info = make_import(csv_data, True, ie_job)
      db.session.rollback()
      db.session.refresh(ie_job)
      if ie_job.status == "Stopped":
        return utils.make_simple_response()
      ie_job.results = json.dumps(info)
      for block_info in info:
        if block_info["block_errors"] or block_info["row_errors"]:
          ie_job.status = "Analysis Failed"
          ie_job.end_at = datetime.utcnow()
          db.session.commit()
          job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                ie_job.title)
          return utils.make_simple_response()
      for block_info in info:
        if block_info["block_warnings"] or block_info["row_warnings"]:
          ie_job.status = "Blocked"
          db.session.commit()
          job_emails.send_email(job_emails.IMPORT_BLOCKED, user.email,
                                ie_job.title)
          return utils.make_simple_response()
      ie_job.status = "In Progress"
      db.session.commit()

    if ie_job.status == "In Progress":
      info = make_import(csv_data, False, ie_job)
      ie_job.results = json.dumps(info)
      for block_info in info:
        if block_info["block_errors"] or block_info["row_errors"]:
          ie_job.status = "Analysis Failed"
          ie_job.end_at = datetime.utcnow()
          job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                ie_job.title)
          db.session.commit()
          return utils.make_simple_response()
      ie_job.status = "Finished"
      ie_job.end_at = datetime.utcnow()
      db.session.commit()
      job_emails.send_email(job_emails.IMPORT_COMPLETED, user.email,
                            ie_job.title)
  except Exception as e:  # pylint: disable=broad-except
    logger.exception(e.message)
    ie_job = import_export.get(ie_id)
    try:
      ie_job.status = "Failed"
      ie_job.end_at = datetime.utcnow()
      db.session.commit()
      job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                            ie_job.title)
      return utils.make_simple_response(e.message)
    except Exception as e:  # pylint: disable=broad-except
      logger.exception("%s: %s", app_errors.STATUS_SET_FAILED, e.message)
      return utils.make_simple_response(e.message)

  return utils.make_simple_response()