def post(self, story):
    """Create a new story.

    :param story: A story within the request body.
    :return: The created story as an API model.
    """
    # Reject private story types while ACL is not created.
    if story.story_type_id and story.story_type_id in (3, 4):
        abort(400, _("Now you can't add story with type %s.") %
              story.story_type_id)

    story_dict = story.as_dict()

    # The creator is always the authenticated user; clients may not pick
    # another author.
    user_id = request.current_user_id
    if story.creator_id and story.creator_id != user_id:
        abort(400, _("You can't select author of story."))

    story_dict.update({"creator_id": user_id})

    if not stories_api.story_can_create_story(story.story_type_id):
        abort(400, _("Can't create story of this type."))

    # Normalize a missing or empty tag list to an empty list.
    if not story_dict.get("tags"):
        story_dict["tags"] = []

    # We can't set due dates when creating stories at the moment.
    story_dict.pop("due_dates", None)

    created_story = stories_api.story_create(story_dict)
    events_api.story_created_event(created_story.id, user_id, story.title)

    return wmodels.Story.from_db_model(created_story)
def _connect(self): """This method connects to RabbitMQ, establishes a channel, declares the storyboard exchange if it doesn't yet exist, and executes any post-connection hooks that an extending class may have registered. """ # If the closing flag is set, just exit. if self._closing: return # If a timer is set, kill it. if self._timer: LOG.debug(_('Clearing timer...')) self._timer.cancel() self._timer = None # Create the connection LOG.info(_LI('Connecting to %s'), self._connection_parameters.host) self._connection = pika.BlockingConnection(self._connection_parameters) # Create a channel LOG.debug(_('Creating a new channel')) self._channel = self._connection.channel() self._channel.confirm_delivery() # Declare the exchange LOG.debug(_('Declaring exchange %s'), self._exchange_name) self._channel.exchange_declare(exchange=self._exchange_name, exchange_type='topic', durable=True, auto_delete=False) # Set the open flag and execute any connection hooks. self._open = True self._execute_open_hooks()
def put(self, milestone_id, milestone):
    """Modify this milestone.

    :param milestone_id: An ID of the milestone.
    :param milestone: A milestone within the request body.
    """
    milestone_dict = milestone.as_dict(omit_unset=True)

    # The expiration date is derived from the "expired" flag and may not
    # be set directly by the client.
    if milestone.expiration_date:
        abort(400, _("Can't change expiration date."))

    if "expired" in six.iterkeys(milestone_dict):
        if milestone_dict["expired"]:
            milestone_dict["expiration_date"] = datetime.now(tz=pytz.utc)
        else:
            milestone_dict["expiration_date"] = None

    # Re-parenting a milestone onto a different branch is not allowed.
    if milestone.branch_id:
        original_milestone = milestones_api.milestone_get(milestone_id)
        if not original_milestone:
            raise exc.NotFound(_("Milestone %s not found") % milestone_id)
        if milestone.branch_id != original_milestone.branch_id:
            abort(400, _("You can't associate milestone %s "
                         "with another branch.") % milestone_id)

    result = milestones_api.milestone_update(milestone_id, milestone_dict)
    if not result:
        raise exc.NotFound(_("Milestone %s not found") % milestone_id)
    return wmodels.Milestone.from_db_model(result)
def post(self, user_id, body):
    """Create a new access token with assigned refresh token for the given
    user.

    :param user_id: The user ID of the user.
    :param body: The access token.
    :return: The created access token.
    """
    self._assert_can_access(user_id, body)

    # Generate a random token if one was not provided.
    if not body.access_token:
        body.access_token = six.text_type(uuid.uuid4())

    # Token duplication check.
    dupes = token_api.user_token_get_all(access_token=body.access_token)
    if dupes:
        abort(409, _('This token already exist.'))

    # Refresh tokens are managed by the backend, never set by the caller.
    token_dict = body.as_dict()
    token_dict.pop("refresh_token", None)

    token = token_api.user_token_create(token_dict)
    if not token:
        abort(400, _("Can't create access token."))
    return self._from_db_model(token)
def post(self, user_id, body):
    """Create a new access token with assigned refresh token for the given
    user.

    :param user_id: The user ID of the user.
    :param body: The access token.
    :return: The created access token.
    """
    self._assert_can_access(user_id, body)

    # Generate a random token if one was not provided.
    if not body.access_token:
        body.access_token = six.text_type(uuid.uuid4())

    # Token duplication check.
    existing = token_api.user_token_get_all(access_token=body.access_token)
    if existing:
        abort(409, _('This token already exist.'))

    token_dict = body.as_dict()
    # The refresh token is generated server-side, so strip any that a
    # client may have supplied.
    if "refresh_token" in token_dict:
        del token_dict["refresh_token"]

    created = token_api.user_token_create(token_dict)
    if created:
        return self._from_db_model(created)
    abort(400, _("Can't create access token."))
def post(self, id, item_id, item_type, list_position):
    """Add an item to a worklist.

    :param id: The ID of the worklist.
    :param item_id: The ID of the item.
    :param item_type: The type of the item (i.e. "story" or "task").
    :param list_position: The position in the list to add the item.
    """
    user_id = request.current_user_id
    if not worklists_api.editable_contents(worklists_api.get(id), user_id):
        raise exc.NotFound(_("Worklist %s not found") % id)

    # Resolve the referenced item; only stories and tasks may live in a
    # worklist.
    getters = {
        'story': stories_api.story_get,
        'task': tasks_api.task_get,
    }
    getter = getters.get(item_type)
    item = None
    if getter is not None:
        item = getter(item_id, current_user=request.current_user_id)
    if item is None:
        raise exc.NotFound(_("Item %s refers to a non-existent task or "
                             "story.") % item_id)

    worklists_api.add_item(id, item_id, item_type, list_position,
                           current_user=request.current_user_id)

    return wmodels.WorklistItem.from_db_model(
        worklists_api.get_item_at_position(id, list_position))
def story_remove_tag(story_id, tag_name, current_user=None):
    """Detach a named tag from a story.

    :param story_id: The ID of the story to modify.
    :param tag_name: The name of the tag to remove.
    :param current_user: The ID of the user making the request.
    :raises exc.NotFound: If the story does not exist or does not carry
        the named tag.
    """
    session = api_base.get_session()
    with session.begin(subtransactions=True):
        story = story_get_simple(story_id, session=session,
                                 current_user=current_user)
        if not story:
            raise exc.NotFound(_("%(name)s %(id)s not found")
                               % {'name': "Story", 'id': story_id})

        matching = [t for t in story.tags if t.name == tag_name]
        if not matching:
            raise exc.NotFound(_("The Story %(story_id)d has "
                                 "no tag %(tag)s")
                               % {'story_id': story_id, 'tag': tag_name})

        story.tags.remove(matching[0])
        session.add(story)
        # Detach the instance so stale state isn't reused by the caller.
        session.expunge(story)
def ssh_execute(ssh, cmd, process_input=None,
                addl_env=None, check_exit_code=True):
    """Run a command on a remote host over an existing SSH connection.

    :param ssh: A connected paramiko-style SSH client.
    :param cmd: The command string to execute remotely.
    :param process_input: Unsupported; raises if provided.
    :param addl_env: Unsupported; raises if provided.
    :param check_exit_code: If True, raise on a non-zero exit status.
    :return: Tuple of (stdout, stderr) contents.
    :raises InvalidArgumentError: If process_input or addl_env is given.
    :raises ProcessExecutionError: If check_exit_code is set and the
        command exited non-zero.
    """
    LOG.debug('Running cmd (SSH): %s', cmd)
    if addl_env:
        raise InvalidArgumentError(_('Environment not supported over SSH'))

    if process_input:
        # This is (probably) fixable if we need it...
        raise InvalidArgumentError(_('process_input not supported over SSH'))

    stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd)
    channel = stdout_stream.channel

    # NOTE(justinsb): This seems suspicious...
    # ...other SSH clients have buffering issues with this approach
    stdout = stdout_stream.read()
    stderr = stderr_stream.read()
    stdin_stream.close()

    exit_status = channel.recv_exit_status()

    # exit_status == -1 if no exit code was returned
    if exit_status != -1:
        # Use lazy %-style logging arguments instead of eager
        # interpolation (matches the LOG.debug call above).
        LOG.debug('Result was %s', exit_status)
        if check_exit_code and exit_status != 0:
            raise ProcessExecutionError(exit_code=exit_status,
                                        stdout=stdout,
                                        stderr=stderr,
                                        cmd=cmd)

    return (stdout, stderr)
def put(self, id, worklist):
    """Modify this worklist.

    :param id: The ID of the worklist.
    :param worklist: A worklist within the request body.
    """
    user_id = request.current_user_id
    if not worklists_api.editable(worklists_api.get(id), user_id):
        raise exc.NotFound(_("Worklist %s not found") % id)

    # We don't use this endpoint to update the worklist's contents
    if worklist.items != wtypes.Unset:
        del worklist.items

    # We don't use this endpoint to update the worklist's filters either
    if worklist.filters != wtypes.Unset:
        del worklist.filters

    worklist_dict = worklist.as_dict(omit_unset=True)
    updated_worklist = worklists_api.update(id, worklist_dict)

    if worklists_api.visible(updated_worklist, user_id):
        worklist_model = wmodels.Worklist.from_db_model(updated_worklist)
        worklist_model.resolve_items(updated_worklist)
        worklist_model.resolve_permissions(updated_worklist)
        return worklist_model
    else:
        # Bug fix: the original raised this message with an unformatted
        # "%s" placeholder — interpolate the worklist ID.
        raise exc.NotFound(_("Worklist %s not found") % id)
def story_add_tag(story_id, tag_name, current_user=None):
    """Attach a tag to a story, creating the tag if it does not yet exist.

    :param story_id: The ID of the story to tag.
    :param tag_name: The name of the tag to attach.
    :param current_user: The ID of the user making the request.
    :raises exc.NotFound: If the story does not exist.
    :raises exc.DBDuplicateEntry: If the story already has the tag.
    """
    session = api_base.get_session()
    with session.begin(subtransactions=True):
        # Get a tag or create a new one
        tag = (story_tags.tag_get_by_name(tag_name, session=session) or
               story_tags.tag_create({"name": tag_name}))

        story = story_get_simple(story_id, session=session,
                                 current_user=current_user)
        if not story:
            raise exc.NotFound(_("%(name)s %(id)s not found")
                               % {'name': "Story", 'id': story_id})

        if any(t.name == tag_name for t in story.tags):
            raise exc.DBDuplicateEntry(
                _("The Story %(id)d already has a tag %(tag)s")
                % {'id': story_id, 'tag': tag_name})

        story.tags.append(tag)
        session.add(story)
        # Detach the instance so stale state isn't reused by the caller.
        session.expunge(story)
def put(self, branch_id, branch):
    """Modify this branch.

    :param branch_id: An ID of the branch.
    :param branch: A branch within the request body.
    """
    branch_dict = branch.as_dict(omit_unset=True)

    # The expiration date is derived from the "expired" flag and cannot
    # be set directly by the client.
    if "expiration_date" in six.iterkeys(branch_dict):
        abort(400, _("Can't change expiration date."))

    if "expired" in six.iterkeys(branch_dict):
        expiration = None
        if branch_dict["expired"]:
            expiration = datetime.now(tz=pytz.utc)
        branch_dict["expiration_date"] = expiration

    # Moving a branch to a different project is not allowed.
    if branch.project_id:
        original_branch = branches_api.branch_get(branch_id)
        if not original_branch:
            raise exc.NotFound(_("Branch %s not found") % branch_id)
        if branch.project_id != original_branch.project_id:
            abort(400, _("You can't associate branch %s "
                         "with another project.") % branch_id)

    result = branches_api.branch_update(branch_id, branch_dict)
    if not result:
        raise exc.NotFound(_("Branch %s not found") % branch_id)
    return wmodels.Branch.from_db_model(result)
def put(self, id, item_id, list_position, list_id=None,
        display_due_date=None):
    """Update a WorklistItem.

    This method also updates the positions of other items in affected
    worklists, if necessary.

    :param id: The ID of the worklist.
    :param item_id: The ID of the worklist_item to be moved.
    :param list_position: The new position of the item in its list.
    :param list_id: The ID of the list to move the item to, if any.
    :param display_due_date: The ID of the due date displayed on the item.
    """
    user_id = request.current_user_id
    if not worklists_api.editable_contents(worklists_api.get(id), user_id):
        raise exc.NotFound(_("Worklist %s not found") % id)
    if worklists_api.get_item_by_id(item_id) is None:
        raise exc.NotFound(_("Item %s seems to have been deleted, "
                             "try refreshing your page.") % item_id)

    worklists_api.move_item(id, item_id, list_position, list_id)

    if display_due_date is not None:
        # -1 appears to be a sentinel meaning "clear the displayed due
        # date" — confirm against the client.
        if display_due_date == -1:
            display_due_date = None
        update_dict = {
            'display_due_date': display_due_date
        }
        worklists_api.update_item(item_id, update_dict)

    updated = worklists_api.get_item_by_id(item_id)
    result = wmodels.WorklistItem.from_db_model(updated)
    result.resolve_due_date(updated)
    return result
def project_group_add_project(project_group_id, project_id):
    """Add a project to a project group.

    :param project_group_id: The ID of the project group.
    :param project_id: The ID of the project to add.
    :return: The updated project group.
    :raises exc.NotFound: If the group or the project does not exist.
    :raises ClientSideError: If the project is already in the group.
    """
    session = api_base.get_session()
    with session.begin(subtransactions=True):
        project_group = _entity_get(project_group_id, session)
        if project_group is None:
            raise exc.NotFound(_("%(name)s %(id)s not found")
                               % {'name': "Project Group",
                                  'id': project_group_id})

        project = projects.project_get(project_id)
        if project is None:
            raise exc.NotFound(_("%(name)s %(id)s not found")
                               % {'name': "Project", 'id': project_id})

        if any(p.id == project_id for p in project_group.projects):
            raise ClientSideError(_("The Project %(id)d is already in "
                                    "Project Group %(group_id)d")
                                  % {'id': project_id,
                                     'group_id': project_group_id})

        project_group.projects.append(project)
        session.add(project_group)

    return project_group
def put(self, id, board):
    """Modify this board.

    :param id: The ID of the board.
    :param board: The new board within the request body.
    """
    user_id = request.current_user_id
    if not boards_api.editable(boards_api.get(id), user_id):
        raise exc.NotFound(_("Board %s not found") % id)

    board_dict = board.as_dict(omit_unset=True)
    update_lanes(board_dict, id)

    # This is not how we add due dates.
    board_dict.pop('due_dates', None)

    updated_board = boards_api.update(id, board_dict)

    if not boards_api.visible(updated_board, user_id):
        raise exc.NotFound(_("Board %s not found") % id)

    board_model = wmodels.Board.from_db_model(updated_board)
    board_model.resolve_lanes(updated_board)
    board_model.resolve_permissions(updated_board)
    return board_model
def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
             description=None):
    """Capture the outcome of a failed command execution.

    :param stdout: Captured standard output of the command.
    :param stderr: Captured standard error of the command.
    :param exit_code: The command's exit status, if known.
    :param cmd: The command that was executed.
    :param description: Optional human-readable description.
    """
    # Keep the raw arguments on the instance. Note that the defaults
    # substituted below are used only for the rendered message, so
    # self.description / self.exit_code may remain None.
    self.exit_code = exit_code
    self.stderr = stderr
    self.stdout = stdout
    self.cmd = cmd
    self.description = description

    if description is None:
        description = _("Unexpected error while running command.")
    if exit_code is None:
        exit_code = '-'
    message = _('%(description)s\n'
                'Command: %(cmd)s\n'
                'Exit code: %(exit_code)s\n'
                'Stdout: %(stdout)r\n'
                'Stderr: %(stderr)r') % {'description': description,
                                         'cmd': cmd,
                                         'exit_code': exit_code,
                                         'stdout': stdout,
                                         'stderr': stderr}
    super(ProcessExecutionError, self).__init__(message)
def project_group_delete_project(project_group_id, project_id):
    """Remove a project from a project group.

    :param project_group_id: The ID of the project group.
    :param project_id: The ID of the project to remove.
    :return: The updated project group.
    :raises exc.NotFound: If the group or the project does not exist.
    :raises ClientSideError: If the project is not in the group.
    """
    session = api_base.get_session()
    with session.begin(subtransactions=True):
        project_group = _entity_get(project_group_id, session)
        if project_group is None:
            raise exc.NotFound(_("%(name)s %(id)s not found")
                               % {'name': "Project Group",
                                  'id': project_group_id})

        project = projects.project_get(project_id)
        if project is None:
            raise exc.NotFound(_("%(name)s %(id)s not found")
                               % {'name': "Project", 'id': project_id})

        members = [p for p in project_group.projects if p.id == project_id]
        if not members:
            raise ClientSideError(_("The Project %(id)d is not in "
                                    "Project Group %(group_id)d")
                                  % {'id': project_id,
                                     'group_id': project_group_id})

        project_group.projects.remove(members[0])
        session.add(project_group)

    return project_group
def add_item(worklist_id, item_id, item_type, list_position,
             current_user=None):
    """Add a story or task card to a worklist.

    If an archived card for the same item already exists (in this list,
    or anywhere on the board when the list is a lane), it is restored
    instead of creating a duplicate card.

    :param worklist_id: The ID of the worklist.
    :param item_id: The ID of the story or task.
    :param item_type: Either "story" or "task".
    :param list_position: The position to place the card at.
    :param current_user: The ID of the user making the request.
    :return: The updated worklist.
    """
    worklist = _worklist_get(worklist_id)
    if worklist is None:
        raise exc.NotFound(_("Worklist %s not found") % worklist_id)

    # Check if this item has an archived card in this worklist to restore
    archived = get_item_by_item_id(
        worklist, item_type, item_id, archived=True)
    if archived:
        update = {
            'archived': False,
            'list_position': list_position
        }
        api_base.entity_update(models.WorklistItem, archived.id, update)
        return worklist

    # If this worklist is a lane, check if the item has an archived card
    # somewhere in the board to restore
    if is_lane(worklist):
        board = boards.get_from_lane(worklist)
        archived = boards.get_card(board, item_type, item_id, archived=True)
        if archived:
            update = {
                'archived': False,
                'list_id': worklist_id,
                'list_position': list_position
            }
            api_base.entity_update(models.WorklistItem, archived.id, update)
            return worklist

    # Create a new card
    if item_type == 'story':
        item = stories_api.story_get(item_id, current_user=current_user)
    elif item_type == 'task':
        item = tasks_api.task_get(item_id, current_user=current_user)
    else:
        raise ClientSideError(_("An item in a worklist must be either a "
                                "story or a task"))

    if item is None:
        raise exc.NotFound(_("%(type)s %(id)s not found")
                           % {'type': item_type, 'id': item_id})

    item_dict = {
        'list_id': worklist_id,
        'item_id': item_id,
        'item_type': item_type,
        'list_position': list_position
    }
    worklist_item = api_base.entity_create(models.WorklistItem, item_dict)

    if worklist.items is None:
        worklist.items = [worklist_item]
    else:
        worklist.items.append(worklist_item)

    return worklist
def inner(*args, **kwargs):
    """Invoke the wrapped function while holding the named lock."""
    try:
        with lock(name, lock_file_prefix, external, lock_path):
            LOG.debug(_('Got semaphore / lock "%(function)s"'),
                      {'function': f.__name__})
            return f(*args, **kwargs)
    finally:
        # NOTE(review): this logs unconditionally, even if acquiring the
        # lock raised and the lock was never held.
        LOG.debug(_('Semaphore / lock released "%(function)s"'),
                  {'function': f.__name__})
def put(self, id, due_date):
    """Modify a due date.

    :param id: The ID of the due date to edit.
    :param due_date: The new due date within the request body.
    """
    # Callers must at least be able to be assigned this due date.
    if not due_dates_api.assignable(due_dates_api.get(id),
                                    request.current_user_id):
        raise exc.NotFound(_("Due date %s not found") % id)

    original_due_date = due_dates_api.get(id)

    due_date_dict = due_date.as_dict(omit_unset=True)
    editing = any(prop in due_date_dict
                  for prop in ("name", "date", "private"))
    # Changing core fields additionally requires edit permission;
    # NotFound is raised rather than a 403 (presumably to avoid leaking
    # the existence of private due dates — confirm).
    if editing and not due_dates_api.editable(original_due_date,
                                              request.current_user_id):
        raise exc.NotFound(_("Due date %s not found") % id)

    # The creator of a due date is immutable.
    if due_date.creator_id and \
            due_date.creator_id != original_due_date.creator_id:
        abort(400, _("You can't select the creator of a due date."))

    # Re-resolve referenced tasks against the database so the update
    # receives real model objects rather than request-body stubs.
    if "tasks" in due_date_dict:
        tasks = due_date_dict.pop("tasks")
        db_tasks = []
        for task in tasks:
            db_tasks.append(tasks_api.task_get(
                task.id, current_user=request.current_user_id))
        due_date_dict["tasks"] = db_tasks

    # Likewise for referenced stories.
    if "stories" in due_date_dict:
        stories = due_date_dict.pop("stories")
        db_stories = []
        for story in stories:
            db_stories.append(stories_api.story_get_simple(
                story.id, current_user=request.current_user_id))
        due_date_dict["stories"] = db_stories

    board = None
    worklist = None
    if "board_id" in due_date_dict:
        board = boards_api.get(due_date_dict["board_id"])
    if "worklist_id" in due_date_dict:
        worklist = worklists_api.get(due_date_dict["worklist_id"])

    updated_due_date = due_dates_api.update(id, due_date_dict)

    # Attach the due date to the requested board/worklist, if any.
    if board:
        updated_due_date.boards.append(board)
    if worklist:
        updated_due_date.worklists.append(worklist)

    if due_dates_api.visible(updated_due_date, request.current_user_id):
        due_date_model = wmodels.DueDate.from_db_model(updated_due_date)
        due_date_model.resolve_items(updated_due_date)
        due_date_model.resolve_permissions(updated_due_date,
                                           request.current_user_id)
        return due_date_model
    else:
        raise exc.NotFound(_("Due date %s not found") % id)
def _publish(self, payload):
    """Publishes a payload to the passed exchange. If it encounters a
    failure, will store the payload for later.

    :param Payload payload: The payload to send.
    """
    LOG.debug(
        _("Sending message to %(name)s [%(topic)s]") % {
            'name': self._exchange_name,
            'topic': payload.topic
        })

    # First check, are we closing?  Queue the payload so it is not lost.
    if self._closing:
        LOG.warning(_LW("Cannot send message, publisher is closing."))
        if payload not in self._pending:
            self._pending.append(payload)
        return

    # Second check, are we open?  Queue the payload and try reconnecting.
    if not self._open:
        LOG.debug(_("Cannot send message, publisher is connecting."))
        if payload not in self._pending:
            self._pending.append(payload)
        self._reconnect()
        return

    # Third check, are we in a sane state? This should never happen,
    # but just in case...
    if not self._connection or not self._channel:
        LOG.error(
            _LE("Cannot send message, publisher is "
                "an unexpected state."))
        if payload not in self._pending:
            self._pending.append(payload)
        self._reconnect()
        return

    # Try to send a message. If we fail, schedule a reconnect and store
    # the message.
    try:
        self._channel.basic_publish(
            self._exchange_name,
            payload.topic,
            json.dumps(payload.payload, ensure_ascii=False),
            self._properties)
        # Success: drop the payload from the retry queue, if present.
        if payload in self._pending:
            self._pending.remove(payload)
        return True
    except ConnectionClosed as cc:
        LOG.warning(_LW("Attempted to send message on closed connection."))
        LOG.debug(cc)
        self._open = False
        if payload not in self._pending:
            self._pending.append(payload)
        self._reconnect()
        return False
def project_group_delete(project_group_id):
    """Hard-delete a project group, which must exist and be empty.

    :param project_group_id: The ID of the project group to delete.
    :raises exc.NotFound: If the group does not exist.
    :raises exc.NotEmpty: If the group still contains projects.
    """
    project_group = project_group_get(project_group_id)

    if not project_group:
        raise exc.NotFound(_('Project group not found.'))

    if project_group.projects:
        raise exc.NotEmpty(_('Project group must be empty.'))

    api_base.entity_hard_delete(models.ProjectGroup, project_group_id)
def team_delete(team_id):
    """Hard-delete a team, which must exist and have no members.

    :param team_id: The ID of the team to delete.
    :raises exc.NotFound: If the team does not exist.
    :raises exc.NotEmpty: If the team still has users.
    """
    team = team_get(team_id)

    if not team:
        raise exc.NotFound(_('Team not found.'))

    if team.users:
        raise exc.NotEmpty(_('Team must be empty.'))

    api_base.entity_hard_delete(models.Team, team_id)
def _publish(self, payload):
    """Publishes a payload to the passed exchange. If it encounters a
    failure, will store the payload for later.

    :param Payload payload: The payload to send.
    """
    LOG.debug(_("Sending message to %(name)s [%(topic)s]") %
              {'name': self._exchange_name, 'topic': payload.topic})

    # First check, are we closing?  Queue the payload so it is not lost.
    if self._closing:
        LOG.warning(_LW("Cannot send message, publisher is closing."))
        if payload not in self._pending:
            self._pending.append(payload)
        return

    # Second check, are we open?  Queue the payload and try reconnecting.
    if not self._open:
        LOG.debug(_("Cannot send message, publisher is connecting."))
        if payload not in self._pending:
            self._pending.append(payload)
        self._reconnect()
        return

    # Third check, are we in a sane state? This should never happen,
    # but just in case...
    if not self._connection or not self._channel:
        LOG.error(_LE("Cannot send message, publisher is "
                      "an unexpected state."))
        if payload not in self._pending:
            self._pending.append(payload)
        self._reconnect()
        return

    # Try to send a message. If we fail, schedule a reconnect and store
    # the message.
    try:
        self._channel.basic_publish(self._exchange_name,
                                    payload.topic,
                                    json.dumps(payload.payload,
                                               ensure_ascii=False),
                                    self._properties)
        # Success: drop the payload from the retry queue, if present.
        if payload in self._pending:
            self._pending.remove(payload)
        return True
    except ConnectionClosed as cc:
        LOG.warning(_LW("Attempted to send message on closed connection."))
        LOG.debug(cc)
        self._open = False
        if payload not in self._pending:
            self._pending.append(payload)
        self._reconnect()
        return False
def task_is_valid_put(task, original_task): """Check that task can be update. """ # Check that creator_id of task can't be changed. if task.creator_id and task.creator_id != original_task.creator_id: abort(400, _("You can't change author of task.")) # Set project_id if it isn't in request. if not task.project_id: task.project_id = original_task.project_id # Set branch_id if it isn't in request. if not task.branch_id: task.branch_id = original_task.branch_id # Check that branch is valid for this task. If project_id was changed, # task will be associated with master branch of this project, because # client doesn't support branches. if task.project_id == original_task.project_id: branch_is_valid(task) else: task.branch_id = branches_api.branch_get_master_branch( task.project_id ).id # Check that task ready to associate with milestone if milestone_id in # request. if task.milestone_id: if original_task.status != 'merged' and task.status != 'merged': abort(400, _("Milestones can only be associated with merged tasks")) if (original_task.status == 'merged' and task.status and task.status != 'merged'): abort(400, _("Milestones can only be associated with merged tasks")) elif 'milestone_id' in task.as_dict(omit_unset=True): return task # Set milestone id if task status isn't 'merged' or if original task # has milestone_id. if task.status and task.status != 'merged': task.milestone_id = None elif not task.milestone_id and original_task.milestone_id: task.milestone_id = original_task.milestone_id # Check that milestone is valid for this task. if task.milestone_id: milestone_is_valid(task) return task
def delete(self, id, item_id):
    """Remove an item from a worklist.

    :param id: The ID of the worklist.
    :param item_id: The ID of the worklist item to be removed.
    """
    user_id = request.current_user_id
    worklist = worklists_api.get(id)
    if not worklists_api.editable_contents(worklist, user_id):
        raise exc.NotFound(_("Worklist %s not found") % id)

    if worklists_api.get_item_by_id(item_id) is None:
        raise exc.NotFound(_("Item %s seems to have already been deleted,"
                             " try refreshing your page.") % item_id)

    # Cards are archived rather than hard-deleted.
    worklists_api.update_item(item_id, {"archived": True})
def get_one(self, story_id, task_id):
    """Retrieve details about one task.

    :param story_id: An ID of the story.
    :param task_id: An ID of the task.
    """
    task = tasks_api.task_get(task_id)
    if not task:
        raise exc.NotFound(_("Task %s not found") % task_id)

    # The task must actually belong to the story named in the URL.
    if task.story_id != story_id:
        abort(400, _("URL story_id and task.story_id do not match"))

    return wmodels.Task.from_db_model(task)
def get_one(self, story_id, task_id):
    """Retrieve details about one task.

    :param story_id: An ID of the story.
    :param task_id: An ID of the task.
    """
    task = tasks_api.task_get(
        task_id, current_user=request.current_user_id)
    if not task:
        raise exc.NotFound(_("Task %s not found") % task_id)

    # The task must actually belong to the story named in the URL.
    if task.story_id != story_id:
        abort(400, _("URL story_id and task.story_id do not match"))

    return wmodels.Task.from_db_model(task)
def _get_not_supported_column(col_name_col_instance, column_name):
    """Look up the explicitly supplied Column for an unsupported type.

    :param col_name_col_instance: Mapping of column name to a
        sqlalchemy.Column instance.
    :param column_name: The name of the column to look up.
    :return: The sqlalchemy.Column instance for ``column_name``.
    :raises ColumnError: If the column is missing from the mapping or is
        not a sqlalchemy.Column instance.
    """
    try:
        column = col_name_col_instance[column_name]
    except KeyError:
        # Message fix: removed the stray ")" and corrected punctuation.
        msg = _("Please specify column %s in col_name_col_instance "
                "param. It is required because column has unsupported "
                "type by sqlite.")
        raise ColumnError(msg % column_name)

    if not isinstance(column, Column):
        # Message fix: added the missing sentence break after "%s".
        msg = _("col_name_col_instance param has wrong type of "
                "column instance for column %s. It should be instance "
                "of sqlalchemy.Column.")
        raise ColumnError(msg % column_name)

    return column
def get_all(self, story_id=None, event_type=None, marker=None,
            offset=None, limit=None, sort_field=None, sort_dir=None):
    """Retrieve all events that have happened under specified story.

    :param story_id: Filter events by story ID.
    :param event_type: A selection of event types to get.
    :param marker: The resource id where the page should begin.
    :param offset: The offset to start the page at.
    :param limit: The number of events to retrieve.
    :param sort_field: The name of the field to sort on.
    :param sort_dir: Sort direction for results (asc, desc).
    """
    # Boundary check on limit.
    if limit is not None:
        limit = max(0, limit)

    # Sanity check on event types.
    if event_type:
        if any(r_type not in event_types.ALL for r_type in event_type):
            msg = _('Invalid event_type requested. Event type must be '
                    'one of the following: %s')
            msg = msg % (', '.join(event_types.ALL), )
            abort(400, msg)

    # Resolve the marker record.
    marker_event = None
    if marker is not None:
        marker_event = events_api.event_get(marker)

    event_count = events_api.events_get_count(story_id=story_id,
                                              event_type=event_type)
    events = events_api.events_get_all(story_id=story_id,
                                       event_type=event_type,
                                       marker=marker_event,
                                       offset=offset,
                                       limit=limit,
                                       sort_field=sort_field,
                                       sort_dir=sort_dir)

    # Apply the query response headers.
    if limit:
        response.headers['X-Limit'] = str(limit)
    response.headers['X-Total'] = str(event_count)
    if marker_event:
        response.headers['X-Marker'] = str(marker_event.id)
    if offset is not None:
        response.headers['X-Offset'] = str(offset)

    return [wmodels.TimeLineEvent.resolve_event_values(
        wmodels.TimeLineEvent.from_db_model(event))
        for event in events]
def paginate_query(query, model, limit, sort_key, marker=None,
                   offset=None, sort_dir=None, sort_dirs=None):
    """Sort and paginate a query by marker or by offset.

    :param query: The SQLAlchemy query to paginate.
    :param model: The model the query selects from.
    :param limit: Maximum number of rows to return (may be None).
    :param sort_key: The field name to sort on.
    :param marker: The record the page should begin after (marker mode).
    :param offset: The row offset to start at (offset mode; overrides
        marker/limit on the underlying query and slices instead).
    :param sort_dir: Sort direction for the single sort key.
    :param sort_dirs: Per-key sort directions.
    :raises exc.DBValueError: On invalid pagination values.
    :raises exc.DBInvalidSortKey: On an unknown sort field.
    """
    start = end = None
    if offset is not None:
        # If we are doing offset-based pagination, don't set a
        # limit or a marker.
        # FIXME: Eventually the webclient should be smart enough
        # to do marker-based pagination, at which point this will
        # be unnecessary.
        start = offset
        # Bug fix: the original computed `offset + limit` unconditionally,
        # raising TypeError when an offset was supplied without a limit.
        # Use an open-ended slice in that case.
        end = offset + limit if limit is not None else None
        limit, marker = (None, None)

    try:
        sorted_query = utils_paginate_query(query=query, model=model,
                                            limit=limit,
                                            sort_keys=[sort_key],
                                            marker=marker,
                                            sort_dir=sort_dir,
                                            sort_dirs=sort_dirs)
        if offset is not None:
            return sorted_query.slice(start, end)
        return sorted_query
    except ValueError as ve:
        raise exc.DBValueError(message=str(ve))
    except InvalidSortKey:
        raise exc.DBInvalidSortKey(_("Invalid sort_field [%s]") % sort_key)
class DBMigrationError(DBException):
    """Migration error exception

    This exception wraps the same exception from database.
    """
    # Default translated message used when no explicit message is given.
    message = _("migrations could not be completed successfully")
class DBValueError(DBException):
    """Value error exception

    This exception wraps standard ValueError exception.
    """
    # Default translated message used when no explicit message is given.
    message = _("Unknown value.")
def acquire(self):
    """Acquire the file lock, blocking (by polling) until it is held.

    :return: True once the lock has been acquired.
    :raises threading.ThreadError: On unexpected I/O errors while
        trying to lock the file.
    """
    basedir = os.path.dirname(self.fname)

    if not os.path.exists(basedir):
        fileutils.ensure_tree(basedir)
        LOG.info(_LI('Created lock path: %s'), basedir)

    self.lockfile = open(self.fname, 'w')

    while True:
        try:
            # Using non-blocking locks since green threads are not
            # patched to deal with blocking locking calls.
            # Also upon reading the MSDN docs for locking(), it seems
            # to have a laughable 10 attempts "blocking" mechanism.
            self.trylock()
            LOG.debug('Got file lock "%s"', self.fname)
            return True
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                # external locks synchronise things like iptables
                # updates - give it some time to prevent busy spinning
                time.sleep(0.01)
            else:
                raise threading.ThreadError(_("Unable to acquire lock on"
                                              " `%(filename)s` due to"
                                              " %(exception)s") %
                                            {
                                                'filename': self.fname,
                                                'exception': e,
                                            })
def acquire(self):
    """Acquire the file lock, blocking (by polling) until it is held.

    :return: True once the lock has been acquired.
    :raises threading.ThreadError: On unexpected I/O errors while
        trying to lock the file.
    """
    basedir = os.path.dirname(self.fname)

    if not os.path.exists(basedir):
        fileutils.ensure_tree(basedir)
        LOG.info(_LI('Created lock path: %s'), basedir)

    self.lockfile = open(self.fname, 'w')

    while True:
        try:
            # Using non-blocking locks since green threads are not
            # patched to deal with blocking locking calls.
            # Also upon reading the MSDN docs for locking(), it seems
            # to have a laughable 10 attempts "blocking" mechanism.
            self.trylock()
            LOG.debug('Got file lock "%s"', self.fname)
            return True
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                # external locks synchronise things like iptables
                # updates - give it some time to prevent busy spinning
                time.sleep(0.01)
            else:
                raise threading.ThreadError(
                    _("Unable to acquire lock on"
                      " `%(filename)s` due to"
                      " %(exception)s") %
                    {
                        'filename': self.fname,
                        'exception': e,
                    })
class DBDeadLock(DBException):
    """Deadlock exception

    This exception wraps the same exception from database.
    """
    # Default translated message used when no explicit message is given.
    message = _("Database in dead lock")
def decorate(self, *args, **kwargs):
    """Invoke the wrapped function, translating OAuthException into an
    OAuth2-style error response (redirect or JSON body).
    """
    try:
        return func(self, *args, **kwargs)
    except exc.OAuthException as o_exc:
        # Extract the parameters
        error = o_exc.error
        error_description = o_exc.msg or _("No details available.")

        # If we have a redirect URL, build the error redirect.
        if o_exc.redirect_uri:
            # Split the redirect_url apart
            parts = urlparse(o_exc.redirect_uri)

            # Add the error and error_description
            if parts.query:
                # NOTE(review): `parse_qsl` is accessed via the
                # `urlparse` name here while `urlparse(...)` above is
                # called as a function — confirm the import style makes
                # both usages valid.
                params = urlparse.parse_qsl(parts.query)
            else:
                params = []
            params.append(('error', error))
            params.append(('error_description', error_description))

            # Overwrite the old query params and reconstruct the URL
            parts_list = list(parts)
            parts_list[4] = urlencode(params)
            location = urlunparse(parts_list)

            redirect(location)
        else:
            error_body = {
                'error': error,
                'error_description': error_description
            }
            response.json = error_body
            abort(o_exc.code, error_description, json_body=error_body)
def inner_func(*args, **kwargs):
    """Call infunc forever, retrying on any exception with throttled
    exception logging (at most once per minute for a repeating message).
    """
    last_log_time = 0
    last_exc_message = None
    exc_count = 0
    while True:
        try:
            return infunc(*args, **kwargs)
        except Exception as exc:
            this_exc_message = six.u(str(exc))
            if this_exc_message == last_exc_message:
                exc_count += 1
            else:
                exc_count = 1
            # Do not log any more frequently than once a minute unless
            # the exception message changes
            cur_time = int(time.time())
            if (cur_time - last_log_time > 60 or
                    this_exc_message != last_exc_message):
                logging.exception(
                    _('Unexpected exception occurred %d time(s)... '
                      'retrying.') % exc_count)
                last_log_time = cur_time
                last_exc_message = this_exc_message
                exc_count = 0
            # This should be a very rare event. In case it isn't, do
            # a sleep.
            time.sleep(1)
def subscribe():
    """Worker entry point: configure logging/config, start the event
    subscriber, and dispatch received messages to enabled worker plugins.
    """
    try:
        log.register_options(CONF)
    except cfg.ArgsAlreadyParsedError:
        # Options were already registered by an earlier caller.
        pass
    log.setup(CONF, 'storyboard')
    CONF(project='storyboard')
    CONF.register_opts(NOTIFICATION_OPTS, "notifications")

    subscriber = Subscriber(CONF.notifications)
    subscriber.start()

    manager = enabled.EnabledExtensionManager(
        namespace='storyboard.plugin.worker',
        check_func=check_enabled,
        invoke_on_load=True,
        invoke_args=(CONF,)
    )

    while subscriber.started:
        (method, properties, body) = subscriber.get()

        if not method or not properties:
            LOG.debug(_("No messages available, sleeping for 5 seconds."))
            time.sleep(5)
            continue

        # Fan the event out to every enabled worker plugin.
        manager.map(handle_event, body)

        # Ack the message
        subscriber.ack(method.delivery_tag)
def deprecated(self, msg, *args, **kwargs):
    """Log a deprecation warning, or raise when fatal_deprecations is set.

    :param msg: The deprecation message.
    :raises DeprecatedConfig: When CONF.fatal_deprecations is enabled.
    """
    stdmsg = _("Deprecated: %s") % msg
    if not CONF.fatal_deprecations:
        self.warn(stdmsg, *args, **kwargs)
        return
    self.critical(stdmsg, *args, **kwargs)
    raise DeprecatedConfig(msg=stdmsg)
def post(self, story_id, task):
    """Create a new task.

    :param story_id: An ID of the story.
    :param task: a task within the request body.
    """
    # Default the task's story to the one named in the URL, and reject
    # mismatches between the two.
    if not task.story_id:
        task.story_id = story_id
    if task.story_id != story_id:
        abort(400, _("URL story_id and task.story_id do not match"))

    task = task_is_valid_post(task)

    creator_id = request.current_user_id
    task.creator_id = creator_id

    # We can't set due dates when creating tasks at the moment.
    task_dict = task.as_dict()
    task_dict.pop('due_dates', None)

    created_task = tasks_api.task_create(task_dict)

    events_api.task_created_event(story_id=task.story_id,
                                  task_id=created_task.id,
                                  task_title=created_task.title,
                                  author_id=creator_id)

    return wmodels.Task.from_db_model(created_task)
def db_sync(abs_path, version=None, init_version=0):
    """Upgrade or downgrade a database.

    Function runs the upgrade() or downgrade() functions in change
    scripts.

    :param abs_path: Absolute path to migrate repository.
    :param version: Database will upgrade/downgrade until this version.
                    If None - database will update to the latest
                    available version.
    :param init_version: Initial database version
    :raises exception.DbMigrationError: if *version* is not an integer.
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.DbMigrationError(
                message=_("version should be an integer"))

    current_version = db_version(abs_path, init_version)
    repository = _find_migrate_repo(abs_path)

    # A target of None (latest) or one ahead of the current schema is
    # an upgrade; anything else walks the schema backwards.
    if version is not None and version <= current_version:
        return versioning_api.downgrade(get_engine(), repository,
                                        version)
    return versioning_api.upgrade(get_engine(), repository, version)
def __exit__(self, exc_type, exc_val, exc_tb):
    """Release the file lock and close its backing file on context exit.

    Failures to release are logged rather than raised, so an exception
    propagating out of the ``with`` body is not masked by cleanup.
    """
    try:
        self.unlock()
        self.lockfile.close()
    except IOError:
        LOG.exception(_("Could not release the acquired lock `%s`"),
                      self.fname)
def put(self, user_id, user):
    """Modify this user.

    :param user_id: Unique id to identify the user.
    :param user: A user within the request body.
    """
    current_user = users_api.user_get(request.current_user_id)

    # Only owners and superadmins are allowed to modify users.
    if (request.current_user_id != user_id
            and not current_user.is_superuser):
        abort(403, _("You are not allowed to update this user."))

    # Strip out values that you're not allowed to change.
    user_dict = user.as_dict(omit_unset=True)

    if not current_user.is_superuser:
        # Only superuser may create superusers or modify login permissions.
        # dict.pop with a default replaces the `key in six.iterkeys(...)`
        # membership scan, which iterated the keys instead of using the
        # dict's own O(1) lookup.
        user_dict.pop('enable_login', None)
        user_dict.pop('is_superuser', None)

    updated_user = users_api.user_update(user_id, user_dict)
    return wmodels.User.from_db_model(updated_user)
def delete(self, id, item_id):
    """Remove an item from a worklist.

    :param id: The ID of the worklist.
    :param item_id: The ID of the worklist item to be removed.
    """
    user_id = request.current_user_id
    worklist = worklists_api.get(id)
    if not worklists_api.editable_contents(worklist, user_id):
        raise exc.NotFound(_("Worklist %s not found") % id)

    if worklists_api.get_item_by_id(item_id) is None:
        raise exc.NotFound(_("Item %s seems to have already been deleted,"
                             " try refreshing your page.") % item_id)

    # Items are archived (soft-deleted) rather than removed outright.
    worklists_api.update_item(item_id, {'archived': True})
def put(self, user_id, access_token_id, body):
    """Update an access token for the given user.

    :param user_id: The user ID of the user.
    :param access_token_id: The ID of the access token.
    :param body: The access token.
    :return: The created access token.
    """
    target_token = token_api.user_token_get(access_token_id)

    # Check existence before handing the token to the access check;
    # previously a missing token reached _assert_can_access as None
    # and the 404 branch was unreachable in practice.
    if not target_token:
        abort(404, _("Token %s not found.") % access_token_id)

    self._assert_can_access(user_id, body)
    self._assert_can_access(user_id, target_token)

    # We only allow updating the expiration date.
    target_token.expires_in = body.expires_in

    token_dict = target_token.as_dict()
    token_dict.pop("refresh_token", None)

    result_token = token_api.user_token_update(access_token_id,
                                               token_dict)

    return self._from_db_model(result_token)
class DBInvalidSortKey(DBException):
    """Invalid sortkey error exception

    This exception wraps the same exception from database.
    """
    # Translated, user-facing summary for this error condition.
    message = _("Invalid sort field")
def get(self, worklist_id):
    """Get items inside a worklist.

    :param worklist_id: The ID of the worklist.
    """
    worklist = worklists_api.get(worklist_id)
    user_id = request.current_user_id
    if not worklist or not worklists_api.visible(worklist, user_id):
        raise exc.NotFound(_("Worklist %s not found") % worklist_id)

    # Automatic worklists compute their contents via a stored filter
    # instead of persisted items.
    if worklist.automatic:
        return [wmodels.WorklistItem(**entry)
                for entry in worklists_api.filter_items(worklist)]

    if worklist.items is None:
        return []

    worklist.items.sort(key=lambda entry: entry.list_position)
    visible = worklists_api.get_visible_items(
        worklist, current_user=request.current_user_id)
    return [wmodels.WorklistItem.from_db_model(entry)
            for entry in visible]
def entity_hard_delete(kls, entity_id, session=None):
    """Permanently delete the *kls* row identified by *entity_id*.

    Low-level database errors are translated into this application's
    own exception types so callers never see oslo.db exceptions.

    :param kls: The model class to delete from.
    :param entity_id: Primary key of the row to delete.
    :param session: Optional existing session; a new one is created
                    when omitted.
    :raises exc.NotFound: if no row with that id exists.
    """
    session = session or get_session()
    try:
        with session.begin(subtransactions=True):
            entity = (model_query(kls, session)
                      .filter_by(id=entity_id)
                      .first())
            if entity is None:
                raise exc.NotFound(
                    _("%(name)s %(id)s not found") % {
                        'name': kls.__name__,
                        'id': entity_id
                    })
            session.delete(entity)
    except db_exc.DBReferenceError as re:
        # Preserve the violated constraint details for the caller.
        raise exc.DBReferenceError(object_name=kls.__name__,
                                   value=re.constraint,
                                   key=re.key)
    except db_exc.DBConnectionError:
        raise exc.DBConnectionError()
    except db_exc.ColumnError:
        raise exc.ColumnError()
    except db_exc.DBDeadlock:
        raise exc.DBDeadLock()
    except db_exc.DBInvalidUnicodeParameter:
        raise exc.DBInvalidUnicodeParameter()