def create_table(env, table, conn=None):
    """Create the given table in the given environment.

    :param env: a trac.env.Environment instance.
    :param table: a trac.db.Table instance describing the schema.
    :param conn: optional existing DB connection; when None a connection is
        taken from the pool and commit/rollback is handled here.
    :raises: re-raises any error from the DDL execution after rolling back.
    """
    assert isinstance(env, Environment), \
        "[DB]: env should be an instance of trac.env.Environment, got %s" % type(env)
    assert isinstance(table, Table), \
        "[DB]: table should be an instance of trac.db.Table, got %s" % type(table)
    # Get the Database Manager
    dbm = DatabaseManager(env)
    # Get the Connector Object for the current DB schema
    connector, args = dbm._get_connector()
    # Ask the connector to generate the proper DDL for the table
    ddl_gen = connector.to_sql(table)
    # Get a DB Connection from the pool, create a cursor and the table
    conn, handle_ta = get_db_for_write(env, conn)
    try:
        cursor = conn.cursor()
        for statement in ddl_gen:
            debug(env, "[DB]: Table: %s\n%s" % (table.name, statement))
            cursor.execute(statement)
        if handle_ta:
            # Only commit when we own the transaction (conn was not passed in)
            conn.commit()
            debug(env, "[DB]: Successfully Created Table %s" % table.name)
    except Exception as e:
        if handle_ta:
            conn.rollback()
        error(env, "[DB]: Unable to Create Table %s, an error occurred: %s" % \
              (table.name, exception_to_unicode(e)))
        raise
def process_request(self, req):
    """Dispatch a JSON request to the handler returned by get_handler().

    For PUT/DELETE/POST requests with a body, URL arguments are merged with
    the decoded JSON body (body keys win). For other methods (e.g. GET) the
    URL arguments alone are used and the handler is always called. The
    handler may return either a response object or a (response, code) tuple.
    Errors are reported to the client via error_response().
    """
    call_handler = False
    if req.method in ('PUT', 'DELETE', 'POST'):
        if self._contains_data(req):
            http_body = req.read()
            data = dict(req.args)
            body_data = self._load_json_data(http_body)
            if body_data is not None:
                # REFACT: consider to make the whole body available under a special key
                # so we can send other types than dictionaries directly to the server and so
                # we can distinguish between parameters from the url and parameters that where
                # sent from the body without reparsing it. (not sure if that would even be possible)
                data.update(body_data)
                call_handler = True
    else:
        # AT: we need to take even with data 0 cause the command
        # GET on /json/<models>/ is valid, has to return the list
        # of models.
        data = req.args
        # The original `len(data) >= 0` was a tautology; dispatch always.
        call_handler = True
    if call_handler:
        code = 200
        try:
            response = self.get_handler(req)(req, data)
            if isinstance(response, tuple):
                response, code = response
            self.respond(req, response, code=code)
        except RequestDone:
            # Trac signals a completed response by raising RequestDone;
            # it must propagate untouched.
            raise
        except Exception as e:
            msg = exception_to_unicode(e)
            log.error(self, msg)
            self.error_response(req, {}, [msg])
def _save(self, timestamp, value, update=False, db=None):
    """Save a remaining-time value for this task to the database.

    :param timestamp: the date (stored in the `date` column) for the value.
    :param value: the remaining time to record.
    :param update: when True an existing row is UPDATEd, otherwise a new
        row is INSERTed.
    :param db: optional existing connection; when None, commit/rollback is
        handled here.
    :raises TracError: wrapping the original error after rollback.
    """
    # Only the table name is interpolated into the SQL (identifiers cannot
    # be bound); the values are passed as bind parameters, which fixes the
    # previous mix of %d/%f/%s format specifiers and avoids quoting issues.
    if update:
        sql_query = "UPDATE %s SET remaining_time=%%s" \
                    " WHERE task_id=%%s AND date=%%s" % BURNDOWN_TABLE
        sql_args = (value, self.task.id, timestamp)
    else:
        sql_query = "INSERT INTO %s (task_id, date, remaining_time)" \
                    " VALUES (%%s, %%s, %%s)" % BURNDOWN_TABLE
        sql_args = (self.task.id, timestamp, value)
    db, handle_ta = get_db_for_write(self.env, db)
    try:
        cursor = db.cursor()
        cursor.execute(sql_query, sql_args)
        if handle_ta:
            db.commit()
            debug(self, "DB Committed, saved remaining time (%s) for task %d" % \
                  (value, self.task.id))
    except Exception as e:
        error(self, to_unicode(e))
        if handle_ta:
            db.rollback()
        raise TracError("Error while saving remaining time: %s" % \
                        to_unicode(e))
def save(self, db=None):
    """Persist this calendar to the database.

    Only hours that differ from the team member's default capacity for the
    corresponding weekday are stored; an existing entry whose hours match
    the default again is deleted. Returns True on success; on error the
    transaction is rolled back (when owned here) and the error re-raised.
    """
    db, handle_ta = get_db_for_write(self.env, db)
    try:
        for o_day, hours in self._calendar.items():
            entry = self.ce_manager.get(date=o_day, teammember=self.team_member, db=db)
            if not entry:
                entry = self.ce_manager.create(date=o_day, teammember=self.team_member, db=db)
            # Save only exceptions from the default weekday capacity
            if hours != self.team_member.capacity[datetime.fromordinal(o_day).date().weekday()]:
                entry.hours = hours
                self.ce_manager.save(entry, db=db)
            elif entry.exists:
                # The value matches the default again, so the stored
                # exception is no longer needed
                self.ce_manager.delete(entry, db=db)
        if handle_ta:
            db.commit()
        # Invalidate the Chart generator cache cause the capacity may be changed
        from agilo.charts import ChartGenerator
        ChartGenerator(self.env).invalidate_cache()
        return True
    except Exception as e:
        # Interpolate AFTER translation so the message-catalog lookup
        # receives the literal msgid (interpolating inside _() breaks it).
        error(self, _("An error occurred while saving Calendar Entry: %s") % to_unicode(e))
        if handle_ta:
            db.rollback()
        raise
def parse_calculated_field(configstring, component=None):
    """Parse a calculated-field configuration string.

    :param configstring: the raw config value; may be None or blank.
    :param component: optional component used for debug/error logging.
    :return: a tuple ``(field_name, operator_result)`` on success, or None
        when the string is empty, does not match, the operator is unknown,
        or the operator raises AgiloConfigSyntaxError.
    """
    result = None
    if configstring is not None and configstring.strip() != "":
        match = CALCULATE.match(configstring)
        if match:
            field_name = match.group('name')
            operator_name = match.group('operator')
            values = match.group('values')
            conditions = match.group('conditions')
            if operator_name in operators:
                operator = operators[operator_name]
                try:
                    result = (field_name, operator(values, condition_string=conditions))
                    if component:
                        base_msg = u"Setting calculated property: %s => %s:%s|%s"
                        msg = base_msg % (field_name, operator_name, values, conditions)
                        debug(component, msg)
                except AgiloConfigSyntaxError as e:
                    if component:
                        msg = u"Error while parsing calculated property '%s': %s"
                        error(component, msg % (field_name, unicode(e)))
            else:
                if component:
                    # Report the offending operator (the original reported
                    # the field name, which made the message useless).
                    error(component, u"Unknown operator name '%s'" % operator_name)
    # The original computed `result` but never returned it.
    return result
def save(self, db=None):
    """Persist this calendar to the database.

    Only hours that differ from the team member's default capacity for the
    corresponding weekday are stored; an existing entry whose hours match
    the default again is deleted. Returns True on success; on error the
    transaction is rolled back (when owned here) and the error re-raised.
    """
    db, handle_ta = get_db_for_write(self.env, db)
    try:
        for o_day, hours in self._calendar.items():
            entry = self.ce_manager.get(date=o_day, teammember=self.team_member,
                                        db=db)
            if not entry:
                entry = self.ce_manager.create(date=o_day,
                                               teammember=self.team_member,
                                               db=db)
            # Save only exceptions from the default weekday capacity
            if hours != self.team_member.capacity[datetime.fromordinal(
                    o_day).date().weekday()]:
                entry.hours = hours
                self.ce_manager.save(entry, db=db)
            elif entry.exists:
                # Matches the default again, so the stored entry is redundant
                self.ce_manager.delete(entry, db=db)
        if handle_ta:
            db.commit()
        # Invalidate the Chart generator cache cause the capacity may be changed
        from agilo.charts import ChartGenerator
        ChartGenerator(self.env).invalidate_cache()
        return True
    except Exception as e:
        # Interpolate AFTER translation so the message-catalog lookup
        # receives the literal msgid (interpolating inside _() breaks it).
        error(self,
              _("An error occurred while saving Calendar Entry: %s")
              % to_unicode(e))
        if handle_ta:
            db.rollback()
        raise
def _get_upload_file_size(self, upload_fp):
    """Return the size in bytes of an uploaded file object.

    :param upload_fp: a file-like object; its size can only be determined
        when it exposes a real file descriptor via ``fileno()``.
    :return: the size in bytes, or None when the object has no ``fileno``
        or ``os.fstat`` fails.
    """
    if hasattr(upload_fp, 'fileno'):
        try:
            # st_size is index 6 of the stat tuple; the original computed
            # it but never returned it, so callers always got None.
            return os.fstat(upload_fp.fileno())[6]
        except Exception as e:
            msg = _("Can't get size of uploaded file because of %s") % e
            error(self, msg)
    return None
def check_team_membership(self, ticket, sprint, person, is_owner=False):
    """Validate that `person` belongs to the team assigned to `sprint`.

    :param ticket: the ticket being validated (used for the error message).
    :param sprint: the sprint whose team membership is checked.
    :param person: the owner/resource name; empty, None and the configured
        default owner are always accepted.
    :param is_owner: selects the wording ('owner' vs 'resource') of the
        error message.
    :raises RuleValidationException: when the person is not a member of
        the sprint's team.
    """
    from trac.ticket.api import TicketSystem
    if person not in [None, '', TicketSystem.default_owner.default]:
        # Conditional expression instead of the fragile `and/or` idiom
        err_string = 'owner' if is_owner else 'resource'
        tmmm = TeamMemberModelManager(self.env)
        teammember = tmmm.get(name=person)
        sprint_team_name = sprint.team.name
        if teammember is None or teammember.team is None or \
                teammember.team.name != sprint_team_name:
            error(self, "Rule didn't validate...")
            msg = _(u"The %s '%s' of ticket #%s doesn't belong to the team '%s' assigned to this sprint.")
            raise RuleValidationException(msg % (err_string, person, ticket.id, sprint_team_name))
def _add_links_for_ticket(self, req, ticket):
    """Link a source ticket (given via the 'src' request argument) to
    `ticket`, when such a link type is allowed.

    Missing or malformed 'src' arguments are silently ignored. Disallowed
    link types are logged but do not raise.
    """
    try:
        src_ticket_id = int(req.args['src'])
    except (KeyError, ValueError, TypeError):
        # No (or non-numeric) 'src' parameter: nothing to link. The
        # original bare `except:` also swallowed unrelated errors.
        pass
    else:
        src_ticket = AgiloTicketModelManager(self.env).get(tkt_id=src_ticket_id)
        if ticket is not None and src_ticket is not None:
            if src_ticket.is_link_to_allowed(ticket):
                return src_ticket.link_to(ticket)
            else:
                msg = 'You may not link #%d (Type %s) to #%d (Type %s)'
                error(self, msg % (src_ticket.id, src_ticket.get_type(),
                                   ticket.id, ticket.get_type()))
def check_team_membership(self, ticket, sprint, person, is_owner=False):
    """Validate that `person` belongs to the team assigned to `sprint`.

    :param ticket: the ticket being validated (used for the error message).
    :param sprint: the sprint whose team membership is checked.
    :param person: the owner/resource name; empty, None and the configured
        default owner are always accepted.
    :param is_owner: selects the wording ('owner' vs 'resource') of the
        error message.
    :raises RuleValidationException: when the person is not a member of
        the sprint's team.
    """
    from trac.ticket.api import TicketSystem
    if person not in [None, '', TicketSystem.default_owner.default]:
        # Conditional expression instead of the fragile `and/or` idiom
        err_string = 'owner' if is_owner else 'resource'
        tmmm = TeamMemberModelManager(self.env)
        teammember = tmmm.get(name=person)
        sprint_team_name = sprint.team.name
        if teammember is None or teammember.team is None or \
                teammember.team.name != sprint_team_name:
            error(self, "Rule didn't validate...")
            msg = _(
                u"The %s '%s' of ticket #%s doesn't belong to the team '%s' assigned to this sprint."
            )
            raise RuleValidationException(
                msg % (err_string, person, ticket.id, sprint_team_name))
def process_all_rows(self, req, csvfile, performer): i = 2 errors_during_perform = False msg_line_skipped = _('Line %d had errors, skipped') while True: try: row = csvfile.next() except StopIteration: break except Exception, e: errors_during_perform = True add_warning(req, msg_line_skipped % i) error(self, _("Error while processing CSV line %d: %s") % (i, e)) else: performer.process(row) i += 1
def _by_pos(self, data, key=None, command=None): """ Replaces aliases/types in the given data at the given key position. If the position is not valid, than None is returned """ # Check if there is a double key key1 = key2 = None if isinstance(key, tuple): key1, key2 = key else: key1 = key #debug(self, "[By Pos]: Processing %s, with %s" % (repr(data), key)) if key1 is not None and isinstance(key1, int): # Replace if there is no command left if not command: if isinstance(data, list): data[key1] = self._aliasize(data[key1]) if key2 is not None and isinstance(key2, int): data[key2] = self._aliasize(data[key2]) elif isinstance(data, tuple): # is unmutable we need to rebuild the tuple new_data = list() for i, value in enumerate(data): if i in [key1, key2]: new_data.append(self._aliasize(value)) else: new_data.append(value) data = tuple(new_data) else: acc, key, command = self._parse_command(command) if acc is not None: try: #debug(self, "[By Pos]: calling\nacc: %s\ndata: %s" \ # "\nkey: %s\n\n" % \ # (acc, data[key1], key1)) acc(data[key1], key, command) if key2 is not None: #debug(self, "[By Pos]: calling\nacc: %s\n" \ # "data: %s\nkey: %s\n\n" % \ # (acc, data[key2], key2)) acc(data[key2], key, command) except IndexError, e: error(self, "[By Pos]: Invalid index: %s, %s for data: %s" % \ (key1, key2, repr(data)))
def _add_links_for_ticket(self, req, ticket):
    """Link a source ticket (given via the 'src' request argument) to
    `ticket`, when such a link type is allowed.

    Missing or malformed 'src' arguments are silently ignored. Disallowed
    link types are logged but do not raise.
    """
    try:
        src_ticket_id = int(req.args['src'])
    except (KeyError, ValueError, TypeError):
        # No (or non-numeric) 'src' parameter: nothing to link. The
        # original bare `except:` also swallowed unrelated errors.
        pass
    else:
        src_ticket = AgiloTicketModelManager(
            self.env).get(tkt_id=src_ticket_id)
        if ticket is not None and src_ticket is not None:
            if src_ticket.is_link_to_allowed(ticket):
                return src_ticket.link_to(ticket)
            else:
                msg = 'You may not link #%d (Type %s) to #%d (Type %s)'
                error(
                    self,
                    msg % (src_ticket.id, src_ticket.get_type(),
                           ticket.id, ticket.get_type()))
def _build_story_statistics(self, req, stats_provider, sprint_name):
    """
    Assemble statistics for all stories in the given sprint_name so
    that the progress bar can be displayed.

    :return: a dict of sprint statistics (empty when sprint_name is
        falsy or when building the stats fails).
    """
    sprint_stats = dict()
    if not sprint_name:
        return sprint_stats
    cmd_get_stories = SprintController.ListTicketsHavingPropertiesCommand(
        self.env, sprint=sprint_name, properties=[Key.STORY_POINTS])
    stories = self.controller.process_command(cmd_get_stories)
    # Bucket the stories by status and collect the ticket types seen
    type_dict = dict()
    closed_stories = list()
    inprogress_stories = list()
    for story in stories:
        if story[Key.STATUS] == Status.CLOSED:
            closed_stories.append(story)
        elif story[Key.STATUS] != Status.NEW:
            inprogress_stories.append(story)
        type_dict[story[Key.TYPE]] = True
    type_names = type_dict.keys()
    number_open_stories = len(stories) - (len(closed_stories) + \
                                          len(inprogress_stories))
    try:
        stat = TicketGroupStats('User Stories status', 'stories')
        stat.add_interval(_('Completed'), len(closed_stories),
                          qry_args={Key.STATUS: Status.CLOSED},
                          css_class='closed', overall_completion=True)
        stat.add_interval(_('In Progress'), len(inprogress_stories),
                          qry_args={Key.STATUS: Status.ACCEPTED},
                          css_class='inprogress')
        stat.add_interval(_('Open'), number_open_stories,
                          qry_args={Key.STATUS: [Status.NEW, Status.REOPENED]},
                          css_class='open')
        stat.refresh_calcs()
        sprint_stats = self._build_sprint_stats_data(req, stat, sprint_name,
                                                     type_names=type_names)
    except Exception as e:
        # The DB is closed? And we don't break for statistics
        error(stats_provider, "ERROR: %s" % to_unicode(e))
    # The original fell off the end and returned None; the early return
    # above shows a dict is the intended result.
    return sprint_stats
def _load(self, db=None):
    """Load the recorded burndown history (date -> remaining_time) for
    this task from the database and mark the object as loaded.

    NOTE(review): `history` is built locally but neither returned nor
    stored on self; only ``self.loaded`` is set, so the loaded data is
    currently discarded. Presumably an assignment (or ``return history``)
    was lost — confirm against the callers before relying on this method.

    :param db: optional existing connection; a write-capable connection is
        requested even though this method only reads — TODO confirm why.
    :raises TracError: wrapping the original error after rollback.
    """
    db, handle_ta = get_db_for_write(self.env, db)
    # Newest entries first
    sql_query = "SELECT date, remaining_time FROM %s" \
                " WHERE task_id=%d ORDER BY date DESC" % (BURNDOWN_TABLE,
                                                          self.task.id)
    debug(self, "Burndown-SQL Query: %s" % sql_query)
    try:
        history = dict()
        cursor = db.cursor()
        cursor.execute(sql_query)
        for row in cursor.fetchall():
            timestamp, remaining_time = row
            history[timestamp] = remaining_time
        self.loaded = True
    except Exception, e:
        error(self, to_unicode(e))
        if handle_ta:
            db.rollback()
        raise TracError("An error occurred while loading Burndown data: %s" % to_unicode(e))
def _parse_command(self, command):
    """
    Parses a command and returns a tuple containing
    (accessor, key, command), where:
     - accessor: is a callable accessor
     - key: is the key to pass to the accessor
     - command: is the remaining part of the command

    Returns (None, None, None) when no known pattern matches.

    SECURITY NOTE(review): the extracted key fragments are passed to
    ``eval``; this is only safe as long as `command` comes from trusted
    (internal) configuration, never from user input.
    """
    def _parse_by_key(key_m, command):
        """Returns the appropriate tuple to represent a by key accessor"""
        key1 = key_m.group(1)  # may be also an alternative key
        key2 = key_m.group(3)
        command = command[key_m.end():]
        # Before sending the keys, check whether they are strings to be
        # evaluated (eval turns the matched text into the real key object)
        if key2 is not None:
            return (self._by_key, (eval(key1), eval(key2)), command)
        else:
            return (self._by_key, eval(key1), command)

    def _parse_iterate(iter_m, command):
        """Returns the appropriate tuple to represent an iterate accessor"""
        key = iter_m.group(1)
        cond = iter_m.group(2)  # optional condition expression
        command = command[iter_m.end():]
        if cond is not None:
            return (self._iterate, (eval(key), cond), command)
        else:
            return (self._iterate, eval(key), command)

    def _parse_by_pos(pos_m, command):
        """Returns the appropriate tuple to represent a by pos accessor"""
        key1 = pos_m.group(1)
        key2 = pos_m.group(2)  # optional second position
        command = command[pos_m.end():]
        if key2 is not None:
            return (self._by_pos, (eval(key1), eval(key2)), command)
        else:
            return (self._by_pos, eval(key1), command)

    # With match we get the first key for the data dictionary
    if isinstance(command, basestring) and self.ELEM.match(command):
        elem_m = self.ELEM.match(command)
        key = elem_m.group(0)
        command = command[elem_m.end():]
        return (self._by_key, eval(key), command)
    # Look if there is a valid iterator expression, including condition
    else:
        # Given the search behaviour we have to check that there is no
        # previous match of KEY as well: the earliest match in the string
        # wins, so all three patterns are searched and compared by start().
        key_m = self.KEY.search(command)
        iter_m = self.ITER.search(command)
        pos_m = self.POS.search(command)
        # find the minimum position and execute that command
        pos = dict()
        for match, parse in [(key_m, _parse_by_key),
                             (iter_m, _parse_iterate),
                             (pos_m, _parse_by_pos)]:
            if match is not None:
                pos[match.start()] = (parse, match)
        # Now get the minimum if any
        if len(pos) > 0:
            # at least one
            parse, match = pos[min(pos.keys())]
            return parse(match, command)
        else:
            error(self, "[Parse Command]: Please provide a string or buffer => %s(%s)" % \
                  (command, type(command)))
            return (None, None, None)