def move(self, ticket_id, author, env, delete=False):
    """Move a ticket to another Trac environment.

    env: destination environment (an Environment, or the name of a
         sibling environment under the same parent directory)
    delete: when True the source ticket is removed; otherwise it is
            closed with resolution 'moved' pointing at the new ticket.
    Returns the new ticket's absolute URL when the destination has a
    configured base_url, else None.
    """
    tables = {"attachment": "id", "ticket_change": "ticket"}

    # Resolve a sibling environment when given by name.
    if isinstance(env, basestring):
        base_path, _project = os.path.split(self.env.path)
        env = open_environment(os.path.join(base_path, env), use_cache=True)

    old_ticket = Ticket(self.env, ticket_id)

    # Clone the ticket into the destination environment, keeping the
    # original creation time.
    new_ticket = Ticket(env)
    new_ticket.values = old_ticket.values.copy()
    new_ticket.insert(when=old_ticket.time_created)

    # Mirror changelog and attachment rows, re-keyed to the new id.
    for table, key in tables.items():
        query = "SELECT * FROM %s WHERE %s=%%s" % (table, key)
        for row in get_all_dict(self.env, query, str(ticket_id)):
            row[key] = new_ticket.id
            insert_row_from_dict(env, table, row)

    # Mirror attachment files on disk, if any exist.
    src_attachment_dir = os.path.join(
        self.env.path, "attachments", "ticket", str(ticket_id))
    if os.path.exists(src_attachment_dir):
        dest_attachment_dir = os.path.join(env.path, "attachments", "ticket")
        if not os.path.exists(dest_attachment_dir):
            os.makedirs(dest_attachment_dir)
        dest_attachment_dir = os.path.join(
            dest_attachment_dir, str(new_ticket.id))
        shutil.copytree(src_attachment_dir, dest_attachment_dir)

    # Record provenance on the new ticket.
    new_ticket.save_changes(
        author, "moved from %s" % self.env.abs_href("ticket", ticket_id))

    new_location = env.abs_href.ticket(new_ticket.id)

    if delete:
        old_ticket.delete()
    else:
        # Leave a closed tombstone pointing at the new ticket.
        old_ticket["status"] = u"closed"
        old_ticket["resolution"] = u"moved"
        old_ticket.save_changes(author, u"moved to %s" % new_location)

    # A meaningful absolute URL only exists when base_url is configured.
    if env.config["trac"].get("base_url"):
        return new_location
    return None
def users(self, req):
    """hours for all users"""
    data = {'hours_format': hours_format}

    # date range (populates data['from_date'] / data['to_date'])
    self.date_data(req, data)

    # optional milestone filter plus the full list for the template
    milestone = req.args.get('milestone')
    data['milestones'] = Milestone.select(self.env)

    # fetch every time entry inside [from_date, to_date)
    window = [int(time.mktime(i.timetuple()))
              for i in (data['from_date'], data['to_date'])]
    hours = get_all_dict(
        self.env,
        """
        SELECT * FROM ticket_time
        WHERE time_started >= %s AND time_started < %s
        """, *window)

    totals = {}
    for entry in hours:
        worker = entry['worker']
        # NOTE: a worker is registered (at 0) even when the milestone
        # filter skips all of their entries — preserved behavior.
        totals.setdefault(worker, 0)
        if milestone and milestone != \
                Ticket(self.env, entry['ticket']).values.get('milestone'):
            continue
        totals[worker] += entry['seconds_worked']

    # seconds -> hours, sorted by worker name
    data['worker_hours'] = [(worker, seconds / 3600.)
                            for worker, seconds in sorted(totals.items())]

    if req.args.get('format') == 'csv':
        req.send(self.export_csv(req, data))

    return 'hours_users.html', data, "text/html"
def user(self, req, user):
    """hours page for a single user"""
    data = {'hours_format': hours_format, 'worker': user}
    self.date_data(req, data)

    # query arguments: worker, then [from_date, to_date) as epoch seconds
    params = [user] + [int(time.mktime(i.timetuple()))
                       for i in (data['from_date'], data['to_date'])]
    entries = get_all_dict(
        self.env,
        """
        SELECT * FROM ticket_time
        WHERE worker=%s AND time_started >= %s AND time_started < %s
        """, *params)

    # accumulate seconds worked per ticket
    seconds_by_ticket = {}
    for entry in entries:
        ticket = entry['ticket']
        seconds_by_ticket[ticket] = (seconds_by_ticket.get(ticket, 0)
                                     + entry['seconds_worked'])

    data['tickets'] = dict((i, Ticket(self.env, i))
                           for i in seconds_by_ticket.keys())

    # sort by ticket number and convert seconds to hours
    worker_hours = [(ticket_id, seconds / 3600.) for ticket_id, seconds
                    in sorted(seconds_by_ticket.items())]
    data['worker_hours'] = worker_hours
    data['total_hours'] = sum(hours for _, hours in worker_hours)

    if req.args.get('format') == 'csv':
        out = StringIO()
        writer = csv.writer(out)
        date_format = '%B %d, %Y'
        writer.writerow(["Hours for %s" % user, req.abs_href()])
        writer.writerow([])
        writer.writerow(['From', 'To'])
        writer.writerow([data[i].strftime(date_format)
                         for i in ('from_date', 'to_date')])
        writer.writerow([])
        writer.writerow(['Ticket', 'Hours'])
        for ticket, hours in worker_hours:
            writer.writerow([ticket, hours])
        req.send(out.getvalue(), 'text/csv')

    return 'hours_user.html', data, 'text/html'
def users(self, req):
    """hours for all users"""
    data = {'hours_format': hours_format}

    # populate from_date/to_date on data from the request
    self.date_data(req, data)

    # optional milestone filter + all milestones for the template
    milestone = req.args.get('milestone')
    data['milestones'] = Milestone.select(self.env)

    # pull all time entries inside [from_date, to_date)
    start, stop = [int(time.mktime(i.timetuple()))
                   for i in (data['from_date'], data['to_date'])]
    entries = get_all_dict(self.env, """
    SELECT * FROM ticket_time
    WHERE time_started >= %s AND time_started < %s
    """, start, stop)

    seconds_by_worker = {}
    for entry in entries:
        worker = entry['worker']
        if worker not in seconds_by_worker:
            seconds_by_worker[worker] = 0
        # skip entries whose ticket is outside the requested milestone
        # (the worker still appears, possibly with 0 hours — preserved)
        if milestone and milestone != \
                Ticket(self.env, entry['ticket']).values.get('milestone'):
            continue
        seconds_by_worker[worker] += entry['seconds_worked']

    data['worker_hours'] = [(worker, secs / 3600.) for worker, secs
                            in sorted(seconds_by_worker.items())]

    if req.args.get('format') == 'csv':
        req.send(self.export_csv(req, data))

    return 'hours_users.html', data, "text/html"
def users(self, req): """hours for all users""" data = { 'hours_format' : hours_format } ### date data self.date_data(req, data) ### milestone data milestone = req.args.get('milestone') milestones = Milestone.select(self.env) data['milestones'] = milestones ### get the hours #trachours = TracHoursPlugin(self.env) #tickets = trachours.tickets_with_hours() hours = get_all_dict(self.env, "SELECT * FROM ticket_time WHERE time_started >= %s AND time_started < %s", *[int(time.mktime(i.timetuple())) for i in (data['from_date'], data['to_date'])]) worker_hours = {} for entry in hours: worker = entry['worker'] if worker not in worker_hours: worker_hours[worker] = 0 if milestone: if milestone != Ticket(self.env, entry['ticket']).values.get('milestone'): continue worker_hours[worker] += entry['seconds_worked'] worker_hours = [(worker, seconds/3600.) for worker, seconds in sorted(worker_hours.items())] data['worker_hours'] = worker_hours if req.args.get('format') == 'csv': buffer = StringIO() writer = csv.writer(buffer) format = '%B %d, %Y' title = "Hours for %s" % self.env.project_name writer.writerow([title, req.abs_href()]) writer.writerow([]) writer.writerow(['From', 'To']) writer.writerow([data[i].strftime(format) for i in 'from_date', 'to_date']) if milestone:
def shapefile_upload(self, req):
    """Process an uploaded ESRI shapefile set (.shp/.shx/.dbf).

    Validates the three uploaded components and the SRID, converts the
    shapefile to SQL via the external `shp2pgsql` tool, drops any
    existing `georegions` table, and loads the generated SQL into the
    environment database.  On validation failure a request warning is
    added and the method returns without side effects.
    """
    files = ['shp', 'shx', 'dbf']
    errors = False

    # sanity check: each component must be an actual file upload
    for f in files:
        if not hasattr(req.args[f], 'file'):
            add_warning(req, "Please specify a %s file" % f)
            errors = True
    try:
        srid = int(req.args['srid'])
        if srid < 1:
            raise ValueError
    except ValueError:
        add_warning(req, "Please specify an SRID integer")
        errors = True
    if errors:
        return

    # put files in a temporary directory for processing;
    # the finally-clause guarantees cleanup even when loading fails
    tempdir = tempfile.mkdtemp()
    try:
        for f in files:
            # BUGFIX: shapefile components are binary.  The previous
            # `print >> fh, data` used text mode and appended a trailing
            # newline, corrupting the uploaded data.  Write raw bytes.
            shapefile = open(os.path.join(tempdir, 'shapefile.%s' % f), 'wb')
            try:
                shapefile.write(req.args[f].file.read())
            finally:
                shapefile.close()

        # run shp2pgsql to generate the SQL for the georegions table
        process = subprocess.Popen(
            ["shp2pgsql", "-s", str(srid), "-g", "the_geom",
             os.path.join(tempdir, 'shapefile'), 'georegions'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sql, errors = process.communicate()

        # remove old table if it exists
        try:
            table = get_all_dict(self.env, "SELECT * FROM georegions")
        except Exception:
            # table does not exist yet; narrowed from a bare `except:`
            table = None
        if table:
            self.shapefile_delete(req)

        # load the SQL in the DB
        db = self.env.get_db_cnx()
        cur = db.cursor()
        cur.execute(sql)
        db.commit()
    finally:
        # cleanup temporary directory (previously leaked on error)
        shutil.rmtree(tempdir)
def render_admin_panel(self, req, category, page, path_info):
    """Process a request for an admin panel.

    This function should return a tuple of the form `(template, data)`,
    where `template` is the name of the template to use and `data` is
    the data to be passed to the template.
    """
    # NOTE(review): `assert` is stripped under `python -O`; a real
    # permission check (e.g. req.perm.require('TRAC_ADMIN')) would be
    # more robust — left unchanged to preserve behavior.
    assert req.perm.has_permission('TRAC_ADMIN')

    methods = ['shapefile_upload', 'shapefile_label', 'shapefile_delete',
               'kml_upload']

    # process posted data: dispatch to whichever action was submitted
    if req.method == 'POST':
        for method in methods:
            if method in req.args:
                getattr(self, method)(req)

    # process data for display
    data = {'column': self.column,
            'column_label': self.column_label,
            'drivers': self.drivers(),
            'openlayers_url': self.env.config.get('geo', 'openlayers_url')}

    try:
        table = get_all_dict(self.env, "SELECT * FROM georegions LIMIT 1")
    except Exception:
        # table is missing until a shapefile/KML has been uploaded;
        # narrowed from a bare `except:` (which also caught SystemExit)
        table = None

    if table:
        # every column except the geometry is eligible for display
        data['columns'] = [c for c in table[0].keys() if c != 'the_geom']
        data['row'] = table[0]
    else:
        data['columns'] = None
        data['row'] = None

    data['srid'] = self.srid()
    data['kml'] = req.href('region.kml')

    if not data['columns']:
        add_warning(req, "You have not successfully uploaded any shapefiles. Please use the upload form.")
    elif self.column not in data['columns']:
        add_warning(req, "You have not selected a column for query and display. Please choose a column.")

    # return the template and associated data
    return 'regions_admin.html', data
def user(self, req, user):
    """hours page for a single user"""
    # Template data: the hour formatter plus the worker being reported on.
    data = {'hours_format': hours_format, 'worker': user}
    # Populates data['from_date'] / data['to_date'] from the request.
    self.date_data(req, data)
    # Query arguments: worker name, then the window bounds as epoch seconds.
    args = [user]
    args += [int(time.mktime(i.timetuple()))
             for i in (data['from_date'], data['to_date'])]
    hours = get_all_dict(self.env, """
    SELECT * FROM ticket_time
    WHERE worker=%s AND time_started >= %s AND time_started < %s
    """, *args)
    # Accumulate seconds worked per ticket id.
    worker_hours = {}
    for entry in hours:
        ticket = entry['ticket']
        if ticket not in worker_hours:
            worker_hours[ticket] = 0
        worker_hours[ticket] += entry['seconds_worked']
    # Ticket objects for the template, keyed by ticket id.
    data['tickets'] = dict([(i, Ticket(self.env, i))
                            for i in worker_hours.keys()])
    # sort by ticket number and convert to hours
    worker_hours = [(ticket_id, seconds/3600.)
                    for ticket_id, seconds in sorted(worker_hours.items())]
    data['worker_hours'] = worker_hours
    data['total_hours'] = sum([hours[1] for hours in worker_hours])
    if req.args.get('format') == 'csv':
        # CSV export: header with title/URL, the date window, then rows.
        buffer = StringIO()
        writer = csv.writer(buffer)
        format = '%B %d, %Y'
        title = "Hours for %s" % user
        writer.writerow([title, req.abs_href()])
        writer.writerow([])
        writer.writerow(['From', 'To'])
        # NOTE: py2-only tuple syntax in the `for` target below.
        writer.writerow([data[i].strftime(format)
                         for i in 'from_date', 'to_date'])
        writer.writerow([])
        # NOTE(review): source appears truncated here — the ticket/hours
        # rows and req.send(...) present in the sibling version of this
        # method are missing from this copy.
def locate_ticket(self, ticket):
    """Return (location, (latitude, longitude)) for a ticket.

    Uses the cached coordinates from the ticket_location table when
    available; otherwise geolocates the ticket's 'location' field and
    caches the result.  Raises GeolocationException when the ticket has
    no usable location string.
    """
    if ticket.id:
        # Use a bound parameter instead of interpolating the id into the
        # SQL string (the old code built the query with %-formatting).
        results = get_all_dict(
            self.env,
            "select latitude, longitude from ticket_location where ticket=%s",
            str(ticket.id))
        if results:
            return ticket["location"], (results[0]["latitude"],
                                        results[0]["longitude"])
    if ticket["location"] is None or not ticket["location"].strip():
        raise GeolocationException
    # XXX blindly assume UTF-8.  (The old try/except re-raised the
    # UnicodeEncodeError unchanged, so calling encode directly is
    # equivalent.)
    location = ticket["location"].encode("utf-8")
    location, (lat, lon) = self.geolocate(location)
    if ticket.id:
        # cache the coordinates for subsequent lookups
        self.set_location(ticket.id, lat, lon)
    return location, (lat, lon)
def locate_ticket(self, ticket):
    """Return (location, (latitude, longitude)) for a ticket.

    Prefers cached coordinates from the ticket_location table; falls
    back to geolocating the ticket's 'location' field and caching the
    result.  Raises GeolocationException when no location is set.
    """
    if ticket.id:
        # Parameterized query — never interpolate values into SQL
        # (the old code %-formatted ticket.id into the string).
        results = get_all_dict(
            self.env,
            'select latitude, longitude from ticket_location where ticket=%s',
            str(ticket.id))
        if results:
            return ticket['location'], (results[0]['latitude'],
                                        results[0]['longitude'])
    if ticket['location'] is None or not ticket['location'].strip():
        raise GeolocationException
    # XXX blindly assume UTF-8.  (The old try/except re-raised the
    # UnicodeEncodeError unchanged, so encoding directly is equivalent.)
    location = ticket['location'].encode('utf-8')
    location, (lat, lon) = self.geolocate(location)
    if ticket.id:
        # cache the coordinates for subsequent lookups
        self.set_location(ticket.id, lat, lon)
    return location, (lat, lon)
def kml_upload(self, req):
    """Process an uploaded KML file.

    Converts the KML to PostGIS SQL, drops any existing `georegions`
    table, and loads the generated SQL into the environment database.
    Adds a request warning and returns early when no file was posted.
    """
    # sanity check: require an actual file upload
    if not hasattr(req.args['kml'], 'file'):
        add_warning(req, "Please specify a KML file")
        return

    # generate PostGIS SQL from the KML
    sql = self.kml2pgsql(req.args['kml'].file.read())

    # remove old table if it exists
    try:
        table = get_all_dict(self.env, "SELECT * FROM georegions")
    except Exception:
        # the table may simply not exist yet; narrowed from a bare
        # `except:` that also swallowed KeyboardInterrupt/SystemExit
        table = None
    if table:
        self.shapefile_delete(req)

    # load the SQL in the DB
    db = self.env.get_db_cnx()
    cur = db.cursor()
    cur.execute(sql)
    db.commit()
def move(self, ticket_id, author, env, delete=False):
    """
    move a ticket to another environment

    env: environment to move to (an Environment object, or the name of
         a sibling environment under the same parent directory)
    """
    tables = {'attachment': 'id', 'ticket_change': 'ticket'}

    # a string names a sibling environment under the same parent dir
    if isinstance(env, basestring):
        base_path, _project = os.path.split(self.env.path)
        env = open_environment(os.path.join(base_path, env),
                               use_cache=True)

    # source ticket and its clone in the destination environment
    old_ticket = Ticket(self.env, ticket_id)
    new_ticket = Ticket(env)
    new_ticket.values = old_ticket.values.copy()
    new_ticket.insert(when=old_ticket.time_created)

    # replicate changelog + attachment rows, re-keyed to the new id
    for table, key_column in tables.items():
        sql = "SELECT * FROM %s WHERE %s=%%s" % (table, key_column)
        for row in get_all_dict(self.env, sql, str(ticket_id)):
            row[key_column] = new_ticket.id
            insert_row_from_dict(env, table, row)

    # replicate attachment files on disk
    src_dir = os.path.join(self.env.path, 'attachments', 'ticket',
                           str(ticket_id))
    if os.path.exists(src_dir):
        dest_parent = os.path.join(env.path, 'attachments', 'ticket')
        if not os.path.exists(dest_parent):
            os.makedirs(dest_parent)
        shutil.copytree(src_dir,
                        os.path.join(dest_parent, str(new_ticket.id)))

    # record where the ticket came from
    new_ticket.save_changes(
        author, 'moved from %s' % self.env.abs_href('ticket', ticket_id))

    new_location = env.abs_href.ticket(new_ticket.id)

    if delete:
        old_ticket.delete()
    else:
        # leave a closed tombstone pointing at the new ticket
        old_ticket['status'] = u'closed'
        old_ticket['resolution'] = u'moved'
        old_ticket.save_changes(author, u'moved to %s' % new_location)

    # only return a URL when the target can build absolute ones
    if env.config['trac'].get('base_url'):
        return new_location
    else:
        return None
def users(self, req):
    """hours for all users"""
    data = {'hours_format': hours_format}

    # date window (from_date / to_date)
    self.date_data(req, data)

    # milestone filter and the milestone list for the template
    milestone = req.args.get('milestone')
    data['milestones'] = Milestone.select(self.env)

    # all time entries inside the window, bounds as epoch seconds
    bounds = [int(time.mktime(i.timetuple()))
              for i in (data['from_date'], data['to_date'])]
    entries = get_all_dict(
        self.env,
        "SELECT * FROM ticket_time WHERE time_started >= %s AND time_started < %s",
        *bounds)

    totals = {}
    for entry in entries:
        worker = entry['worker']
        totals.setdefault(worker, 0)
        # apply the milestone filter per entry; a filtered-out worker
        # still shows up with 0 hours (preserved behavior)
        if milestone:
            ticket_milestone = Ticket(
                self.env, entry['ticket']).values.get('milestone')
            if milestone != ticket_milestone:
                continue
        totals[worker] += entry['seconds_worked']

    worker_hours = [(worker, seconds / 3600.)
                    for worker, seconds in sorted(totals.items())]
    data['worker_hours'] = worker_hours

    if req.args.get('format') == 'csv':
        # CSV export: title/URL, the window, optional milestone, rows
        out = StringIO()
        writer = csv.writer(out)
        date_format = '%B %d, %Y'
        writer.writerow(["Hours for %s" % self.env.project_name,
                         req.abs_href()])
        writer.writerow([])
        writer.writerow(['From', 'To'])
        writer.writerow([data[i].strftime(date_format)
                         for i in ('from_date', 'to_date')])
        if milestone:
            writer.writerow(['Milestone', milestone])
        writer.writerow([])
        writer.writerow(['Worker', 'Hours'])
        for worker, hours in worker_hours:
            writer.writerow([worker, hours])
        req.send(out.getvalue(), "text/csv")

    return 'hours_users.html', data, "text/html"
def move(self, ticket_id, author, env, delete=False):
    """
    move a ticket to another environment

    env: environment to move to (an Environment object, or the name of
         a sibling environment under the same parent directory)
    delete: when True the source ticket is removed; otherwise it is
            closed as a duplicate pointing at the new ticket.
    """
    self.log.info("Starting move of ticket %d to environment %r. delete: %r",
                  ticket_id, env, delete)

    tables = {'attachment': 'id', 'ticket_change': 'ticket'}

    # open the environment if it is a string
    if isinstance(env, basestring):
        base_path, _project = os.path.split(self.env.path)
        env = open_environment(os.path.join(base_path, env), use_cache=True)

    # the author must be allowed to create tickets in the target env.
    # NOTE(review): indentation in the source was ambiguous; assumed the
    # check applies to every call, not only string-named envs — confirm.
    PermissionCache(env, author).require('TICKET_CREATE')

    # get the old ticket
    old_ticket = Ticket(self.env, ticket_id)

    # make a new ticket from the old ticket values
    new_ticket = Ticket(env)
    new_ticket.values = old_ticket.values.copy()
    new_ticket.insert(when=old_ticket.values['time'])
    self.log.debug("Ticket inserted into target environment as id %s",
                   new_ticket.id)

    # copy the changelog and attachment DBs, re-keyed to the new id
    for table, _id in tables.items():
        for row in get_all_dict(
                self.env,
                "SELECT * FROM %s WHERE %s = %%s" % (table, _id),
                str(ticket_id)):
            row[_id] = new_ticket.id
            insert_row_from_dict(env, table, row)
        self.log.debug("Finished copying data from %r table", table)

    # copy the attachments
    src_attachment_dir = os.path.join(self.env.path, 'attachments',
                                      'ticket', str(ticket_id))
    if os.path.exists(src_attachment_dir):
        self.log.debug("Copying attachements from %r", src_attachment_dir)
        dest_attachment_dir = os.path.join(env.path, 'attachments', 'ticket')
        if not os.path.exists(dest_attachment_dir):
            os.makedirs(dest_attachment_dir)
        dest_attachment_dir = os.path.join(dest_attachment_dir,
                                           str(new_ticket.id))
        shutil.copytree(src_attachment_dir, dest_attachment_dir)

    # note the previous location on the new ticket
    if delete:
        new_ticket.save_changes(
            author, 'moved from %s (ticket deleted)' % self.env.abs_href())
    else:
        new_ticket.save_changes(
            author, 'moved from %s' % self.env.abs_href('ticket', ticket_id))

    # BUGFIX: log the id of the ticket created in the target environment
    # (the old code logged the source ticket_id here)
    self.log.info("Finished making new ticket @ %r",
                  env.abs_href('ticket', new_ticket.id))

    if delete:
        self.log.debug("Deleting old ticket")
        old_ticket.delete()
        # a URL is only returned when the target configures base_url
        if env.base_url:
            return env.abs_href('ticket', new_ticket.id)
    else:
        self.log.debug("Marking old ticket as duplicate.")
        # location of new ticket: absolute URL when base_url is set,
        # otherwise a "<project>:#<id>" cross-environment reference
        if env.base_url:
            target_name = env.abs_href('ticket', new_ticket.id)
        else:
            target_name = "{0}:#{1}".format(env.project_name, new_ticket.id)

        # close old ticket and point to new one
        old_ticket['status'] = u'closed'
        old_ticket['resolution'] = u'duplicate'
        old_ticket.save_changes(author, u'moved to %s' % target_name)