def run():
    home = DEFAULT_HOME
    template = DEFAULT_TEMPLATE
    nuke = sys.argv[-1] == 'nuke'
    # if there is no tracker in home, force nuke
    try:
        instance.open(home)
    except configuration.NoConfigError:
        nuke = 1
    # if we are to create the tracker, prompt for home
    if nuke:
        if len(sys.argv) > 2:
            backend = sys.argv[-2]
        else:
            backend = 'anydbm'
        # FIXME: i'd like to have an option to abort the tracker creation
        #   say, by entering a single dot.  but i cannot think of
        #   appropriate prompt for that.
        home = raw_input(
            _('Enter directory path to create demo tracker [%s]: ') % home)
        if not home:
            home = DEFAULT_HOME
        templates = admin.AdminTool().listTemplates().keys()
        template = raw_input(
            _('Enter tracker template to use (one of (%s)) [%s]: ')
            % (','.join(templates), template))
        if not template:
            template = DEFAULT_TEMPLATE
        # install
        demo.install_demo(
            home, backend,
            admin.AdminTool().listTemplates()[template]['path'])
    # run
    demo.run_demo(home)
def run():
    home = DEFAULT_HOME
    template = DEFAULT_TEMPLATE
    nuke = sys.argv[-1] == 'nuke'
    # if there is no tracker in home, force nuke
    try:
        instance.open(home)
    except configuration.NoConfigError:
        nuke = 1
    # if we are to create the tracker, prompt for home
    if nuke:
        if len(sys.argv) > 2:
            backend = sys.argv[-2]
        else:
            backend = 'anydbm'
        # FIXME: i'd like to have an option to abort the tracker creation
        #   say, by entering a single dot.  but i cannot think of
        #   appropriate prompt for that.
        home = my_input(
            _('Enter directory path to create demo tracker [%s]: ') % home)
        if not home:
            home = DEFAULT_HOME
        templates = admin.AdminTool().listTemplates().keys()
        template = my_input(
            _('Enter tracker template to use (one of (%s)) [%s]: ')
            % (','.join(templates), template))
        if not template:
            template = DEFAULT_TEMPLATE
        # install
        demo.install_demo(
            home, backend,
            admin.AdminTool().listTemplates()[template]['path'])
    # run
    demo.run_demo(home)
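# Note: my_input is not defined in the example above. A minimal
# compatibility shim (an assumption on our part, not part of the original
# snippet) would pick raw_input on Python 2 and the built-in input on
# Python 3:
try:
    my_input = raw_input    # Python 2
except NameError:
    my_input = input        # Python 3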
def __init__(self, args):
    self.args = args
    tracker = instance.open(args.dir)
    self.db = db = tracker.open('admin')
    self.prodcats = {}
    self.prodused = {}
    for id in db.prodcat.getnodeids(retired=False):
        pd = db.prodcat.getnode(id)
        nn = normalize_name(pd.name.decode('utf-8'))
        key = (nn, int(pd.level))
        self.prodused[key] = False
        self.prodcats[key] = pd.id
    self.products = {}
    self.pr_used = {}
    for id in db.product.getnodeids(retired=False):
        pr = db.product.getnode(id)
        key = normalize_name(pr.sap_material.decode('utf-8'))
        self.pr_used[key] = False
        self.products[key] = pr.id
    d_s = self.args.delimiter.encode('utf-8')
    sap_dr = Unicode_DictReader(self.fixer_sap(), delimiter=d_s)
    self.debug("SAP")
    self.sap_recs = []
    self.sap_ids = {}
    for x in sap_dr:
        self.debug(repr(x))
        id = self.get_material(x)
        if id in self.sap_ids:
            self.debug('Ignoring duplicate: %r' % x)
            continue
        self.sap_ids[id] = len(self.sap_recs)
        self.sap_recs.append(x)
        assert x is self.sap_recs[self.sap_ids[id]]
def __init__(self, opt):
    self.opt = opt
    tracker = instance.open(opt.dir)
    self.db = tracker.open('admin')
    stati = [self.db.sup_status.lookup(x) for x in ('open', 'customer')]
    sissues = self.db.support.filter \
        ( None
        , dict(status=stati)
        , sort=[('+', 'status'), ('+', 'creation')]
        )
    self.sowner = self.db.user.lookup('support')
    self.status = dict \
        ((k, self.db.sup_status.get(k, 'name')) for k in stati)
    self.unassigned = []
    self.todo = []
    for i in sissues:
        sissue = self.db.support.getnode(i)
        if sissue.responsible == self.sowner:
            self.unassigned.append(sissue)
        else:
            self.todo.append(sissue)
    cls = self.db.sup_classification.getnodeids(retired=False)
    cls = [self.db.sup_classification.getnode(i) for i in cls]
    self.classification = dict((x.id, x.name) for x in cls)
    self.classification[None] = '-'
    self.raw_messages = {}
    self.formatted_messages = {}
    self.nonosy_customers = {}
    self.build_mails()
def main():
    # most ldap info is now fetched from extensions/config.ini
    parser = OptionParser()
    parser.add_option \
        ( "-d", "--database-directory"
        , dest    = "database_directory"
        , help    = "Directory of the roundup installation"
        , default = '.'
        )
    parser.add_option \
        ( "-u", "--update"
        , help    = "Update the LDAP directory with info from roundup"
        , default = False
        , action  = 'store_true'
        )
    opt, args = parser.parse_args()
    sys.path.insert(1, os.path.join(opt.database_directory, 'lib'))
    from ldap_sync import LDAP_Roundup_Sync
    tracker = instance.open(opt.database_directory)
    db = tracker.open('admin')
    lds = LDAP_Roundup_Sync(db)
    if len(args):
        for username in args:
            lds.sync_user_to_ldap(username, update=opt.update)
    else:
        lds.sync_all_users_to_ldap(update=opt.update)
def create_issue(self, title, message_text):
    """ TODO - write docs - eventually add summary, title """
    ## in the roundup install files, see scripts/add-issue
    ## for the code i used as a model
    try:
        tracker = instance.open(self.config['tracker'])
        db = tracker.open('admin')
        uid = db.user.lookup(self.config['user']['username'])
        db.close()
        db = tracker.open(self.config['user']['username'])
        #db = tracker.open('user')
        thismsg = []
        if message_text:
            thismsg = [db.msg.create(content=message_text,
                                     author=uid,
                                     date=date.Date())]
        #res = db.bug.create(title=title,
        #                    messages=thismsg)
        res = db.issue.create(title=title, messages=thismsg)
        db.commit()
    except BaseException, msg:
        logger.error("Exception raised when trying to add issue to tracker"
                     " in create_issue: {}".format(msg.message))
        pass
def testNoDBInConfig(self):
    """Arguably this should be tested in test_instance since it is
       triggered by instance.open. But it raises an error in the
       configuration module with a missing required param in config.ini.
    """
    # remove the backend key in config.ini
    self.munge_configini(mods=[("backend = ", None)])

    # this should fail as backend isn't defined.
    with self.assertRaises(configuration.OptionUnsetError) as cm:
        instance.open(self.dirname)

    self.assertEqual("RDBMS_BACKEND is not set"
                     " and has no default", cm.exception.__str__())
def main():
    tracker = instance.open(os.getcwd())
    db = tracker.open('admin')
    et = None
    try:
        et = db.ext_tracker.lookup('KPM')
    except KeyError:
        pass
    if not et:
        et = db.ext_tracker.create \
            ( name        = 'KPM'
            , description = 'VW/Audi Konzern Problem Management'
            )
    db.issue.set('74017', ext_tracker=et, ext_id='6435580')
    db.issue.set('74025', ext_tracker=et, ext_id='6435335')
    kw = None
    try:
        kw = db.msg_keyword.lookup('External Sync')
    except KeyError:
        pass
    if kw is None:
        db.msg_keyword.create \
            ( name        = 'External Sync'
            , description = 'Synchronize this message to external tracker'
            )
    db.commit()
def main():
    cmd = ArgumentParser()
    cmd.add_argument \
        ( 'leave_submission'
        , help = 'Leave submission number'
        )
    cmd.add_argument \
        ( '-d', '--directory'
        , help    = 'Tracker directory'
        , default = os.getcwd()
        )
    cmd.add_argument \
        ( '-u', '--user'
        , help    = 'User to open DB as'
        , default = 'admin'
        )
    args = cmd.parse_args()
    sys.path.insert(1, os.path.join(args.directory, 'lib'))
    import common
    import vacation
    tracker = instance.open(args.directory)
    db = tracker.open(args.user)
    ls = db.leave_submission.getnode(args.leave_submission)
    leave = db.daily_record_status.lookup('leave')
    d = dict()
    d['daily_record.user'] = ls.user
    d['daily_record.date'] = common.pretty_range(ls.first_day, ls.last_day)
    d['daily_record.status'] = leave
    trs = db.time_record.filter(None, d)
    if trs:
        print("Found time records, exiting")
        return
    dy = ls.first_day
    off = db.work_location.lookup('off')
    while dy <= ls.last_day:
        du = mindu = vacation.leave_duration(db, ls.user, dy)
        dt = common.pretty_range(dy, dy)
        dr = db.daily_record.filter(None, dict(user=ls.user, date=dt))
        wp = db.time_wp.getnode(ls.time_wp)
        tp = db.time_project.getnode(wp.project)
        if tp.max_hours is not None:
            mindu = min(du, tp.max_hours)
        assert len(dr) == 1
        if mindu:
            db.time_record.create \
                ( daily_record  = dr[0]
                , duration      = mindu
                , work_location = off
                , wp            = ls.time_wp
                )
        db.daily_record.set(dr[0], status=leave)
        dy += common.day
    db.commit()
def main():
    cmd = OptionParser()
    cmd.add_option \
        ( "-d", "--date"
        , dest    = "date"
        , help    = "Specify cut-off date for due-date"
        , default = ".+1w"
        )
    cmd.add_option \
        ( "-m", "--mail"
        , dest    = "mails"
        , help    = "Add mail address to receive summary of all open issues"
        , default = []
        , action  = "append"
        )
    cmd.add_option \
        ( "-n", "--nosy"
        , dest    = "nosy"
        , help    = "Add nosy issues to report"
        , action  = "store_true"
        )
    cmd.add_option \
        ( "-s", "--send"
        , dest    = "send"
        , help    = "Send reports via email (default: print only)"
        , action  = "store_true"
        )
    cmd.add_option \
        ( "-t", "--tracker"
        , dest    = "tpath"
        , help    = "Path to tracker instance"
        , default = "."
        )
    cmd.add_option \
        ( "-u", "--user"
        , dest    = "users"
        , help    = "Send/print report only for specified users"
        , action  = "append"
        , default = []
        )
    (options, args) = cmd.parse_args()
    tracker = instance.open(options.tpath)
    db = tracker.open("admin")
    r = Report \
        ( db
        , date      = options.date
        , send_mail = options.send
        , users     = options.users
        , mailall   = options.mails
        , do_nosy   = options.nosy
        )
    r.output()
    db.close()
def show_tickets():
    returnstring = ""
    tracker = instance.open('/projects/shared/roundup-1.4.6/demo')
    db = tracker.open('admin')
    issues = db.issue.list()
    resolved_id = db.status.lookup('resolved')
    for i in issues:
        issue = db.issue.getnode(i)
        if issue.status != resolved_id:
            # assumption: the title was meant here -- the original
            # concatenated the node object itself, which raises TypeError
            returnstring += "<br>" + issue.id + "<br>2" + issue.title
    return returnstring
def __init__(self, opt):
    self.opt = opt
    tracker = instance.open(opt.dir)
    self.db = tracker.open('admin')
    now = date.Date('.')
    exp = now + date.Interval('%s days' % opt.days)
    wps = self.db.time_wp.filter \
        ( None
        , dict(time_end='.;%s' % exp.pretty('%Y-%m-%d'))
        , sort=[('+', 'responsible')]
        )
    self.wps = [self.db.time_wp.getnode(i) for i in wps]
    self.messages = {}
    self.build_mails()
def __init__(self, opt):
    self.opt = opt
    tracker = instance.open(opt.dir)
    self.db = tracker.open('admin')
    stati = ['new', 'open', 'feedback']
    stati = [self.db.it_issue_status.lookup(x) for x in stati]
    issues = self.db.it_issue.filter \
        ( None
        , dict(status=stati)
        , sort=[('+', 'status'), ('+', 'creation')]
        )
    self.issues = [self.db.it_issue.getnode(i) for i in issues]
    self.status = dict \
        ((k, self.db.it_issue_status.get(k, 'name')) for k in stati)
    self.messages = {}
    self.build_mails()
def __init__(self, dir):
    tracker = instance.open(dir)
    self.db = db = tracker.open('admin')
    self.now = Date('.')
    udids = db.user_dynamic.getnodeids(retired=False)
    self.by_u = {}
    self.freez = {}
    for udid in udids:
        dyn = db.user_dynamic.getnode(udid)
        if not dyn.all_in:
            continue
        if dyn.max_flexitime is not None:
            continue
        if dyn.valid_to and dyn.valid_to <= self.year:
            continue
        if dyn.org_location not in self.olo:
            continue
        username = db.user.get(dyn.user, 'username')
        if username not in self.by_u:
            self.by_u[username] = []
        self.by_u[username].append(dyn)
def __init__(self, path, basedn, user='******'):
    global ymd, get_user_dynamic, common
    self.tracker = instance.open(path)
    self.db = self.tracker.open(user)
    self.basedn = basedn
    sys.path.insert(1, os.path.join(path, 'lib'))
    from user_dynamic import get_user_dynamic
    import common
    ymd = common.ymd
    self.common = common
    del sys.path[1]
    for rupname in self.db.getclasses():
        classname = self._classname(rupname)
        if not hasattr(self, classname):
            # create a derived class dynamically
            setattr(self, classname, type(classname, (self.Roundup,), {}))
        cls = getattr(self, classname)
        setattr(cls, 'db', self.db)
        setattr(cls, 'cl', self.db.getclass(rupname))
        setattr(cls, 'basedn', self.basedn)
        setattr(cls, 'master', self)
def main():
    # most ldap info is now fetched from extensions/config.ini
    parser = ArgumentParser()
    parser.add_argument \
        ( "users"
        , nargs = "*"
        , help  = "Users to update, default: all"
        )
    parser.add_argument \
        ( "-d", "--database-directory"
        , dest    = "database_directory"
        , help    = "Directory of the roundup installation"
        , default = '.'
        )
    parser.add_argument \
        ( "-u", "--update"
        , help    = "Update the LDAP directory with info from roundup"
        , default = False
        , action  = 'store_true'
        )
    parser.add_argument \
        ( "-v", "--verbose"
        , help    = "Verbosity"
        , default = 0
        , action  = 'count'
        )
    args = parser.parse_args()
    sys.path.insert(1, os.path.join(args.database_directory, 'lib'))
    from ldap_sync import LDAP_Roundup_Sync
    tracker = instance.open(args.database_directory)
    db = tracker.open('admin')
    lds = LDAP_Roundup_Sync(db, verbose=args.verbose)
    if len(args.users):
        for username in args.users:
            lds.sync_user_to_ldap(username, update=args.update)
    else:
        lds.sync_all_users_to_ldap(update=args.update)
def __init__ \
    ( self
    , user     = None
    , tpath    = None
    , oname    = None
    , toplevel = None
    , verbose  = None
    ):
    if verbose is not None:
        self.VERBOSE = int(verbose)
    if self.VERBOSE > 1:
        self.debug("Pygantt_XML, Version", self.VERSION)
    self.user     = user  or self.DEFAULT_USER
    self.tpath    = tpath or self.DEFAULT_PATH
    self.oname    = oname or self.DEFAULT_FILE
    self.toplevel = eval(toplevel or self.DEFAULT_ISSUE)
    self.now      = date.Date(".")
    if self.VERBOSE > 1:
        self.debug("Roundup user        = %s" % self.user)
        self.debug("Path to tracker     = %s" % self.tpath)
        self.debug("Name of output file = %s" % self.oname)
        self.debug("Considering toplevel issues %s ..." % self.toplevel)
        self.debug("Launching tracker ...")
    self.tracker = instance.open(self.tpath)
    sys.path.insert(1, os.path.join(self.tpath, "lib"))
    import user_dynamic
    self.get_user_dynamic  = user_dynamic.get_user_dynamic
    self.last_user_dynamic = user_dynamic.last_user_dynamic
    self.weekly_hours      = user_dynamic.weekly_hours
    if self.VERBOSE > 1:
        self.debug("Loading roundup data base ...")
    self.db = self.tracker.open(self.user)
    self.s_closed = self.db.status.lookup("closed")
    self.s_test   = self.db.status.lookup("testing")
    self.s_susp   = self.db.status.lookup("suspended")
            print 'already in use.'
            port += 100
    config['TRACKER_WEB'] = 'http://%s:%s/demo/' % (hostname, port)

    # write the config
    config['INSTANT_REGISTRATION'] = 1
    # FIXME: Move template-specific demo initialization into the templates.
    if template == 'responsive':
        config['STATIC_FILES'] = "static"
    if template == 'jinja2':
        config['TEMPLATE_ENGINE'] = 'jinja2'
        config['STATIC_FILES'] = "static"
    config.save(os.path.join(home, config.INI_FILE))

    # open the tracker and initialise
    tracker = instance.open(home)
    tracker.init(password.Password('admin'))

    # add the "demo" user
    db = tracker.open('admin')
    # FIXME: Move tracker-specific demo initialization into the tracker
    # templates.
    if template == 'minimal':
        db.user.create(username='******',
                       password=password.Password('demo'),
                       roles='User')
    else:
        db.user.create(username='******',
                       password=password.Password('demo'),
                       realname='Demo User', roles='User')
    db.commit()
    db.close()

def run_demo(home):
def install_demo(home, backend, template):
    """Install a demo tracker

    Parameters:
        home:     tracker home directory path
        backend:  database backend name
        template: tracker template
    """
    from roundup import init, instance, password

    # set up the config for this tracker
    template_dir = os.path.join('share', 'roundup', 'templates', template)
    # Load optional override ini file. Missing ini file is ignored.
    template_cfg = configuration.UserConfig(template_dir + "/config_ini.ini")
    config = configuration.CoreConfig(
        settings={i.name: i.get() for i in template_cfg.items()})
    config['TRACKER_HOME'] = home
    config['MAIL_DOMAIN'] = 'localhost'
    config['DATABASE'] = 'db'
    config['WEB_DEBUG'] = True
    if backend in ('mysql', 'postgresql'):
        config['RDBMS_HOST'] = 'localhost'
        config['RDBMS_USER'] = '******'
        config['RDBMS_PASSWORD'] = '******'
        config['RDBMS_NAME'] = 'rounduptest'
    config['RDBMS_BACKEND'] = backend

    # see if we need to clean up existing directory
    if os.path.exists(home):
        if os.path.exists(home + '/config.ini'):
            # clear everything out to avoid conflicts with former
            # extensions and detectors
            print("Nuking directory left from the previous demo instance.")
            shutil.rmtree(home)
        else:
            print("Error: Refusing to nuke non-tracker directory:")
            print("    %s" % home)
            sys.exit(1)

    init.install(home, template_dir)
    # Remove config_ini.ini file from tracker_home (not template dir).
    # Ignore file not found - not all templates have config_ini.ini files.
    try:
        os.remove(home + "/config_ini.ini")
    except OSError as e:  # FileNotFound exception under py3
        if e.errno == 2:
            pass
        else:
            raise

    # don't have email flying around
    nosyreaction = os.path.join(home, 'detectors', 'nosyreaction.py')
    if os.path.exists(nosyreaction):
        os.remove(nosyreaction)
    nosyreaction += 'c'
    if os.path.exists(nosyreaction):
        os.remove(nosyreaction)

    # figure basic params for server
    hostname = 'localhost'
    # pick a fairly odd, random port
    port = 8917
    while 1:
        print('Trying to set up web server on port %d ...' % port, )
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            s.connect((hostname, port))
        except socket.error as e:
            if not hasattr(e, 'args') or e.args[0] != errno.ECONNREFUSED:
                raise
            print('should be ok.')
            break
        else:
            s.close()
            print('already in use.')
            port += 100
    config['TRACKER_WEB'] = 'http://%s:%s/demo/' % (hostname, port)

    # write the config
    config['INSTANT_REGISTRATION'] = 1
    config.save(os.path.join(home, config.INI_FILE))

    # open the tracker and initialise
    tracker = instance.open(home)
    tracker.init(password.Password('admin'))

    # add the "demo" user
    db = tracker.open('admin')
    # FIXME: Move tracker-specific demo initialization into the tracker
    # templates.
    if os.path.basename(template) == 'minimal':
        db.user.create(username='******',
                       password=password.Password('demo'),
                       roles='User')
    else:
        db.user.create(username='******',
                       password=password.Password('demo'),
                       realname='Demo User', roles='User')
    db.commit()
    db.close()
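# A hypothetical driver for the two functions above (run_demo appears later
# in the same module); the home path, backend and template name are
# illustrative assumptions, not from the original:
if __name__ == '__main__':
    home = './demo'
    install_demo(home, 'anydbm', 'classic')
    run_demo(home)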
def main():
    cmd = ArgumentParser()
    cmd.add_argument \
        ( 'productgroups'
        , help = 'Product group matrix, SAP numbers'
        )
    cmd.add_argument \
        ( 'las'
        , help = 'LAS with supplier risk'
        )
    cmd.add_argument \
        ( '-d', '--directory'
        , dest    = 'dir'
        , help    = 'Tracker instance directory, default=%(default)s'
        , default = '.'
        )
    cmd.add_argument \
        ( '-o', '--orgmap'
        , help    = 'Mapping of organisations in LAS to tracker,'
                    ' default=%(default)s'
        , default = 'orglist'
        )
    cmd.add_argument \
        ( '-n', '--no_update'
        , dest    = 'update'
        , help    = 'Do not update tracker'
        , default = True
        , action  = 'store_false'
        )
    args = cmd.parse_args()
    tracker = instance.open(args.dir)
    db = tracker.open('admin')
    sys.path.insert(1, args.dir)
    from common import pretty_range
    orgmap = {}
    with open(args.orgmap) as f:
        dr = DictReader(f, delimiter=';')
        for rec in dr:
            orgmap[rec['name_las']] = rec['names_prtracker'].split('+')
    il = db.infosec_level
    il_ids = {}
    il_table = \
        [ ('Public',                10, True)
        #, ('Internal',              20, True)
        , ('Normal',                30, False)
        , ('Confidential',          40, True)
        , ('High',                  50, False)
        , ('Strictly Confidential', 60, True)
        , ('Very High',             70, False)
        ]
    try:
        special = il.lookup('Special')
        il.retire(special)
    except KeyError:
        pass
    for name, order, cons in il_table:
        var = name.lower()
        id = None
        try:
            id = il.lookup(name)
        except KeyError:
            pass
        if id:
            node = il.getnode(id)
            d = {}
            if node.order != order:
                d['order'] = order
            if node.name != name:
                d['name'] = name
            if node.is_consulting is None or node.is_consulting != cons:
                d['is_consulting'] = cons
            if d:
                il.set(id, **d)
        else:
            id = il.create(name=name, order=order, is_consulting=cons)
        il_ids[var] = id
    il_ids['normal / internal'] = il_ids['normal']
    srg_table = \
        [ ('Consulting',        'Consulting',                    True)
        , ('Consulting_small',  'Consulting_small',              True)
        , ('COTS',              'COTS (hardware or software)',   False)
        , ('Operation',         'Operation & Operation support', False)
        , ('SW-Dev',            'Software development',          False)
        , ('Operation / cloud', 'Cloud based services',          False)
        ]
    srg = db.security_req_group
    srg_ids = {}
    srg_by_name = {}
    for var, name, is_consulting in srg_table:
        try:
            v = srg.lookup(name)
        except KeyError:
            v = srg.create(name=name, is_consulting=is_consulting)
        srg_ids[var] = v
        srg_by_name[name] = v
    # Lifting of broken LAS csv:
    srg_by_name['Cloud bases services (*AAS)'] = \
        srg_by_name['Cloud based services']
    del srg_by_name['Cloud based services']
    curpg = None
    for rec in pg_iter(args.productgroups):
        srg = rec['ISEC-Requirements group'].strip()
        sap_ref = rec['SAP Ref'].strip()
        name = rec['English'].strip()
        if not srg and sap_ref.startswith('Z'):
            try:
                curpg = db.pg_category.lookup(name)
            except KeyError:
                curpg = db.pg_category.create(name=name, sap_ref=sap_ref)
        if not srg:
            continue
        if srg == 'n.a.' or srg == 'n.a' or srg == 'TBD':
            srg = None
        else:
            if srg.startswith('n.a. /'):
                srg = srg[6:].strip()
            srg = srg_ids[srg]
        il = rec['default protection level'].strip()
        if il.startswith('n.a. /'):
            il = il[6:].strip()
        if not il or il == 'n.a.':
            il = None
        else:
            il = il_ids[il]
        try:
            id = db.product_group.lookup(name)
            pg = db.product_group.getnode(id)
            if not pg.pg_category:
                db.product_group.set(id, pg_category=curpg)
        except KeyError:
            d = dict \
                ( name        = name
                , sap_ref     = sap_ref
                , pg_category = curpg
                )
            if il is not None:
                d['infosec_level'] = il
            if srg is not None:
                d['security_req_group'] = srg
            db.product_group.create(**d)
    src_ids = {}
    src_by_name = {}
    src_table = \
        [ ('low', 'Low',       10)
        , ('med', 'Medium',    20)
        , ('hi',  'High',      30)
        , ('vhi', 'Very High', 40)
        ]
    for var, name, order in src_table:
        try:
            id = db.supplier_risk_category.lookup(name)
        except KeyError:
            id = db.supplier_risk_category.create(name=name, order=order)
        src_ids[var] = id
        src_by_name[name] = id
    prt = db.purchase_risk_type
    prt_ids = {}
    prt_table = \
        [ ('low', 'Low',             10)
        , ('med', 'Medium',          20)
        , ('hi',  'High',            30)
        , ('vhi', 'Very High',       40)
        , ('dnp', 'Do not purchase', 50)
        ]
    for var, name, order in prt_table:
        try:
            id = prt.lookup(name)
        except KeyError:
            id = prt.create(name=name, order=order)
        prt_ids[var] = id
    # First bunch without a supplier_risk_category:
    # Supplier not in LAS or not evaluated
    # Then entries for each supplier_risk_category
    psr_table = \
        [ (None,  'public',                'med')
        #, (None,  'internal',              'hi')
        , (None,  'normal',                'hi')
        , (None,  'confidential',          'vhi')
        , (None,  'high',                  'vhi')
        , (None,  'strictly confidential', 'dnp')
        , (None,  'very high',             'dnp')
        # Low:
        , ('low', 'public',                'low')
        #, ('low', 'internal',              'low')
        , ('low', 'normal',                'low')
        , ('low', 'confidential',          'low')
        , ('low', 'high',                  'low')
        , ('low', 'strictly confidential', 'med')
        , ('low', 'very high',             'med')
        # Medium:
        , ('med', 'public',                'low')
        #, ('med', 'internal',              'med')
        , ('med', 'normal',                'med')
        , ('med', 'confidential',          'hi')
        , ('med', 'high',                  'hi')
        , ('med', 'strictly confidential', 'vhi')
        , ('med', 'very high',             'vhi')
        # High:
        , ('hi',  'public',                'med')
        #, ('hi',  'internal',              'hi')
        , ('hi',  'normal',                'hi')
        , ('hi',  'confidential',          'vhi')
        , ('hi',  'high',                  'vhi')
        , ('hi',  'strictly confidential', 'dnp')
        , ('hi',  'very high',             'dnp')
        # Very High:
        , ('vhi', 'public',                'hi')
        #, ('vhi', 'internal',              'dnp')
        , ('vhi', 'normal',                'dnp')
        , ('vhi', 'confidential',          'dnp')
        , ('vhi', 'high',                  'dnp')
        , ('vhi', 'strictly confidential', 'dnp')
        , ('vhi', 'very high',             'dnp')
        ]
    payment_types = \
        [ ('Invoice',     10, False)
        , ('Credit Card', 20, True)
        ]
    for src, il, prt in psr_table:
        d = dict(infosec_level=il_ids[il])
        dd = dict(d)
        d['purchase_risk_type'] = prt_ids[prt]
        if src is None:
            dd['supplier_risk_category'] = '-1'
        else:
            dd['supplier_risk_category'] = src_ids[src]
            d['supplier_risk_category'] = src_ids[src]
        ids = db.purchase_security_risk.filter(None, dd)
        if ids:
            assert len(ids) == 1
            id = ids[0]
        else:
            id = db.purchase_security_risk.create(**d)
    for lasrec in las_iter(args.las):
        if not lasrec['Consulting_small']:
            continue
        entity = lasrec['Entity'].strip()
        entity = orgmap.get(entity, [entity])
        sname = lasrec['Name of Supplier']
        try:
            sup = db.pr_supplier.lookup(sname)
        except KeyError:
            print("Supplier not found: %s" % sname)
            continue
        srcs = {}
        for k in srg_by_name:
            srgid = srg_by_name[k]
            if k in lasrec:
                srcs[srgid] = src_by_name[lasrec[k].strip()]
            else:
                raise ValueError('Invalid LAS Entry: %s' % k)
        for e in entity:
            if e:
                try:
                    org = db.organisation.lookup(e)
                except KeyError:
                    print("Organisation not found: %s" % e)
                    continue
                insert_supplier_risk(db, sup, org, srcs)
            else:
                print("Warning: Empty Entity")
                # iter over valid orgs
                d = dict(may_purchase=True)
                d['valid_from'] = ';.,-'
                d['valid_to'] = '.;,-'
                for org in db.organisation.filter(None, d):
                    insert_supplier_risk(db, sup, org, srcs)
                break
    for name, order, need in payment_types:
        try:
            pt = db.payment_type.lookup(name)
            pt = db.payment_type.getnode(pt)
            if need != pt.need_approval:
                db.payment_type.set(pt.id, need_approval=need)
        except KeyError:
            pt = db.payment_type.create \
                (name=name, order=order, need_approval=need)
    if args.update:
        db.commit()
cmd.add_option \
    ( '-s', '--send-email-via'
    , dest    = 'send_email'
    , help    = "Send as email via this server,"
                " don't report to standard output"
    , default = None
    )
cmd.add_option \
    ( '-d', '--tracker-directory'
    , dest    = "dir"
    , help    = "Directory of the tracker to check"
    , default = "."
    )
opt, args = cmd.parse_args()
if not len(args):
    cmd.error('Need at least one container issue number')
tracker = instance.open(opt.dir)
db = tracker.open('admin')
containers = dict((x, db.issue.getnode(x)) for x in args)
emails = {}  # (adr, what) to send to
stati = [db.status.lookup(x) for x in 'open', 'testing']
closed = db.status.lookup('closed')
issues = db.issue.filter(None, dict(status=stati))
for i in issues:
    issue = db.issue.getnode(i)
    # dependency check:
    if not issue.depends:
        continue
    # container check:
    parent = issue
def import_xml(tracker_home, xml_file, file_dir):
    """ Generate Roundup tracker import files based on the tracker schema,
    sf.net xml export and downloaded files from sf.net.
    """
    tracker = instance.open(tracker_home)
    db = tracker.open('admin')

    resolved = db.status.lookup('resolved')
    unread = db.status.lookup('unread')
    chatting = db.status.lookup('unread')
    critical = db.priority.lookup('critical')
    urgent = db.priority.lookup('urgent')
    bug = db.priority.lookup('bug')
    feature = db.priority.lookup('feature')
    wish = db.priority.lookup('wish')
    adminuid = db.user.lookup('admin')
    anonuid = db.user.lookup('anonymous')

    root = ElementTree.parse(xml_file).getroot()

    def to_date(ts):
        return date.Date(time.gmtime(float(ts)))

    # parse out the XML
    artifacts = []
    categories = set()
    users = set()
    add_files = set()
    remove_files = set()
    for artifact in root.find('artifacts'):
        d = {}
        op = {}
        artifacts.append(d)
        for field in artifact.findall('field'):
            name = field.get('name')
            if name == 'artifact_messages':
                for message in field.findall('message'):
                    l = d.setdefault('messages', [])
                    m = {}
                    l.append(m)
                    for field in message.findall('field'):
                        name = field.get('name')
                        if name == 'adddate':
                            m[name] = to_date(field.text)
                        else:
                            m[name] = field.text
                        if name == 'user_name':
                            users.add(field.text)
            elif name == 'artifact_history':
                for event in field.findall('history'):
                    l = d.setdefault('history', [])
                    e = {}
                    l.append(e)
                    for field in event.findall('field'):
                        name = field.get('name')
                        if name == 'entrydate':
                            e[name] = to_date(field.text)
                        else:
                            e[name] = field.text
                        if name == 'mod_by':
                            users.add(field.text)
                    if e['field_name'] == 'File Added':
                        add_files.add(e['old_value'].split(':')[0])
                    elif e['field_name'] == 'File Deleted':
                        remove_files.add(e['old_value'].split(':')[0])
            elif name == 'details':
                op['body'] = field.text
            elif name == 'submitted_by':
                op['user_name'] = field.text
                d[name] = field.text
                users.add(field.text)
            elif name == 'open_date':
                thedate = to_date(field.text)
                op['adddate'] = thedate
                d[name] = thedate
            else:
                d[name] = field.text
        categories.add(d['category'])

        if 'body' in op:
            l = d.setdefault('messages', [])
            l.insert(0, op)

    add_files -= remove_files

    # create users
    userd = {'nobody': '2'}
    users.remove('nobody')
    data = [
        {'id': '1', 'username': '******',
         'password': password.Password('admin'),
         'roles': 'Admin', 'address': '*****@*****.**'},
        {'id': '2', 'username': '******', 'roles': 'Anonymous'},
    ]
    for n, user in enumerate(list(users)):
        userd[user] = n + 3
        data.append({'id': str(n + 3), 'username': user, 'roles': 'User',
                     'address': '*****@*****.**' % user})
    write_csv(db.user, data)
    users = userd

    # create categories
    categoryd = {'None': None}
    categories.remove('None')
    data = []
    for n, category in enumerate(list(categories)):
        categoryd[category] = n
        data.append({'id': str(n), 'name': category})
    write_csv(db.keyword, data)
    categories = categoryd

    # create issues
    issue_data = []
    file_data = []
    message_data = []
    issue_journal = []
    message_id = 0
    for artifact in artifacts:
        d = {}
        d['id'] = artifact['artifact_id']
        d['title'] = artifact['summary']
        d['assignedto'] = users[artifact['assigned_to']]
        if d['assignedto'] == '2':
            d['assignedto'] = None
        d['creation'] = artifact['open_date']
        activity = artifact['open_date']
        d['creator'] = users[artifact['submitted_by']]
        actor = d['creator']
        if categories[artifact['category']]:
            d['keyword'] = [categories[artifact['category']]]
        issue_journal.append((
            d['id'], d['creation'].get_tuple(), d['creator'], "'create'", {}
        ))
        p = int(artifact['priority'])
        if artifact['artifact_type'] == 'Feature Requests':
            if p > 3:
                d['priority'] = feature
            else:
                d['priority'] = wish
        else:
            if p > 7:
                d['priority'] = critical
            elif p > 5:
                d['priority'] = urgent
            elif p > 3:
                d['priority'] = bug
            else:
                d['priority'] = feature
        s = artifact['status']
        if s == 'Closed':
            d['status'] = resolved
        elif s == 'Deleted':
            d['status'] = resolved
            d['is retired'] = True
        else:
            d['status'] = unread
        nosy = set()
        for message in artifact.get('messages', []):
            authid = users[message['user_name']]
            if not message['body']:
                continue
            body = convert_message(message['body'], message_id)
            if not body:
                continue
            m = {'content': body, 'author': authid,
                 'date': message['adddate'],
                 'creation': message['adddate'], }
            message_data.append(m)
            if authid not in (None, '2'):
                nosy.add(authid)
            activity = message['adddate']
            actor = authid
            if d['status'] == unread:
                d['status'] = chatting

        # add import message
        m = {'content': 'IMPORT FROM SOURCEFORGE', 'author': '1',
             'date': today, 'creation': today}
        message_data.append(m)

        # sort messages and assign ids
        d['messages'] = []
        message_data.sort(key=lambda a: a['date'])
        for message in message_data:
            message_id += 1
            message['id'] = str(message_id)
            d['messages'].append(message_id)
        d['nosy'] = list(nosy)

        files = []
        for event in artifact.get('history', []):
            if event['field_name'] == 'File Added':
                fid, name = event['old_value'].split(':', 1)
                if fid in add_files:
                    files.append(fid)
                    name = name.strip()
                    try:
                        f = open(os.path.join(file_dir, fid), 'rb')
                        content = f.read()
                        f.close()
                    except:
                        content = 'content missing'
                    file_data.append({
                        'id': fid,
                        'creation': event['entrydate'],
                        'creator': users[event['mod_by']],
                        'name': name,
                        'type': mimetypes.guess_type(name)[0],
                        'content': content,
                    })
                continue
            elif event['field_name'] == 'close_date':
                action = "'set'"
                info = {'status': unread}
            elif event['field_name'] == 'summary':
                action = "'set'"
                info = {'title': event['old_value']}
            else:
                # not an interesting / translatable event
                continue
            row = [d['id'], event['entrydate'].get_tuple(),
                   users[event['mod_by']], action, info]
            if event['entrydate'] > activity:
                activity = event['entrydate']
            issue_journal.append(row)
        d['files'] = files
        d['activity'] = activity
        d['actor'] = actor
        issue_data.append(d)

    write_csv(db.issue, issue_data)
    write_csv(db.msg, message_data)
    write_csv(db.file, file_data)

    f = open('/tmp/imported/issue-journals.csv', 'w')
    writer = csv.writer(f, colon_separated)
    writer.writerows(issue_journal)
    f.close()
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
# Cron-job for updating adr-type for valid or closed abos.
# Should be run in the early morning of every 1st of a month.

import sys
from roundup.date import Date
from roundup import instance

tracker = instance.open(sys.argv[1])
db = tracker.open('admin')

now = Date('.')
type_cat = db.adr_type_cat.lookup('ABO')
abo_adr_types = db.adr_type.find(typecat=type_cat)
adr = db.address.filter(None, {'adr_type': abo_adr_types})
adr = dict([(k, 1) for k in adr])
valid_abos = db.abo.filter(None, {'end': '-1'})
storno_abos = db.abo.filter(None, {'end': now.pretty(';%Y-%m-%d')})
for abo in valid_abos + storno_abos:
    adr[db.abo.get(abo, 'subscriber')] = 1
for a in adr.iterkeys():
    # May seem like a noop -- leave the correct updating to the auditor.
    db.address.set(a, adr_type=db.address.get(a, 'adr_type'))
db.commit()
Load up the indicated tracker with N issues and N/100 users.
'''
import sys, os, random
from roundup import instance

# open the instance; both the tracker home and N are required
if len(sys.argv) < 3:
    print "Error: Not enough arguments"
    print __doc__.strip() % (sys.argv[0])
    sys.exit(1)
tracker_home = sys.argv[1]
N = int(sys.argv[2])

# open the tracker
tracker = instance.open(tracker_home)
db = tracker.open('admin')

priorities = db.priority.list()
statuses = db.status.list()
resolved_id = db.status.lookup('resolved')
statuses.remove(resolved_id)

names = ['alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta', 'eta',
         'theta', 'iota', 'kappa', 'lambda', 'mu', 'nu', 'xi', 'omicron',
         'pi', 'rho']

titles = '''Lorem ipsum dolor sit amet, consectetuer adipiscing elit.
Duis nibh purus, bibendum sed, condimentum ut, bibendum ut, risus.
Fusce pede enim, nonummy sit amet, dapibus a, blandit eget, metus.
Nulla risus.
'''
from __future__ import print_function

import sys, os, random
from roundup import instance

# open the instance; both the tracker home and N are required
if len(sys.argv) < 3:
    print("Error: Not enough arguments")
    print(__doc__.strip() % (sys.argv[0]))
    sys.exit(1)
tracker_home = sys.argv[1]
N = int(sys.argv[2])

# open the tracker
tracker = instance.open(tracker_home)
db = tracker.open('admin')
db.tx_Source = "cli"

priorities = db.priority.list()
statuses = db.status.list()
resolved_id = db.status.lookup('resolved')
statuses.remove(resolved_id)

names = [
    'alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta', 'eta', 'theta',
    'iota', 'kappa', 'lambda', 'mu', 'nu', 'xi', 'omicron', 'pi', 'rho'
]

titles = '''Lorem ipsum dolor sit amet, consectetuer adipiscing elit.
Duis nibh purus, bibendum sed, condimentum ut, bibendum ut, risus.
#!/usr/bin/python

import sys, os
from roundup import date
from roundup import instance

tracker = sys.argv[1]
sys.path.insert(1, os.path.join(tracker, 'lib'))
from common import tt_clearance_by
tracker = instance.open(tracker)
db = tracker.open('admin')

submitted = db.daily_record_status.lookup('submitted')
drecs = db.daily_record.find(status=submitted)
users = {}
clearers = {}
for dr in drecs:
    users[db.daily_record.get(dr, 'user')] = 1
for u in users.iterkeys():
    for c in tt_clearance_by(db, u, only_subs=True):
        clearers[c] = 1
print clearers
def main(argv): """Handle the arguments to the program and initialise environment. """ # take the argv array and parse it leaving the non-option # arguments in the args array. try: optionsList, args = getopt.getopt(argv[1:], "vc:C:S:", ["set=", "class="]) except getopt.GetoptError: # print help information and exit: usage(argv) sys.exit(2) for (opt, arg) in optionsList: if opt == "-v": print "%s (python %s)" % (roundup_version, sys.version.split()[0]) return # figure the instance home if len(args) > 0: instance_home = args[0] else: instance_home = os.environ.get("ROUNDUP_INSTANCE", "") if not (instance_home and os.path.isdir(instance_home)): return usage(argv) # get the instance import roundup.instance instance = roundup.instance.open(instance_home) # get a mail handler db = instance.open("admin") # now wrap in try/finally so we always close the database try: if hasattr(instance, "MailGW"): handler = instance.MailGW(instance, db, optionsList) else: handler = mailgw.MailGW(instance, db, optionsList) # if there's no more arguments, read a single message from stdin if len(args) == 1: return handler.do_pipe() # otherwise, figure what sort of mail source to handle if len(args) < 3: return usage(argv, _("Error: not enough source specification information")) source, specification = args[1:3] # time out net connections after a minute if we can if source not in ("mailbox", "imaps"): if hasattr(socket, "setdefaulttimeout"): socket.setdefaulttimeout(60) if source == "mailbox": return handler.do_mailbox(specification) # the source will be a network server, so obtain the credentials to # use in connecting to the server try: # attempt to obtain credentials from a ~/.netrc file authenticator = netrc.netrc().authenticators(specification) username = authenticator[0] password = authenticator[2] server = specification # IOError if no ~/.netrc file, TypeError if the hostname # not found in the ~/.netrc file: except (IOError, TypeError): match = re.match(r"((?P<user>[^:]+)(:(?P<pass>.+))?@)?(?P<server>.+)", specification) if match: username = match.group("user") password = match.group("pass") server = match.group("server") else: return usage(argv, _("Error: %s specification not valid") % source) # now invoke the mailgw handler depending on the server handler requested if source.startswith("pop"): ssl = source.endswith("s") if ssl and sys.version_info < (2, 4): return usage(argv, _("Error: a later version of python is required")) return handler.do_pop(server, username, password, ssl) elif source == "apop": return handler.do_apop(server, username, password) elif source.startswith("imap"): ssl = source.endswith("s") mailbox = "" if len(args) > 3: mailbox = args[3] return handler.do_imap(server, username, password, mailbox, ssl) return usage(argv, _('Error: The source must be either "mailbox",' ' "pop", "pops", "apop", "imap" or "imaps"')) finally: # handler might have closed the initial db and opened a new one handler.db.close()
import sys
import roundup.instance

if len(sys.argv) == 1:
    print __doc__
    sys.exit(1)

# Iterate over all instance homes specified in argv.
for home in sys.argv[1:]:
    # Do some basic exception handling to catch bad arguments.
    try:
        instance = roundup.instance.open(home)
    except:
        print 'Cannot open instance home directory %s!' % home
        continue
    db = instance.open('admin')
    db.tx_Source = "cli"
    print 'Migrating active queries in %s (%s):' % (
        instance.config.TRACKER_NAME, home)
    for query in db.query.list():
        url = db.query.get(query, 'url')
        if url[0] == '?':
            url = url[1:]
            print '  Migrating query%s (%s)' % (
                query, db.query.get(query, 'name'))
            db.query.set(query, url=url)
    db.commit()
    db.close()
# Schema diagram generator contributed by Stefan Seefeld of the fresco
# project http://www.fresco.org/.
#
# It generates a 'dot file' that is then fed into the 'dot'
# tool (http://www.graphviz.org) to generate a graph:
#
# %> ./schema.py
# %> dot -Tps schema.dot -o schema.ps
# %> gv schema.ps
#
import sys
import roundup.instance

# open the instance
instance = roundup.instance.open(sys.argv[1])
db = instance.open()

# diagram preamble
print 'digraph schema {'
print 'size="8,6"'
print 'node [shape="record" bgcolor="#ffe4c4" style=filled]'
print 'edge [taillabel="1" headlabel="1" dir=back arrowtail=ediamond]'

# get all the classes
types = db.classes.keys()

# one record node per class
for i in range(len(types)):
    print 'node%d [label="{%s|}"]' % (i, types[i])

# now draw in the relations
def tracker(directory=settings.TRACKER_HOME):
    return instance.open(directory)
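# Hypothetical usage of the helper above (assumes settings.TRACKER_HOME
# points at a valid tracker home, as in the other examples here); the
# function name is illustrative, not from the original:
def list_issue_ids():
    db = tracker().open('admin')
    try:
        return db.issue.list()
    finally:
        db.close()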
def main():
    # most ldap info is now fetched from extensions/config.ini
    parser = ArgumentParser()
    parser.add_argument \
        ( "file"
        , help = "CSV import file"
        )
    parser.add_argument \
        ( "-d", "--database-directory"
        , dest    = "database_directory"
        , help    = "Directory of the roundup installation"
        , default = '.'
        )
    parser.add_argument \
        ( '-D', '--delimiter'
        , dest    = 'delimiter'
        , help    = 'CSV delimiter character (tab)'
        , default = '\t'
        )
    parser.add_argument \
        ( "-f", "--field"
        , dest    = "fields"
        , help    = "Fields to update in dyn. user, e.g. sap_cc or"
                    " department; can be specified multiple times"
        , action  = 'append'
        , default = []
        )
    parser.add_argument \
        ( "-N", "--new"
        , help    = "Date of new dynamic user"
        , default = '2017-10-01'
        )
    parser.add_argument \
        ( "-u", "--update"
        , help    = "Update roundup"
        , default = False
        , action  = 'store_true'
        )
    parser.add_argument \
        ( "-v", "--verbose"
        , help    = "Verbose messages"
        , default = False
        , action  = 'store_true'
        )
    args = parser.parse_args()
    tracker = instance.open(args.database_directory)
    db = tracker.open('admin')
    sys.path.insert(1, os.path.join(args.database_directory, 'lib'))
    import user_dynamic
    r = Reader(args.file)
    d = DictReader(r, delimiter=args.delimiter)
    # looked up once ahead of the loop: st is referenced by the
    # name-lookup branch below, dt by the dynamic-user checks
    dt = date.Date(args.new)
    st = db.user_status.lookup('valid')
    for line in d:
        if 'username' in line:
            try:
                user = db.user.getnode(db.user.lookup(line['username']))
            except KeyError:
                print("User not found: %s" % line['username'])
                continue
            sn = user.lastname
            fn = user.firstname
            username = user.username
        else:
            sn = line['Surname'].decode('utf-8')
            fn = line['First name'].decode('utf-8')
            if not sn or not fn:
                print("Name empty: %(sn)s %(fn)s" % locals())
                continue
            users = db.user.filter \
                (None, dict(firstname=fn, lastname=sn, status=st))
            if not users and ' ' in fn:
                fn = fn.split(' ', 1)[0]
                users = db.user.filter \
                    (None, dict(firstname=fn, lastname=sn, status=st))
            if not users:
                print("User not found: %(sn)s %(fn)s" % locals())
                continue
            if len(users) != 1:
                uu = []
                for u in users:
                    user = db.user.getnode(u)
                    if (user.firstname.decode('utf-8') != fn
                        or user.lastname.decode('utf-8') != sn):
                        continue
                    uu.append(u)
                users = uu
            if len(users) != 1:
                print(users, fn, sn)
            assert len(users) == 1
            user = db.user.getnode(users[0])
            if (user.firstname.decode('utf-8') != fn
                or user.lastname.decode('utf-8') != sn):
                print(user.firstname, user.lastname, fn, sn)
            username = user.username
        # Get user dynamic record
        dyn = user_dynamic.get_user_dynamic(db, user.id, dt)
        if not dyn:
            print("No dyn. user record: %(username)s" % locals())
            continue
        if dyn.valid_to:
            print("Dyn. user record limited: %(username)s" % locals())
            continue
        if dyn.valid_from > dt:
            print("Dyn. record starts after date: %(username)s" % locals())
            continue
        if not dyn.vacation_yearly:
            print("No yearly vacation: %(username)s" % locals())
            continue
        do_create = True
        if dyn.valid_from == dt:
            do_create = False
        update = {}
        try:
            key = ''
            for k in fieldmap:
                f = fieldmap[k]
                if f in args.fields and k in line:
                    key = line[k].strip()
                    if f in item_map:
                        key = item_map[f].get(key, key)
                    cn = dyn.cl.properties[f].classname
                    cls = db.getclass(cn)
                    item = cls.lookup(key)
                    if dyn[f] != item:
                        update[f] = item
        except KeyError:
            print("%(f)s not found: %(key)s: %(username)s" % locals())
            continue
        if update:
            if do_create:
                fields = user_dynamic.dynuser_copyfields
                param = dict((i, dyn[i]) for i in fields)
                param['valid_from'] = dt
                param.update(update)
                if args.update:
                    id = db.user_dynamic.create(**param)
                    if args.verbose:
                        print("CREATED: %s" % id)
                else:
                    if args.verbose:
                        print("user_dynamic-create: %s" % param)
            else:
                if args.update:
                    db.user_dynamic.set(dyn.id, **update)
                else:
                    if args.verbose:
                        print("user_dynamic-update: %s %s %s"
                              % (update, fn, sn))
    if args.update:
        db.commit()
    , 'shadowFlag'       : ('always', bool,    'shadow_used')
    , 'shadowInactive'   : ('always', int,     'shadow_inactive')
    , 'shadowLastChange' : ('always', datecvt, 'shadow_last_change')
    , 'shadowMax'        : ('always', int,     'shadow_max')
    , 'shadowMin'        : ('always', int,     'shadow_min')
    , 'shadowWarning'    : ('always', int,     'shadow_warning')
    , 'uidNumber'        : ('once',   int,     'uid')
    , 'userPassword'     : ('always', str,     'user_password')
    }

config = Config()
openlog(config.LOG_PREFIX, 0, config.LOG_FACILITY)
setlogmask(LOG_UPTO(config.LOGLEVEL))
syslog(LOG_DEBUG, "started")
try:
    tracker = instance.open(config.TRACKER)
    db = tracker.open(config.ROUNDUP_USER)
    ld = ldap.initialize(config.URL)
    ld.simple_bind_s(config.BIND_DN, config.BIND_PW)
except StandardError, cause:
    log_traceback(cause)
    sys.exit(23)
for line in sys.stdin:
    name = line.strip()
    syslog(LOG_DEBUG, "name=%s" % name)
    try:
        u = db.user.getnode(db.user.lookup(name))
        if u.sync_with_ldap:
            syslog(LOG_INFO, "updating: %s" % u.realname)
            k = KEYS.keys()
, action = "store_true" ) parser.add_argument \ ( "-v", "--verbose" , dest = "verbose" , help = "Output types in addition to property names" , default = False , action = "store_true" ) args = parser.parse_args() sys.path.insert(1, os.path.join(args.directory, 'lib')) sys.path.insert(1, os.path.join(args.directory, 'extensions')) from help import combined_name from ldap_sync import LDAP_Roundup_Sync tracker = instance.open(args.directory) db = tracker.open('admin') _ = get_translation(db.config.TRACKER_LANGUAGE, db.config.TRACKER_HOME).gettext if args.as_csv: writer = csv.writer(sys.stdout, delimiter=args.delimiter) l = ['table', 'property', 'gui-name'] if args.ldap: l.extend(('ldap attribute', 'sync direction')) writer.writerow(l) if args.as_list: print "properties = \\" lds = None if args.ldap: lds = LDAP_Roundup_Sync(db) for clcnt, cl in enumerate(sorted(db.getclasses())):
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-

import os
import sys
from roundup import instance

tracker = instance.open(os.getcwd())
db = tracker.open('admin')

# Loop over all issues and set effort_hours from numeric_effort * 8
ids = db.issue.getnodeids(retired=False)
ids.sort(key=int)
print "Last issue: %s" % ids[-1]
for id in ids:
    if (int(id) % 100) == 0:
        print "\r%s" % id,
        sys.stdout.flush()
    issue = db.issue.getnode(id)
    if issue.numeric_effort is None:
        continue
    hours = issue.numeric_effort * 8
    if issue.numeric_effort and not issue.effort_hours:
        db.issue.set(id, effort_hours=hours, numeric_effort=None)
print ""
db.commit()
            if not hasattr(e, 'args') or e.args[0] != errno.ECONNREFUSED:
                raise
            print 'should be ok.'
            break
        else:
            s.close()
            print 'already in use.'
            port += 100
    config['TRACKER_WEB'] = 'http://%s:%s/demo/'%(hostname, port)

    # write the config
    config['INSTANT_REGISTRATION'] = 1
    config.save(os.path.join(home, config.INI_FILE))

    # open the tracker and initialise
    tracker = instance.open(home)
    tracker.init(password.Password('admin'))

    # add the "demo" user
    db = tracker.open('admin')
    db.user.create(username='******', password=password.Password('demo'),
                   realname='Demo User', roles='User')
    db.commit()
    db.close()

def run_demo(home):
    """Run the demo tracker installed in ``home``"""
    cfg = configuration.CoreConfig(home)
    url = cfg["TRACKER_WEB"]
    hostname, port = urlparse.urlparse(url)[1].split(':')
    port = int(port)
def setup_tracker(self, backend=None):
    """ Install and initialize tracker in dirname, return tracker instance.
        If directory exists, it is wiped out before the operation.
    """
    self.__class__.count += 1
    self.dirname = '_test_init_%s' % self.count
    if backend:
        self.backend = backend
    self.config = config = configuration.CoreConfig()
    config.DATABASE = 'db'
    config.RDBMS_NAME = "rounduptestttt"
    config.RDBMS_HOST = "localhost"
    if 'RDBMS_HOST' in os.environ:
        config.RDBMS_HOST = os.environ['RDBMS_HOST']
    config.RDBMS_USER = "******"
    if 'RDBMS_USER' in os.environ:
        config.RDBMS_USER = os.environ['RDBMS_USER']
    config.RDBMS_PASSWORD = "******"
    if 'RDBMS_PASSWORD' in os.environ:
        config.RDBMS_PASSWORD = os.environ['RDBMS_PASSWORD']
    config.MAIL_DOMAIN = "your.tracker.email.domain.example"
    config.TRACKER_WEB = "http://localhost:4711/ttt/"
    config.RDBMS_TEMPLATE = "template0"
    config.MAIL_DEBUG = "maildebug"
    config.init_logging()
    self.tearDown()
    srcdir = os.path.join(os.path.dirname(__file__), '..')
    os.mkdir(self.dirname)
    for f in ('detectors', 'extensions', 'html', 'initial_data.py',
              'lib', 'locale', 'schema',
              'schemas/%s.py' % self.schemafile,
              'TEMPLATE-INFO.txt', 'utils'):
        ft = f
        if f.startswith('schemas'):
            ft = 'schema.py'
        os.symlink \
            ( os.path.abspath(os.path.join(srcdir, f))
            , os.path.join(self.dirname, ft)
            )
    config.RDBMS_BACKEND = self.backend
    self.config.save(os.path.join(self.dirname, 'config.ini'))
    tracker = instance.open(self.dirname)
    if tracker.exists():
        tracker.nuke()
    tracker.init(password.Password(self.config.RDBMS_PASSWORD))
    self.tracker = tracker
    # LDAP Config
    config = self.tracker.config
    self.base_dn = 'OU=example,DC=example,DC=com'
    ldap_settings = dict \
        ( uri      = 'ldap://do.not.care:389'
        , bind_dn  = 'CN=system,OU=test'
        , password = '******'
        , base_dn  = self.base_dn
        , update_ldap    = 'True'
        , update_roundup = 'True'
        , objectclass    = 'user'
        , ad_domains     = 'ds1.internal'
        , no_starttls    = 'False'
        , do_not_sync_roundup_properties = ''
        , do_not_sync_ldap_properties    = ''
        , allowed_dn_suffix_by_domain    = 'ext1.internal:OU=External'
        )
    limit_settings = dict \
        ( picture_sync_size = '9k'
        #, picture_quality   = '80'
        )
    config.ext = UserConfig()
    for k in ldap_settings:
        o = Option(config.ext, 'LDAP', k)
        config.ext.add_option(o)
        config.ext['LDAP_' + k.upper()] = ldap_settings[k]
    for k in limit_settings:
        o = Option(config.ext, 'LIMIT', k)
        config.ext.add_option(o)
        config.ext['LIMIT_' + k.upper()] = limit_settings[k]
    # Override before call to setup_ldap if necessary
    self.aux_ldap_parameters = {}
def main(argv):
    '''Handle the arguments to the program and initialise environment.
    '''
    # take the argv array and parse it leaving the non-option
    # arguments in the args array.
    try:
        optionsList, args = getopt.getopt(argv[1:], 'vc:C:S:',
                                          ['set=', 'class='])
    except getopt.GetoptError:
        # print help information and exit:
        usage(argv)
        sys.exit(2)
    for (opt, arg) in optionsList:
        if opt == '-v':
            print '%s (python %s)'%(roundup_version,
                                    sys.version.split()[0])
            return

    # figure the instance home
    if len(args) > 0:
        instance_home = args[0]
    else:
        instance_home = os.environ.get('ROUNDUP_INSTANCE', '')
    if not (instance_home and os.path.isdir(instance_home)):
        return usage(argv)

    # get the instance
    import roundup.instance
    instance = roundup.instance.open(instance_home)

    # get a mail handler
    db = instance.open('admin')

    # now wrap in try/finally so we always close the database
    try:
        if hasattr(instance, 'MailGW'):
            handler = instance.MailGW(instance, db, optionsList)
        else:
            handler = mailgw.MailGW(instance, db, optionsList)

        # if there's no more arguments, read a single message from stdin
        if len(args) == 1:
            return handler.do_pipe()

        # otherwise, figure what sort of mail source to handle
        if len(args) < 3:
            return usage(argv,
                _('Error: not enough source specification information'))
        source, specification = args[1:3]

        # time out net connections after a minute if we can
        if source not in ('mailbox', 'imaps'):
            if hasattr(socket, 'setdefaulttimeout'):
                socket.setdefaulttimeout(60)

        if source == 'mailbox':
            return handler.do_mailbox(specification)
        elif source == 'pop' or source == 'pops':
            m = re.match(
                r'((?P<user>[^:]+)(:(?P<pass>.+))?@)?(?P<server>.+)',
                specification)
            if m:
                ssl = source.endswith('s')
                if ssl and sys.version_info < (2, 4):
                    return usage(argv,
                        _('Error: a later version of python is required'))
                return handler.do_pop(m.group('server'), m.group('user'),
                                      m.group('pass'), ssl)
            return usage(argv, _('Error: pop specification not valid'))
        elif source == 'apop':
            m = re.match(
                r'((?P<user>[^:]+)(:(?P<pass>.+))?@)?(?P<server>.+)',
                specification)
            if m:
                return handler.do_apop(m.group('server'), m.group('user'),
                                       m.group('pass'))
            return usage(argv, _('Error: apop specification not valid'))
        elif source == 'imap' or source == 'imaps':
            m = re.match(
                r'((?P<user>[^:]+)(:(?P<pass>.+))?@)?(?P<server>.+)',
                specification)
            if m:
                ssl = source.endswith('s')
                mailbox = ''
                if len(args) > 3:
                    mailbox = args[3]
                return handler.do_imap(m.group('server'), m.group('user'),
                                       m.group('pass'), mailbox, ssl)

        return usage(argv,
            _('Error: The source must be either "mailbox",'
              ' "pop", "apop", "imap" or "imaps"'))
    finally:
        # handler might have closed the initial db and opened a new one
        handler.db.close()
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-

import sys
import os
import re
from roundup import date
from roundup import instance
from roundup.password import Password, encodePassword

dir = os.getcwd()
tracker = instance.open(dir)
db = tracker.open('admin')

name_re = re.compile("^[0-9a-zA-Z/]+$")
ref_re = re.compile("^[-0-9a-zA-Z/]+$")

for id in db.department.getnodeids(retired=False):
    name = db.department.get(id, 'doc_num')
    if name and not name_re.match(name):
        print >> sys.stderr, "Wrong name for department %s: %s" % (id, name)

for cl, regex in \
    ( (db.product_type, name_re)
    , (db.reference,    ref_re)
    , (db.artefact,     name_re)
    ):
    for id in cl.getnodeids(retired=False):
        name = cl.get(id, 'name')
        if not name:
            print >> sys.stderr, "Wrong name (empty or None) for %s %s" \
                % (cl.classname, id)
            continue
        if not regex.match(name):
            print >> sys.stderr, \
                "Wrong name for %s %s: %s" % (cl.classname, id, name)
import os
import roundup
from roundup import instance

HOME = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.dirname(HOME)
TRACKER_HOME = os.path.join(ROOT, "trackers")

trackers = (
    dict(name="gsib", url="gsib/",
         tracker=instance.open(os.path.join(TRACKER_HOME, "tracker"))),
    dict(name="demo", url="demo/",
         tracker=instance.open(os.path.join(TRACKER_HOME, "demo"))),
)
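# Hypothetical smoke test for the trackers tuple above: open each tracker
# as 'admin' and count its issues. This sketch assumes the trackers use a
# schema with an issue class, as in the other examples here; it is not
# part of the original module.
if __name__ == '__main__':
    for t in trackers:
        db = t['tracker'].open('admin')
        print('%s: %d issues' % (t['name'], len(db.issue.list())))
        db.close()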