def testEmptyPasswordNotSet(self):
    """An empty password field — with or without an (also empty)
    confirmation field — must parse to no property change at all on an
    existing user."""
    empty_forms = (
        {'password': ''},
        {'password': '', ':confirm:password': ''},
    )
    for form in empty_forms:
        uid = self.db.user.create(username='******',
                                  password=password.Password('foo'))
        parsed = self.parseForm(form, 'user', uid)
        # (props, links): no props for this user, no links created.
        self.assertEqual(parsed, ({('user', uid): {}}, []))
def setUp(self): self.dirname = '_test_xmlrpc' # set up and open a tracker self.instance = db_test_base.setupTracker(self.dirname, self.backend) # open the database self.db = self.instance.open('admin') print("props_only default", self.db.security.get_props_only_default()) # Get user id (user4 maybe). Used later to get data from db. self.joeid = 'user' + self.db.user.create( username='******', password=password.Password('random'), address='*****@*****.**', realname='Joe Random', roles='User') self.db.commit() self.db.close() self.db = self.instance.open('joe') self.db.tx_Source = 'web' self.db.issue.addprop(tx_Source=hyperdb.String()) self.db.msg.addprop(tx_Source=hyperdb.String()) self.db.post_init() tx_Source_init(self.db) self.server = RoundupInstance(self.db, self.instance.actions, None)
def handle(self):
    '''Debug/demo action: create a throw-away user with a random
    password and report state to stdout.

    Side effects: creates one user (named after the current date),
    assigns the configured new-user roles and a random password, and
    commits.  No return value is required.
    '''
    db = self.client.db
    # Python-3 print calls (the originals were py2 print statements).
    if not self.client.session_api.get('oauth_token'):
        print('oath is %s' % (self.client.session_api.get('oauth_token')))
    else:
        print('Do stuff if ')
        print('oath is %s' % (self.client.session_api.get('oauth_token')))
    print(db.user.list())
    today = date.Date()
    props = {'username': '******' + str(today)}
    passwd = password.generatePassword(100)
    print(props)
    userid = db.user.create(**props)
    self.db.user.set(userid, roles=self.db.config['NEW_WEB_USER_ROLES'])
    self.db.user.set(userid, password=password.Password(passwd))
    db.commit()
    # Bug fix: the original printed a *second* freshly generated
    # password instead of the one actually stored for the user.
    print("New password%s " % (passwd,))
    print("%s?@action=login1" % (self.client.base))
    print("db classname %s type %s" % (db.user.__class__.__name__,
                                       type(db.user)))
    # Typo fixed in the user-facing message ("loggedsxds" -> "logged").
    self.client.add_ok_message(self._('You are logged out'))
def testPasswordMigration(self):
    """Logging in with a password hashed under a deprecated scheme must
    transparently re-hash it with the preferred scheme; a second login
    must leave an already-migrated password untouched."""
    chef = self.db.user.lookup('Chef')
    cl = self._make_client(
        dict(__login_name='Chef', __login_password='******'))
    # Assume the "best" algorithm is the first known scheme and needs
    # no migration; every deprecated scheme should be migrated.
    for scheme in password.Password.deprecated_schemes:
        if scheme == 'crypt' and os.name == 'nt':
            continue  # crypt is not available on Windows
        legacy = password.Password('foo', scheme=scheme)
        self.assertEqual(legacy.needs_migration(), True)
        self.db.user.set(chef, password=legacy)
        self.db.commit()
        actions.LoginAction(cl).handle()
        migrated = self.db.user.get(chef, 'password')
        self.assertEqual(migrated, 'foo')
        self.assertEqual(migrated.needs_migration(), False)
    current = migrated
    self.assertEqual(current.needs_migration(), False)
    self.assertEqual(password.Password.known_schemes[0], current.scheme)
    # Logging in again must not change the stored hash.
    actions.LoginAction(cl).handle()
    after = self.db.user.get(chef, 'password')
    self.assertEqual(after, 'foo')
    self.assertEqual(after, current)
    cl.db.close()
def confirm_registration(self, otk):
    """Create the user whose registration data is stored under the
    one-time key *otk*, destroy the key, and return the new user id."""
    props = self.getOTKManager().getall(otk)
    # Re-hydrate typed properties: the OTK store keeps raw strings.
    for name, kind in self.user.getprops().items():
        raw = props.get(name, None)
        if raw is None:
            continue
        if isinstance(kind, hyperdb.Date):
            props[name] = date.Date(raw)
        elif isinstance(kind, hyperdb.Interval):
            props[name] = date.Interval(raw)
        elif isinstance(kind, hyperdb.Password):
            props[name] = password.Password(encrypted=raw)
    # Tag the creation journal entry as performed by 'admin'.
    self.journaltag = 'admin'
    # Create the new user with the configured default roles.
    props['roles'] = self.config.NEW_WEB_USER_ROLES
    userid = self.user.create(**props)
    # Consume the one-time key and persist everything.
    self.getOTKManager().destroy(otk)
    self.commit()
    return userid
def testPasswordConfigOption(self):
    """PASSWORD_PBKDF2_DEFAULT_ROUNDS must control the round count used
    when a legacy password is migrated during login."""
    chef = self.db.user.lookup('Chef')
    client = self._make_client(
        dict(__login_name='Chef', __login_password='******'))
    self.db.config.PASSWORD_PBKDF2_DEFAULT_ROUNDS = 1000
    # Store an MD5 hash, which is flagged for migration.
    old_pw = password.Password('foo', scheme='MD5')
    self.assertEqual(old_pw.needs_migration(), True)
    self.db.user.set(chef, password=old_pw)
    self.db.commit()
    # A successful login migrates to PBKDF2 with the configured rounds.
    actions.LoginAction(client).handle()
    new_pw = self.db.user.get(chef, 'password')
    self.assertEqual('PBKDF2', new_pw.scheme)
    self.assertEqual(1000, password.pbkdf2_unpack(new_pw.password)[0])
    client.db.close()
def setup_class(cls): '''All tests in this class use the same roundup instance. This instance persists across all tests. Create the tracker dir here so that it is ready for the create_app() method to be called. ''' # tests in this class. # set up and open a tracker cls.instance = db_test_base.setupTracker(cls.dirname, cls.backend) # open the database cls.db = cls.instance.open('admin') # add a user without edit access for status. cls.db.user.create(username="******", roles='User', password=password.Password('sekrit'), address='*****@*****.**') # set the url the test instance will run at. cls.db.config['TRACKER_WEB'] = "http://localhost:9001/" # set up mailhost so errors get reported to debuging capture file cls.db.config.MAILHOST = "localhost" cls.db.config.MAIL_HOST = "localhost" cls.db.config.MAIL_DEBUG = "../_test_tracker_mail.log" # added to enable csrf forgeries/CORS to be tested cls.db.config.WEB_CSRF_ENFORCE_HEADER_ORIGIN = "required" cls.db.config.WEB_ALLOWED_API_ORIGINS = "https://client.com" cls.db.config['WEB_CSRF_ENFORCE_HEADER_X-REQUESTED-WITH'] = "required" # disable web login rate limiting. The fast rate of tests # causes them to trip the rate limit and fail. cls.db.config.WEB_LOGIN_ATTEMPTS_MIN = 0 # enable static precompressed files cls.db.config.WEB_USE_PRECOMPRESSED_FILES = 1 cls.db.config.save() cls.db.commit() cls.db.close() # Force locale config to find locales in checkout not in # installed directories cls.backup_domain = i18n.DOMAIN cls.backup_locale_dirs = i18n.LOCALE_DIRS i18n.LOCALE_DIRS = ['locale'] i18n.DOMAIN = ''
def setUp(self): self.dirname = '_test_xmlrpc' # set up and open a tracker self.instance = db_test_base.setupTracker(self.dirname, self.backend) # open the database self.db = self.instance.open('admin') self.joeid = 'user' + self.db.user.create(username='******', password=password.Password('random'), address='*****@*****.**', realname='Joe Random', roles='User') self.db.commit() self.db.close() self.server = RoundupServer(self.dirname)
def verifyPassword(self, userid, givenpw):
    '''Verify the password that the user has supplied.

    Returns 1 on success, 0 on failure.  On success, when password
    migration is enabled and the stored hash uses a deprecated scheme,
    the hash is transparently re-written with the configured scheme.
    '''
    db = self.db
    stored = db.user.get(userid, 'password')
    if givenpw == stored:
        # Opportunistic upgrade of legacy hashes on successful login.
        if db.config.WEB_MIGRATE_PASSWORDS and stored.needs_migration():
            db.user.set(userid,
                        password=password.Password(givenpw,
                                                   config=db.config))
            db.commit()
        return 1
    # An unset stored password matches only an empty supplied password.
    return 1 if not givenpw and not stored else 0
def confirm_registration(self, otk):
    """Create the user whose registration data is stored under the
    one-time key *otk*, destroy the key, and return the new user id.

    Raises ValueError with a user-friendly message when the username
    already exists."""
    props = self.getOTKManager().getall(otk)
    # Re-hydrate typed properties: the OTK store keeps raw strings.
    for name, kind in self.user.getprops().items():
        raw = props.get(name, None)
        if raw is None:
            continue
        if isinstance(kind, hyperdb.Date):
            props[name] = date.Date(raw)
        elif isinstance(kind, hyperdb.Interval):
            props[name] = date.Interval(raw)
        elif isinstance(kind, hyperdb.Password):
            props[name] = password.Password(encrypted=raw)
    # tag new user creation with 'admin'
    self.journaltag = 'admin'
    cl = self.user
    props['roles'] = self.config.NEW_WEB_USER_ROLES
    try:
        # ASSUME:: ValueError raised during create due to key value
        # conflict. The exception message determines when to intercept
        # with a friendlier one.  If i18n translated the original
        # message this match fails and the raw (unfriendly) text is
        # re-raised instead.
        userid = cl.create(**props)
    except ValueError as e:
        username = props['username']
        # Try to make the error message less cryptic to the user.
        if str(e) == 'node with key "%s" exists' % username:
            raise ValueError(_("Username '%s' already exists." % username))
        raise
    # clear the props from the otk database
    self.getOTKManager().destroy(otk)
    # commit cl.create (and otk changes)
    self.commit()
    return userid
def verifyLogin(self):
    """Fetch the OAuth 2 profile for the session token and log the
    matching local user in, creating the account on first login.

    Raises LoginError when the remote profile cannot be resolved to a
    user or the user lacks the "Web Access" permission.
    """
    client_id = self.db.config.ext['OAUTH2_CLIENT_ID']
    profile_url = self.db.config.ext['OAUTH2_PROFILE']
    oauth_token = self.client.session_api.get('oauth_token')
    session = OAuth2Session(client_id, token=oauth_token)
    # hacky way to force the token to be sent in the query string;
    # the default is to send it in a header.
    session._client.default_token_placement = 'query'
    remote_user = session.get(profile_url).json()
    try:
        userid = self.db.user.lookup(remote_user['login'])
    except KeyError:
        # First login: create a local account with the configured
        # default roles and an unguessable random password.
        userid = self.db.user.create(username=remote_user['login'])
        self.db.user.set(userid, roles=self.db.config['NEW_WEB_USER_ROLES'])
        self.db.user.set(userid, address=remote_user['email'])
        passwd = password.generatePassword(100)
        self.db.user.set(userid, password=password.Password(passwd))
        self.db.commit()
    except Exception:
        # Fixed: was a bare "except:", which also swallowed SystemExit
        # and KeyboardInterrupt.  Any other failure still surfaces as a
        # failed login.
        raise exceptions.LoginError(self._('Invalid login'))
    self.client.userid = userid
    self.client.user = self.db.user.get(self.client.userid, 'username')
    if not self.hasPermission("Web Access"):
        raise exceptions.LoginError(
            self._("You do not have permission to login"))
def add_attr_local_user(self, **props):
    """Add the attributes `props` for a user to the local database if
    those are still empty. If 'self.autocreate' is False then the user
    is considered a new user.

    Side effects: may set properties on an existing user or create a
    new one; commits the database either way.
    """
    props['password'] = PW.Password(props['password'])
    self.db.journaltag = 'admin'
    try:
        self.client.userid = self.db.user.lookup(self.client.user)
        uid = self.client.userid
        if self.autocreate:
            # Update only the locally-empty values with the LDAP values.
            for pkey, new_value in props.items():
                try:
                    self.LOG.debug("Look key '%s' for user '%s'", pkey, uid)
                    value = self.db.user.get(uid, pkey)
                    self.LOG.debug("Value %r for key,user '%s','%s'",
                                   value, pkey, uid)
                    if not value:
                        self.LOG.debug(
                            "Set value %r for property %r of user '%s'",
                            new_value, pkey, self.client.user)
                        self.db.user.set(uid, **{pkey: new_value})
                except Exception:
                    # Fixed: Python 2 "except Exception, err_msg" is a
                    # syntax error under Python 3.  Behaviour is kept
                    # best-effort per property: log and continue.
                    self.LOG.exception(
                        "caught an exception, traceback follows...")
    except KeyError:
        # add new user to local database
        props['roles'] = self.db.config.NEW_WEB_USER_ROLES
        self.userid = self.db.user.create(**props)
        self.db.commit()
        ## ?? why do we re-read the userid ??
        # self.client.userid = self.db.user.lookup(self.client.user)
        msg = u"New account created for user '%s'" % props['username']
        self.LOG.debug(msg)
        self.client.ok_message.append(msg)
def install_demo(home, backend, template):
    """Install a demo tracker

    Parameters:
        home: tracker home directory path
        backend: database backend name
        template: tracker template
    """
    from roundup import init, instance, password

    # set up the config for this tracker
    template_dir = os.path.join('share', 'roundup', 'templates', template)
    # Load optional override ini file. Missing ini file is ignored.
    template_cfg = configuration.UserConfig(template_dir + "/config_ini.ini")
    config = configuration.CoreConfig(
        settings={i.name: i.get() for i in template_cfg.items()})
    config['TRACKER_HOME'] = home
    config['MAIL_DOMAIN'] = 'localhost'
    config['DATABASE'] = 'db'
    config['WEB_DEBUG'] = True
    # RDBMS connection settings only apply to the client/server backends.
    if backend in ('mysql', 'postgresql'):
        config['RDBMS_HOST'] = 'localhost'
        config['RDBMS_USER'] = '******'
        config['RDBMS_PASSWORD'] = '******'
        config['RDBMS_NAME'] = 'rounduptest'
    config['RDBMS_BACKEND'] = backend

    # see if we need to clean up existing directory
    if os.path.exists(home):
        if os.path.exists(home + '/config.ini'):
            # clear everything out to avoid conflicts with former
            # extensions and detectors
            print("Nuking directory left from the previous demo instance.")
            shutil.rmtree(home)
        else:
            print("Error: Refusing to nuke non-tracker directory:")
            print(" %s" % home)
            sys.exit(1)

    init.install(home, template_dir)
    # Remove config_ini.ini file from tracker_home (not template dir).
    # Ignore file not found - not all templates have
    # config_ini.ini files.
    try:
        os.remove(home + "/config_ini.ini")
    except OSError as e:
        # FileNotFound exception under py3
        if e.errno == 2:
            pass
        else:
            raise

    # don't have email flying around
    nosyreaction = os.path.join(home, 'detectors', 'nosyreaction.py')
    if os.path.exists(nosyreaction):
        os.remove(nosyreaction)
    # also remove a stale compiled .pyc of the detector, if any
    nosyreaction += 'c'
    if os.path.exists(nosyreaction):
        os.remove(nosyreaction)

    # figure basic params for server
    hostname = 'localhost'
    # pick a fairly odd, random port
    port = 8917
    while 1:
        print('Trying to set up web server on port %d ...' % port, )
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            s.connect((hostname, port))
        except socket.error as e:
            if not hasattr(e, 'args') or e.args[0] != errno.ECONNREFUSED:
                raise
            # connection refused: nothing listening, so the port is free
            print('should be ok.')
            break
        else:
            # something answered: port taken, try the next candidate
            s.close()
            print('already in use.')
            port += 100
    config['TRACKER_WEB'] = 'http://%s:%s/demo/' % (hostname, port)

    # write the config
    config['INSTANT_REGISTRATION'] = 1
    config.save(os.path.join(home, config.INI_FILE))

    # open the tracker and initialise
    tracker = instance.open(home)
    tracker.init(password.Password('admin'))

    # add the "demo" user
    db = tracker.open('admin')
    # FIXME: Move tracker-specific demo initialization into the tracker
    # templates.
    if os.path.basename(template) == 'minimal':
        db.user.create(username='******',
                       password=password.Password('demo'), roles='User')
    else:
        db.user.create(username='******',
                       password=password.Password('demo'),
                       realname='Demo User', roles='User')
    db.commit()
    db.close()
from roundup import password, date

# Priorities, most severe first; "order" controls sort position.
pri = db.getclass('priority')
for prio_name, prio_order in (("critical", "1"), ("urgent", "2"),
                              ("bug", "3"), ("feature", "4"),
                              ("wish", "5")):
    pri.create(name=prio_name, order=prio_order)

# Issue life-cycle states, in workflow order.
stat = db.getclass('status')
for stat_name, stat_order in (("unread", "1"), ("deferred", "2"),
                              ("chatting", "3"), ("need-eg", "4"),
                              ("in-progress", "5"), ("testing", "6"),
                              ("done-cbb", "7"), ("resolved", "8")):
    stat.create(name=stat_name, order=stat_order)

# Bootstrap users: the admin, the anonymous web user, and a test user.
user = db.getclass('user')
user.create(username="******", password=adminpw,
            address=admin_email, roles='Admin')
user.create(username="******", roles='Anonymous')
user.create(username='******', password=password.Password('testuser'),
            realname='Test User', roles='User',
            address='*****@*****.**')
def create(journaltag, create=True, debug=False, prefix=default_prefix):
    """Build a fresh in-memory test database from the standard schema.

    Loads schema.py, initial_data.py and the detectors from *prefix*
    via exec, then optionally creates a default 'User'-role account.
    Returns the open database.
    """
    # "Nuke" in-memory db
    db_nuke('')
    db = Database(new_config(debug), journaltag)
    # load standard schema
    if not prefix.startswith('/'):
        prefix = os.path.join(os.path.dirname(__file__), prefix)
    schema = os.path.join(prefix, 'schema.py')
    # The schema script runs with hyperdb's namespace plus our class
    # wrappers and the open db injected.
    vars = hyperdb.__dict__
    vars['Class'] = Class
    vars['FileClass'] = FileClass
    vars['IssueClass'] = IssueClass
    vars['db'] = db
    fd = open(schema)
    exec(compile(fd.read(), schema, 'exec'), vars)
    fd.close()
    initial_data = os.path.join(prefix, 'initial_data.py')
    # initial_data.py expects db, admin_email and adminpw in scope.
    vars = dict(db=db, admin_email='*****@*****.**',
                adminpw=password.Password('sekrit'))
    fd = open(initial_data)
    exec(compile(fd.read(), initial_data, 'exec'), vars)
    fd.close()
    # load standard detectors
    dirname = os.path.join(prefix, 'detectors')
    for fn in os.listdir(dirname):
        if not fn.endswith('.py'):
            continue
        vars = {}
        with open(os.path.join(dirname, fn)) as fd:
            exec(compile(fd.read(), os.path.join(dirname, fn), 'exec'),
                 vars)
        # each detector module must define init(db)
        vars['init'](db)
    tx_Source_init(db)
    # NOTE: the following string literal is retired schema code kept
    # for reference; it is never executed.
    '''
    status = Class(db, "status", name=String())
    status.setkey("name")
    priority = Class(db, "priority", name=String(), order=String())
    priority.setkey("name")
    keyword = Class(db, "keyword", name=String(), order=String())
    keyword.setkey("name")
    user = Class(db, "user", username=String(), password=Password(),
        assignable=Boolean(), age=Number(), roles=String(),
        address=String(), supervisor=Link('user'), realname=String(),
        alternate_addresses=String())
    user.setkey("username")
    file = FileClass(db, "file", name=String(), type=String(),
        comment=String(indexme="yes"), fooz=Password())
    file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))
    issue = IssueClass(db, "issue", title=String(indexme="yes"),
        status=Link("status"), nosy=Multilink("user"), deadline=Date(),
        foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
        priority=Link('priority'), spam=Multilink('msg'),
        feedback=Link('msg'))
    stuff = Class(db, "stuff", stuff=String())
    session = Class(db, 'session', title=String())
    msg = FileClass(db, "msg", date=Date(),
        author=Link("user", do_journal='no'), files=Multilink('file'),
        inreplyto=String(), messageid=String(), summary=String(),
        content=String(), recipients=Multilink("user", do_journal='no')
        )
    '''
    if create:
        db.user.create(username="******", roles='User',
                       password=password.Password('sekrit'),
                       address='*****@*****.**')
        db.security.addPermissionToRole('User', 'Email Access')
    # NOTE: this string literal is retired permission-setup code kept
    # for reference; it is never executed.
    '''
    db.security.addPermission(name='Register', klass='user')
    db.security.addPermissionToRole('User', 'Web Access')
    db.security.addPermissionToRole('Anonymous', 'Email Access')
    db.security.addPermissionToRole('Anonymous', 'Register', 'user')
    for cl in 'issue', 'file', 'msg', 'keyword':
        db.security.addPermissionToRole('User', 'View', cl)
        db.security.addPermissionToRole('User', 'Edit', cl)
        db.security.addPermissionToRole('User', 'Create', cl)
    for cl in 'priority', 'status':
        db.security.addPermissionToRole('User', 'View', cl)
    '''
    return db
def handle(self):
    """Complete an OpenID Connect login.

    Exchanges the authorization code for tokens, then logs in the
    matching local account; migrates OpenID 2 identities and creates a
    new account for a verified e-mail address when needed.

    Raises ValueError when the provider response is unusable or the
    account cannot be (safely) associated/created.
    """
    provider = select_provider(self.client.form)
    client = self.init_oic(provider)
    redirect_uri = self.redirect_uri(provider)
    client.redirect_uris = [redirect_uri]
    aresp = client.parse_response(AuthorizationResponse,
                                  info=self.client.env['QUERY_STRING'],
                                  sformat="urlencoded")
    args = {
        "code": aresp["code"],
        "redirect_uri": client.redirect_uris[0],
        "client_id": client.client_id,
        "client_secret": client.client_secret
    }
    resp = client.do_access_token_request(
        scope=self.scopes,
        state=aresp["state"],
        request_args=args,
        authn_method="client_secret_post",
        headers={"Accept": "application/json"},
    )
    if provider == PROVIDER_GITHUB:
        return self.on_github_response(client, resp)
    try:
        id_token = resp['id_token']
    except KeyError:
        # Fixed: Python 2 "raise ValueError, ..." statement syntax.
        raise ValueError("Missing id_token from provider")
    else:
        iss = id_token['iss']
    # Google historically issued tokens with a bare hostname issuer.
    if iss == 'accounts.google.com':
        iss = 'https://accounts.google.com'
    sub = id_token['sub']
    # find user by iss and sub
    oic_account = self.db.oic_account.filter(None, {
        'issuer': iss,
        'subject': sub
    })
    if oic_account:
        # there should be only one user with that ID
        assert len(oic_account) == 1
        user = self.db.oic_account.get(oic_account[0], 'user')
        return self.login(user)
    try:
        openid_id = id_token['openid_id']
        # find user by OpenID 2, then associate iss and sub
        user = self.db.user.filter(None, {'openids': openid_id})
        if user:
            assert len(user) == 1
            user = user[0]
            # store new oic credentials for this user
            self.db.oic_account.create(user=user, issuer=iss, subject=sub)
            # delete openid
            openids = self.db.user.get(user, 'openids').split()
            openids.remove(openid_id)
            self.db.user.set(user, openids=' '.join(openids))
            # commit and log in
            self.db.commit()
            return self.login(user)
    except KeyError:
        # no OpenID 2 migration
        pass
    # New user, request info from provider
    # XXX Google insists on GET
    userinfo = client.do_user_info_request(method="GET",
                                           state=aresp["state"])
    # NOTE(review): the original .encode('utf-8') calls (Python 2 str)
    # were dropped: under Python 3 these values are used as text
    # (split, DB filters), so they must stay str.
    name = userinfo['name']
    email = userinfo['email']
    email_verified = userinfo['email_verified']
    # If email is verified and there is already an account with the same
    # email, try to associate it. Otherwise, avoid creation of duplicate
    # accounts.
    users = self.db.user.filter(None, {'address': email})
    if users:
        if len(users) == 1 and email_verified:
            user = users[0]
            self.db.oic_account.create(user=user, issuer=iss, subject=sub)
            self.db.commit()
            self.client.add_ok_message(
                'You account has been successfully associated with your '
                'Google account.')
            return self.login(user)
        else:
            raise ValueError('There is already an account for %s' % email)
    # Look for unused account name
    initial_username = email.split("@")[0]
    username = initial_username
    suffix = 1
    while True:
        user = self.db.user.filter(None, {'username': username})
        if not user:
            break
        suffix += 1
        username = initial_username + str(suffix)
    # create account
    if email_verified:
        pw = password.Password(password.generatePassword())
        user = self.db.user.create(
            username=username,
            realname=name,
            password=pw,
            roles=self.db.config['NEW_WEB_USER_ROLES'],
            address=email)
        self.db.oic_account.create(user=user, issuer=iss, subject=sub)
        # complete login
        self.db.commit()
        return self.login(user)
    # email not verified: fail
    # In principle, it should be possible to do a email confirmation
    # here.  See previous versions of this file for an attempt to do so.
    # However, the confrego action does not support preserving the OIC
    # parameters, as they live in a different table. This could be fixed
    # by using an alternative confirmation action. Doing so is deferred
    # until need arises.
    # Fixed: Python 2 "raise ValueError, ..." statement syntax.
    raise ValueError("Your OpenID Connect account is not supported. "
                     "Please contact [email protected]")
def handle(self):
    """Performs an edit of all of a class' items in one go.

    The "rows" CGI var defines the CSV-formatted entries for the
    class. New nodes are identified by the ID 'X' (or any other
    non-existent ID) and removed lines are retired.
    """
    # local import: py3 replacement for the removed StringIO module
    from io import StringIO
    cl = self.db.classes[self.classname]
    # Fixed for py3: dict views have no .sort(); sorted() returns the
    # same sorted list of property names.
    idlessprops = sorted(cl.getprops(protected=0).keys())
    props = ['id'] + idlessprops

    # do the edit
    rows = StringIO(self.form['rows'].value)
    reader = csv.reader(rows)
    found = {}
    line = 0
    for values in reader:
        line += 1
        if line == 1:
            continue  # skip property names header
        if values == props:
            continue

        # extract the nodeid
        nodeid, values = values[0], values[1:]
        found[nodeid] = 1

        # see if the node exists
        if nodeid in ('x', 'X') or not cl.hasnode(nodeid):
            exists = 0
        else:
            exists = 1

        # confirm correct weight
        if len(idlessprops) != len(values):
            self.client.error_message.append(
                self._('Not enough values on line %(line)s')
                % {'line': line})
            return

        # extract the new values
        d = {}
        for name, value in zip(idlessprops, values):
            prop = cl.properties[name]
            value = value.strip()
            # only add the property if it has a value
            if value:
                # if it's a multilink, split it
                if isinstance(prop, hyperdb.Multilink):
                    value = value.split(':')
                elif isinstance(prop, hyperdb.Password):
                    value = password.Password(value)
                elif isinstance(prop, hyperdb.Interval):
                    value = date.Interval(value)
                elif isinstance(prop, hyperdb.Date):
                    value = date.Date(value)
                elif isinstance(prop, hyperdb.Boolean):
                    value = value.lower() in ('yes', 'true', 'on', '1')
                elif isinstance(prop, hyperdb.Number):
                    value = float(value)
                d[name] = value
            elif exists:
                # nuke the existing value
                if isinstance(prop, hyperdb.Multilink):
                    d[name] = []
                else:
                    d[name] = None

        # perform the edit
        if exists:
            # edit existing
            cl.set(nodeid, **d)
        else:
            # new node
            found[cl.create(**d)] = 1

    # retire the removed entries
    for nodeid in cl.list():
        # Fixed for py3: dict.has_key() is gone; use "in".
        if nodeid not in found:
            cl.retire(nodeid)

    # all OK
    self.db.commit()
    self.client.ok_message.append(self._('Items edited OK'))
def parse(self, create=0, num_re=re.compile(r'^\d+$')):
    """Parse item properties and their values from HTML FORM variables.

    Form variable names are either plain property names
    (``<propname>``, a property of the current context item) or
    prefixed with a designator as ``<designator>@<propname>`` — where
    "@" may also be ":". A designator is ``<classname><N>`` for an
    existing item, or ``<classname>-<N>`` for the N-th new item of
    that class to be created by this submission.

    Special form variables:

    ``@required``
        Comma-separated property names that must be supplied for the
        edit to succeed. Must appear in the form before the properties
        it refers to.
    ``@add@<propname>`` / ``@remove@<propname>``
        Modify a Multilink property by adding/removing the listed
        keys instead of replacing the whole list.
    ``@link@<propname>``
        Link the items named by the designators in the value to the
        given Link/Multilink property; collected in ``all_links``.
    ``@note`` / ``@file``
        Backwards-compatible shorthands that create a ``msg-1`` /
        ``file-N`` item from the value and link it to the context
        item's "messages" / "files" property (plus author/date for
        notes, and msg->files links when both are present).

    All other values are converted according to the property type
    (Link/Multilink key lookup, file upload handling for String
    content, Date/Interval/Boolean/Number conversion).

    Returns ``(all_props, all_links)``: ``all_props`` maps
    ``(classname, id)`` to a dict of properties to set — always
    containing an entry for the current context, even if empty — and
    ``all_links`` is a list of ``(classname, nodeid, propname,
    [(linked_cn, linked_id), ...])`` operations. An id of ``None``
    (or a negative string id) indicates an item to be created.
    FileClass entries without real "content" are pruned before
    returning.

    Raises FormError for malformed or inconsistent submissions.
    """
    # some very useful variables
    db = self.db
    form = self.form

    if not hasattr(self, 'FV_SPECIAL'):
        # generate the regexp for handling special form values,
        # built once and cached on the instance
        classes = '|'.join(db.classes.keys())
        # specials for parsePropsFromForm
        # handle the various forms (see unit tests)
        self.FV_SPECIAL = re.compile(self.FV_LABELS % classes,
                                     re.VERBOSE)
        self.FV_DESIGNATOR = re.compile(r'(%s)([-\d]+)' % classes)

    # these indicate the default class / item
    default_cn = self.classname
    default_cl = self.db.classes[default_cn]
    default_nodeid = self.nodeid

    # we'll store info about the individual class/item edit in these
    all_required = {}   # required props per class/item
    all_props = {}      # props to set per class/item
    got_props = {}      # props received per class/item
    all_propdef = {}    # note - only one entry per class
    all_links = []      # as many as are required

    # we should always return something, even empty, for the context
    all_props[(default_cn, default_nodeid)] = {}

    keys = form.keys()
    # NOTE(review): ``timezone`` (and the ``num_re`` parameter) are
    # not referenced in this body — confirm whether still needed
    timezone = db.getUserTimezone()

    # sentinels for the :note and :file props
    have_note = have_file = 0

    # extract the usable form labels from the form
    matches = []
    for key in keys:
        m = self.FV_SPECIAL.match(key)
        if m:
            matches.append((key, m.groupdict()))

    # now handle the matches
    for key, d in matches:
        if d['classname']:
            # we got a designator
            cn = d['classname']
            cl = self.db.classes[cn]
            nodeid = d['id']
            propname = d['propname']
        elif d['note']:
            # the special note field: becomes msg-1's content, linked
            # to the context item's "messages"
            cn = 'msg'
            cl = self.db.classes[cn]
            nodeid = '-1'
            propname = 'content'
            all_links.append(
                (default_cn, default_nodeid, 'messages',
                 [('msg', '-1')]))
            have_note = 1
        elif d['file']:
            # the special file field: handled against the context
            # item's "files" multilink below
            cn = default_cn
            cl = default_cl
            nodeid = default_nodeid
            propname = 'files'
        else:
            # default: property of the current context item
            cn = default_cn
            cl = default_cl
            nodeid = default_nodeid
            propname = d['propname']

        # the thing this value relates to is...
        this = (cn, nodeid)

        # skip implicit create if this isn't a create action
        if not create and nodeid is None:
            continue

        # get more info about the class, and the current set of
        # form props for it
        if cn not in all_propdef:
            all_propdef[cn] = cl.getprops()
        propdef = all_propdef[cn]
        if this not in all_props:
            all_props[this] = {}
        props = all_props[this]
        if this not in got_props:
            got_props[this] = {}

        # is this a link command?
        if d['link']:
            value = []
            for entry in self.extractFormList(form[key]):
                m = self.FV_DESIGNATOR.match(entry)
                if not m:
                    raise FormError(
                        self._('link "%(key)s" '
                               'value "%(entry)s" not a designator')
                        % locals())
                value.append((m.group(1), m.group(2)))

                # get details of linked class; make sure the linked
                # item has prop/got entries too
                lcn = m.group(1)
                lcl = self.db.classes[lcn]
                lnodeid = m.group(2)
                if lcn not in all_propdef:
                    all_propdef[lcn] = lcl.getprops()
                if (lcn, lnodeid) not in all_props:
                    all_props[(lcn, lnodeid)] = {}
                if (lcn, lnodeid) not in got_props:
                    got_props[(lcn, lnodeid)] = {}

            # make sure the link property is valid
            if (not isinstance(propdef[propname], hyperdb.Multilink)
                    and not isinstance(propdef[propname],
                                       hyperdb.Link)):
                raise FormError(
                    self._('%(class)s %(property)s '
                           'is not a link or multilink property') % {
                               'class': cn, 'property': propname})

            all_links.append((cn, nodeid, propname, value))
            continue

        # detect the special ":required" variable
        if d['required']:
            for entry in self.extractFormList(form[key]):
                m = self.FV_SPECIAL.match(entry)
                if not m:
                    raise FormError(
                        self._('The form action claims to '
                               'require property "%(property)s" '
                               'which doesn\'t exist')
                        % {'property': propname})
                if m.group('classname'):
                    # required property of another designated item
                    this = (m.group('classname'), m.group('id'))
                    entry = m.group('propname')
                if this not in all_required:
                    all_required[this] = []
                all_required[this].append(entry)
            continue

        # see if we're performing a special multilink action
        mlaction = 'set'
        if d['remove']:
            mlaction = 'remove'
        elif d['add']:
            mlaction = 'add'

        # does the property exist?
        if propname not in propdef:
            if mlaction != 'set':
                raise FormError(
                    self._('You have submitted a %(action)s '
                           'action for the property "%(property)s" '
                           'which doesn\'t exist') % {
                               'action': mlaction,
                               'property': propname})
            # the form element is probably just something we don't
            # care about - ignore it
            continue
        proptype = propdef[propname]

        # Get the form value. This value may be a MiniFieldStorage
        # or a list of MiniFieldStorages.
        value = form[key]

        # handle unpacking of the MiniFieldStorage / list form value
        if d['file']:
            assert isinstance(proptype, hyperdb.Multilink)
            # value is a file upload... we *always* handle multiple
            # files here (html5)
            if not isinstance(value, type([])):
                value = [value]
        elif isinstance(proptype, hyperdb.Multilink):
            value = self.extractFormList(value)
        else:
            # multiple values are not OK
            if isinstance(value, type([])):
                raise FormError(
                    self._('You have submitted more than one '
                           'value for the %s property') % propname)
            # value might be a single file upload
            if not getattr(value, 'filename', None):
                value = value.value.strip()

        # now that we have the props field, we need a teensy little
        # extra bit of help for the old :note field...
        if d['note'] and value:
            props['author'] = self.db.getuid()
            props['date'] = date.Date()

        # handle by type now
        if isinstance(proptype, hyperdb.Password):
            if not value:
                # ignore empty password values
                continue
            if d['confirm']:
                # ignore the "confirm" password value by itself
                continue
            # find the matching @confirm@ form value; the for/else
            # raises when no confirmation field was submitted at all
            for key, d in matches:
                if d['confirm'] and d['propname'] == propname:
                    confirm = form[key]
                    break
            else:
                raise FormError(
                    self._('Password and confirmation text '
                           'do not match'))
            if isinstance(confirm, type([])):
                raise FormError(
                    self._('You have submitted more than one '
                           'value for the %s property') % propname)
            if value != confirm.value:
                raise FormError(
                    self._('Password and confirmation text '
                           'do not match'))
            try:
                value = password.Password(value,
                                          scheme=proptype.scheme,
                                          config=self.db.config)
            except hyperdb.HyperdbValueError as msg:
                raise FormError(msg)
        elif d['file']:
            # This needs to be a Multilink and is checked above.
            # Each upload becomes a new file-N item whose content is
            # parsed out; the context item gets a link per file.
            fcn = 'file'
            fcl = self.db.classes[fcn]
            fpropname = 'content'
            if fcn not in all_propdef:
                all_propdef[fcn] = fcl.getprops()
            fpropdef = all_propdef[fcn]
            have_file = []
            for n, v in enumerate(value):
                if not hasattr(v, 'filename'):
                    raise FormError(self._('Not a file attachment'))
                # skip if the upload is empty
                if not v.filename:
                    continue
                fnodeid = str(-(n + 1))
                have_file.append(fnodeid)
                fthis = (fcn, fnodeid)
                if fthis not in all_props:
                    all_props[fthis] = {}
                fprops = all_props[fthis]
                all_links.append(
                    (cn, nodeid, 'files', [('file', fnodeid)]))
                fprops['content'] = self.parse_file(fpropdef, fprops,
                                                    v)
            # nothing further to record against the context item
            value = None
            nodeid = None
        elif isinstance(proptype, hyperdb.Multilink):
            # convert input to list of ids
            try:
                l = hyperdb.rawToHyperdb(self.db, cl, nodeid,
                                         propname, value)
            except hyperdb.HyperdbValueError as msg:
                raise FormError(msg)

            # now use that list of ids to modify the multilink
            if mlaction == 'set':
                value = l
            else:
                # we're modifying the list - get the current list
                # of ids
                if propname in props:
                    existing = props[propname]
                elif nodeid and not nodeid.startswith('-'):
                    existing = cl.get(nodeid, propname, [])
                else:
                    existing = []

                # now either remove or add
                if mlaction == 'remove':
                    # remove - handle situation where the id isn't
                    # in the list
                    for entry in l:
                        try:
                            existing.remove(entry)
                        except ValueError:
                            raise FormError(
                                self._('property '
                                       '"%(propname)s": "%(value)s" '
                                       'not currently in list') % {
                                           'propname': propname,
                                           'value': entry})
                else:
                    # add - easy, just don't dupe
                    for entry in l:
                        if entry not in existing:
                            existing.append(entry)
                value = existing
                # Sort the value in the same order used by
                # Multilink.from_raw.
                value.sort(key=int)
        elif value == '' or value == b'':
            # other types should be None'd if there's no value
            value = None
        else:
            # handle all other types
            try:
                # Try handling file upload
                if (isinstance(proptype, hyperdb.String)
                        and hasattr(value, 'filename')
                        and value.filename is not None):
                    value = self.parse_file(propdef, props, value)
                else:
                    value = hyperdb.rawToHyperdb(self.db, cl, nodeid,
                                                 propname, value)
            except hyperdb.HyperdbValueError as msg:
                raise FormError(msg)

        # register that we got this property
        if isinstance(proptype, hyperdb.Multilink):
            if value != []:
                got_props[this][propname] = 1
        elif value is not None:
            got_props[this][propname] = 1

        # get the old value
        if nodeid and not nodeid.startswith('-'):
            try:
                existing = cl.get(nodeid, propname)
            except KeyError:
                # this might be a new property for which there is
                # no existing value
                if propname not in propdef:
                    raise
            except IndexError as message:
                raise FormError(str(message))

            # make sure the existing multilink is sorted. We must
            # be sure to use the same sort order in all places,
            # since we want to compare values with "=" or "!=".
            # The canonical order (given in Multilink.from_raw) is
            # by the numeric value of the IDs.
            if isinstance(proptype, hyperdb.Multilink):
                existing.sort(key=int)

            # "missing" existing values may not be None
            if not existing:
                if isinstance(proptype, hyperdb.String):
                    # some backends store "missing" Strings as
                    # empty strings
                    if existing == self.db.BACKEND_MISSING_STRING:
                        existing = None
                elif isinstance(proptype, hyperdb.Number) or \
                        isinstance(proptype, hyperdb.Integer):
                    # some backends store "missing" Numbers as 0 :(
                    if existing == self.db.BACKEND_MISSING_NUMBER:
                        existing = None
                elif isinstance(proptype, hyperdb.Boolean):
                    # likewise Booleans
                    if existing == self.db.BACKEND_MISSING_BOOLEAN:
                        existing = None

            # if changed, set it
            if value != existing:
                props[propname] = value
        else:
            # don't bother setting empty/unset values
            if value is None:
                continue
            elif isinstance(proptype, hyperdb.Multilink) and \
                    value == []:
                continue
            elif isinstance(proptype, hyperdb.String) and value == '':
                continue
            props[propname] = value

    # check to see if we need to specially link files to the note
    if have_note and have_file:
        for fid in have_file:
            all_links.append(('msg', '-1', 'files', [('file', fid)]))

    # see if all the required properties have been supplied
    s = []
    for thing, required in all_required.items():
        # register the values we got
        got = got_props.get(thing, {})
        for entry in required[:]:
            if entry in got:
                required.remove(entry)

        # If a user doesn't have edit permission for a given
        # property, but the property is already set in the database,
        # we don't require a value.
        # NOTE(review): ``nodeid`` here is whatever the last loop
        # iteration left behind — confirm this is intended before
        # relying on it
        if not (create or nodeid is None):
            for entry in required[:]:
                if not self.db.security.hasPermission(
                        'Edit', self.client.userid, self.classname,
                        entry):
                    cl = self.db.classes[self.classname]
                    if cl.get(nodeid, entry) is not None:
                        required.remove(entry)

        # any required values not present?
        if not required:
            continue

        # tell the user to enter the values required
        s.append(self.ngettext(
            'Required %(class)s property %(property)s not supplied',
            'Required %(class)s properties %(property)s not supplied',
            len(required)) % {
                'class': self._(thing[0]),
                'property': ', '.join(map(self.gettext, required))})
    if s:
        raise FormError('\n'.join(s))

    # When creating a FileClass node, it should have a non-empty
    # content property to be created. When editing a FileClass node,
    # it should either have a non-empty content property or no
    # property at all. In the latter case, nothing will change.
    for (cn, id), props in list(all_props.items()):
        if id is not None and id.startswith('-') and not props:
            # new item (any class) with no content - ignore
            del all_props[(cn, id)]
        elif isinstance(self.db.classes[cn], hyperdb.FileClass):
            # three cases:
            #   id references existing file. If content is empty,
            #     remove content from form so we don't wipe
            #     existing file contents.
            #   id is -1, -2 ... I.E. a new file.
            #     if content is not defined remove all fields that
            #     reference that file.
            #     if content is defined, let it pass through even if
            #     content is empty. Yes people can upload/create
            #     empty files.
            if 'content' in props:
                if id is not None and \
                        not id.startswith('-') and \
                        not props['content']:
                    # This is an existing file with empty content
                    # value in the form.
                    del props['content']
            else:
                # this is a new file without any content property.
                if id is not None and id.startswith('-'):
                    del all_props[(cn, id)]
            # if this is a new file with content (even 0 length
            # content) allow it through and create the zero length
            # file.
    return all_props, all_links
def handle(self):
    """Performs an edit of all of a class' items in one go.

    The "rows" CGI var defines the CSV-formatted entries for the
    class. New nodes are identified by the ID 'X' (or any other
    non-existent ID) and removed lines are retired.

    Rejects non-POST requests, enforces Create/Edit/Retire
    permissions per item, and restores retired items whose id
    reappears in the CSV. Commits all changes at the end.
    """
    # ensure modification comes via POST
    if self.client.env['REQUEST_METHOD'] != 'POST':
        raise roundup.exceptions.Reject(self._('Invalid request'))

    # figure the properties list for the class
    cl = self.db.classes[self.classname]
    props_without_id = list(cl.getprops(protected=0))

    # the incoming CSV data will always have the properties in
    # columns sorted and starting with the "id" column
    props_without_id.sort()
    props = ['id'] + props_without_id

    # do the edit
    # NOTE(review): csv.reader over a BytesIO yields bytes rows on
    # Python 3 — confirm the io_ alias / value type against the
    # module's imports
    rows = io_.BytesIO(self.form['rows'].value)
    reader = csv.reader(rows)
    found = {}
    line = 0
    for values in reader:
        line += 1
        if line == 1: continue
        # skip property names header
        if values == props:
            continue

        # extract the itemid
        itemid, values = values[0], values[1:]
        found[itemid] = 1

        # see if the node exists
        if itemid in ('x', 'X') or not cl.hasnode(itemid):
            exists = 0
            # check permission to create this item
            if not self.hasPermission('Create',
                                      classname=self.classname):
                raise exceptions.Unauthorised(
                    self._(
                        'You do not have permission to create '
                        '%(class)s') % {'class': self.classname})
        elif cl.hasnode(itemid) and cl.is_retired(itemid):
            # If a CSV line just mentions an id and the corresponding
            # item is retired, then the item is restored.
            cl.restore(itemid)
            continue
        else:
            exists = 1

        # confirm correct weight
        if len(props_without_id) != len(values):
            self.client.add_error_message(
                self._('Not enough values on line %(line)s') %
                {'line': line})
            return

        # extract the new values
        d = {}
        for name, value in zip(props_without_id, values):
            # check permission to edit this property on this item
            if exists and not self.hasPermission(
                    'Edit', itemid=itemid, classname=self.classname,
                    property=name):
                raise exceptions.Unauthorised(
                    self._('You do not have permission to edit '
                           '%(class)s') % {'class': self.classname})
            prop = cl.properties[name]
            value = value.strip()
            # only add the property if it has a value
            if value:
                # if it's a multilink, split it
                if isinstance(prop, hyperdb.Multilink):
                    value = value.split(':')
                elif isinstance(prop, hyperdb.Password):
                    value = password.Password(value,
                                              config=self.db.config)
                elif isinstance(prop, hyperdb.Interval):
                    value = date.Interval(value)
                elif isinstance(prop, hyperdb.Date):
                    value = date.Date(value)
                elif isinstance(prop, hyperdb.Boolean):
                    value = value.lower() in ('yes', 'true', 'on',
                                              '1')
                elif isinstance(prop, hyperdb.Number):
                    value = float(value)
                d[name] = value
            elif exists:
                # nuke the existing value
                if isinstance(prop, hyperdb.Multilink):
                    d[name] = []
                else:
                    d[name] = None

        # perform the edit
        if exists:
            # edit existing
            cl.set(itemid, **d)
        else:
            # new node
            found[cl.create(**d)] = 1

    # retire the removed entries
    for itemid in cl.list():
        if itemid not in found:
            # check permission to retire this item
            if not self.hasPermission(
                    'Retire', itemid=itemid,
                    classname=self.classname):
                raise exceptions.Unauthorised(
                    self._(
                        'You do not have permission to retire '
                        '%(class)s') % {'class': self.classname})
            cl.retire(itemid)

    # all OK
    self.db.commit()

    self.client.add_ok_message(self._('Items edited OK'))
def parse(self, create=0, num_re=re.compile('^\d+$')): """ Item properties and their values are edited with html FORM variables and their values. You can: - Change the value of some property of the current item. - Create a new item of any class, and edit the new item's properties, - Attach newly created items to a multilink property of the current item. - Remove items from a multilink property of the current item. - Specify that some properties are required for the edit operation to be successful. In the following, <bracketed> values are variable, "@" may be either ":" or "@", and other text "required" is fixed. Most properties are specified as form variables: <propname> - property on the current context item <designator>"@"<propname> - property on the indicated item (for editing related information) Designators name a specific item of a class. <classname><N> Name an existing item of class <classname>. <classname>"-"<N> Name the <N>th new item of class <classname>. If the form submission is successful, a new item of <classname> is created. Within the submitted form, a particular designator of this form always refers to the same new item. Once we have determined the "propname", we look at it to see if it's special: @required The associated form value is a comma-separated list of property names that must be specified when the form is submitted for the edit operation to succeed. When the <designator> is missing, the properties are for the current context item. When <designator> is present, they are for the item specified by <designator>. The "@required" specifier must come before any of the properties it refers to are assigned in the form. @remove@<propname>=id(s) or @add@<propname>=id(s) The "@add@" and "@remove@" edit actions apply only to Multilink properties. The form value must be a comma-separate list of keys for the class specified by the simple form variable. The listed items are added to (respectively, removed from) the specified property. 
@link@<propname>=<designator> If the edit action is "@link@", the simple form variable must specify a Link or Multilink property. The form value is a comma-separated list of designators. The item corresponding to each designator is linked to the property given by simple form variable. These are collected up and returned in all_links. None of the above (ie. just a simple form value) The value of the form variable is converted appropriately, depending on the type of the property. For a Link('klass') property, the form value is a single key for 'klass', where the key field is specified in dbinit.py. For a Multilink('klass') property, the form value is a comma-separated list of keys for 'klass', where the key field is specified in dbinit.py. Note that for simple-form-variables specifiying Link and Multilink properties, the linked-to class must have a key field. For a String() property specifying a filename, the file named by the form value is uploaded. This means we try to set additional properties "filename" and "type" (if they are valid for the class). Otherwise, the property is set to the form value. For Date(), Interval(), Boolean(), and Number() properties, the form value is converted to the appropriate Any of the form variables may be prefixed with a classname or designator. Two special form values are supported for backwards compatibility: @note This is equivalent to:: @link@messages=msg-1 msg-1@content=value except that in addition, the "author" and "date" properties of "msg-1" are set to the userid of the submitter, and the current time, respectively. @file This is equivalent to:: @link@files=file-1 file-1@content=value The String content value is handled as described above for file uploads. If both the "@note" and "@file" form variables are specified, the action:: @link@msg-1@files=file-1 is also performed. We also check that FileClass items have a "content" property with actual content, otherwise we remove them from all_props before returning. 
The return from this method is a dict of (classname, id): properties ... this dict _always_ has an entry for the current context, even if it's empty (ie. a submission for an existing issue that doesn't result in any changes would return {('issue','123'): {}}) The id may be None, which indicates that an item should be created. """ # some very useful variables db = self.db form = self.form if not hasattr(self, 'FV_SPECIAL'): # generate the regexp for handling special form values classes = '|'.join(db.classes.keys()) # specials for parsePropsFromForm # handle the various forms (see unit tests) self.FV_SPECIAL = re.compile(self.FV_LABELS%classes, re.VERBOSE) self.FV_DESIGNATOR = re.compile(r'(%s)([-\d]+)'%classes) # these indicate the default class / item default_cn = self.classname default_cl = self.db.classes[default_cn] default_nodeid = self.nodeid # we'll store info about the individual class/item edit in these all_required = {} # required props per class/item all_props = {} # props to set per class/item got_props = {} # props received per class/item all_propdef = {} # note - only one entry per class all_links = [] # as many as are required # we should always return something, even empty, for the context all_props[(default_cn, default_nodeid)] = {} keys = form.keys() timezone = db.getUserTimezone() # sentinels for the :note and :file props have_note = have_file = 0 # extract the usable form labels from the form matches = [] for key in keys: m = self.FV_SPECIAL.match(key) if m: matches.append((key, m.groupdict())) # now handle the matches for key, d in matches: if d['classname']: # we got a designator cn = d['classname'] cl = self.db.classes[cn] nodeid = d['id'] propname = d['propname'] elif d['note']: # the special note field cn = 'msg' cl = self.db.classes[cn] nodeid = '-1' propname = 'content' all_links.append((default_cn, default_nodeid, 'messages', [('msg', '-1')])) have_note = 1 elif d['file']: # the special file field cn = 'file' cl = self.db.classes[cn] 
nodeid = '-1' propname = 'content' all_links.append((default_cn, default_nodeid, 'files', [('file', '-1')])) have_file = 1 else: # default cn = default_cn cl = default_cl nodeid = default_nodeid propname = d['propname'] # the thing this value relates to is... this = (cn, nodeid) # skip implicit create if this isn't a create action if not create and nodeid is None: continue # get more info about the class, and the current set of # form props for it if not all_propdef.has_key(cn): all_propdef[cn] = cl.getprops() propdef = all_propdef[cn] if not all_props.has_key(this): all_props[this] = {} props = all_props[this] if not got_props.has_key(this): got_props[this] = {} # is this a link command? if d['link']: value = [] for entry in self.extractFormList(form[key]): m = self.FV_DESIGNATOR.match(entry) if not m: raise FormError, self._('link "%(key)s" ' 'value "%(entry)s" not a designator') % locals() value.append((m.group(1), m.group(2))) # get details of linked class lcn = m.group(1) lcl = self.db.classes[lcn] lnodeid = m.group(2) if not all_propdef.has_key(lcn): all_propdef[lcn] = lcl.getprops() if not all_props.has_key((lcn, lnodeid)): all_props[(lcn, lnodeid)] = {} if not got_props.has_key((lcn, lnodeid)): got_props[(lcn, lnodeid)] = {} # make sure the link property is valid if (not isinstance(propdef[propname], hyperdb.Multilink) and not isinstance(propdef[propname], hyperdb.Link)): raise FormError, self._('%(class)s %(property)s ' 'is not a link or multilink property') % { 'class':cn, 'property':propname} all_links.append((cn, nodeid, propname, value)) continue # detect the special ":required" variable if d['required']: for entry in self.extractFormList(form[key]): m = self.FV_SPECIAL.match(entry) if not m: raise FormError, self._('The form action claims to ' 'require property "%(property)s" ' 'which doesn\'t exist') % { 'property':propname} if m.group('classname'): this = (m.group('classname'), m.group('id')) entry = m.group('propname') if not 
all_required.has_key(this): all_required[this] = [] all_required[this].append(entry) continue # see if we're performing a special multilink action mlaction = 'set' if d['remove']: mlaction = 'remove' elif d['add']: mlaction = 'add' # does the property exist? if not propdef.has_key(propname): if mlaction != 'set': raise FormError, self._('You have submitted a %(action)s ' 'action for the property "%(property)s" ' 'which doesn\'t exist') % { 'action': mlaction, 'property':propname} # the form element is probably just something we don't care # about - ignore it continue proptype = propdef[propname] # Get the form value. This value may be a MiniFieldStorage # or a list of MiniFieldStorages. value = form[key] # handle unpacking of the MiniFieldStorage / list form value if isinstance(proptype, hyperdb.Multilink): value = self.extractFormList(value) else: # multiple values are not OK if isinstance(value, type([])): raise FormError, self._('You have submitted more than one ' 'value for the %s property') % propname # value might be a file upload... if not hasattr(value, 'filename') or value.filename is None: # nope, pull out the value and strip it value = value.value.strip() # now that we have the props field, we need a teensy little # extra bit of help for the old :note field... 
if d['note'] and value: props['author'] = self.db.getuid() props['date'] = date.Date() # handle by type now if isinstance(proptype, hyperdb.Password): if not value: # ignore empty password values continue for key, d in matches: if d['confirm'] and d['propname'] == propname: confirm = form[key] break else: raise FormError, self._('Password and confirmation text ' 'do not match') if isinstance(confirm, type([])): raise FormError, self._('You have submitted more than one ' 'value for the %s property') % propname if value != confirm.value: raise FormError, self._('Password and confirmation text ' 'do not match') try: value = password.Password(value) except hyperdb.HyperdbValueError, msg: raise FormError, msg elif isinstance(proptype, hyperdb.Multilink): # convert input to list of ids try: l = hyperdb.rawToHyperdb(self.db, cl, nodeid, propname, value) except hyperdb.HyperdbValueError, msg: raise FormError, msg
def on_github_response(self, client, response):
    """Complete a GitHub OAuth login with the token exchange response.

    ``response`` is the token payload from GitHub; it must contain
    'access_token'. The authenticated user's profile is fetched from
    the GitHub API, then one of three paths is taken:

    - an existing oic_account record for this GitHub id -> log in;
    - an existing tracker user with the same (public) email address
      -> associate the GitHub account with it and log in;
    - otherwise -> create a new user with a random password and the
      configured NEW_WEB_USER_ROLES, associate, and log in.

    Raises ValueError on a bad token response, a failed profile
    fetch, a hidden email address, or duplicate account records.
    """
    if 'access_token' not in response:
        raise ValueError('Invalid response from GitHub.')

    # Grab their info from the GitHub API.
    token = response['access_token']
    user_info = client.http_request(
        'https://api.github.com/user',
        method='GET',
        headers={
            'Authorization': 'token {}'.format(token),
            'User-Agent': 'bugs.python.org',
            'Accept': 'application/json',
        },
    )
    if user_info.status_code != 200:
        raise ValueError('Could not fetch user information from GitHub.')
    user_info = user_info.json()

    # Login existing integrated accounts directly.
    github_issuer = PROVIDER_URL_MAP[PROVIDER_GITHUB]
    github_id = str(user_info['id'])
    oic_account = self.db.oic_account.filter(None, {
        'issuer': github_issuer,
        'subject': github_id
    })
    if oic_account:
        # (issuer, subject) should be unique; more than one match
        # means the data is corrupt and we refuse to guess
        if len(oic_account) > 1:
            raise ValueError(
                'There are multiple records with the same issuer. Please '
                'open a new issue at https://github.com/python/bugs.python.org.'
            )
        user = self.db.oic_account.get(oic_account[0], 'user')
        return self.login(user)

    github_username = user_info['login']
    github_name = user_info['name']
    github_email = user_info['email']
    # NOTE(review): the /user endpoint only exposes the *public*
    # email; users with a private email land here even though they
    # have one configured
    if github_email is None:
        raise ValueError(
            'Your email address couldn\'t be fetched from your GitHub '
            'profile. Please make it public in the "Public email" section of '
            'https://github.com/settings/profile.')

    # Associate an existing user account matched by email address.
    users = self.db.user.filter(None, {'address': github_email})
    if users:
        if len(users) > 1:
            raise ValueError(
                'There are multiple records with the same email address %s.'
                % github_email)
        user = users[0]
        self.db.oic_account.create(user=user,
                                   issuer=github_issuer,
                                   subject=github_id)
        self.db.user.set(user, github=github_username)
        self.db.commit()
        # fixed message typo: "You account" -> "Your account"
        self.client.add_ok_message(
            'Your account has been successfully associated with your '
            'GitHub account.')
        return self.login(user)

    # No match at all: provision a brand-new user. The random
    # password is never shown; login happens via OAuth.
    username = self.generate_username(github_username)
    passwd = password.Password(password.generatePassword())
    user = self.db.user.create(
        username=username,
        realname=github_name,
        github=github_username,
        password=passwd,
        roles=self.db.config['NEW_WEB_USER_ROLES'],
        address=github_email,
    )
    self.db.oic_account.create(user=user,
                               issuer=github_issuer,
                               subject=github_id)
    self.db.commit()
    return self.login(user)
raise print 'should be ok.' break else: s.close() print 'already in use.' port += 100 config['TRACKER_WEB'] = 'http://%s:%s/demo/' % (hostname, port) # write the config config['INSTANT_REGISTRATION'] = 1 config.save(os.path.join(home, config.INI_FILE)) # open the tracker and initialise tracker = instance.open(home) tracker.init(password.Password('admin')) # add the "demo" user db = tracker.open('admin') db.user.create(username='******', password=password.Password('demo'), realname='Demo User', roles='User') db.commit() db.close() def run_demo(home): """Run the demo tracker installed in ``home``""" cfg = configuration.CoreConfig(home) url = cfg["TRACKER_WEB"]
def setup_tracker(self, backend=None):
    """ Install and initialize tracker in dirname, return tracker instance.
        If directory exists, it is wiped out before the operation.

        The tracker directory name is made unique per call via a class-level
        counter; RDBMS connection settings may be overridden through the
        RDBMS_HOST / RDBMS_USER / RDBMS_PASSWORD environment variables.
    """
    self.__class__.count += 1
    self.dirname = '_test_init_%s' % self.count
    if backend:
        self.backend = backend
    self.config = config = configuration.CoreConfig()
    config.DATABASE = 'db'
    config.RDBMS_NAME = "rounduptestttt"
    config.RDBMS_HOST = "localhost"
    if 'RDBMS_HOST' in os.environ:
        config.RDBMS_HOST = os.environ['RDBMS_HOST']
    config.RDBMS_USER = "******"
    if 'RDBMS_USER' in os.environ:
        config.RDBMS_USER = os.environ['RDBMS_USER']
    config.RDBMS_PASSWORD = "******"
    if 'RDBMS_PASSWORD' in os.environ:
        config.RDBMS_PASSWORD = os.environ['RDBMS_PASSWORD']
    config.MAIL_DOMAIN = "your.tracker.email.domain.example"
    config.TRACKER_WEB = "http://localhost:4711/ttt/"
    config.RDBMS_TEMPLATE = "template0"
    config.MAIL_DEBUG = "maildebug"
    config.init_logging()
    # wipe any leftover tracker dir from a previous run before recreating
    self.tearDown()
    srcdir = os.path.join(os.path.dirname(__file__), '..')
    os.mkdir(self.dirname)
    # symlink the template pieces into the new tracker home; the
    # backend-specific schema file is linked in as plain 'schema.py'
    for f in ('detectors', 'extensions', 'html', 'initial_data.py',
              'lib', 'locale', 'schema', 'schemas/%s.py' % self.schemafile,
              'TEMPLATE-INFO.txt', 'utils'):
        ft = f
        if f.startswith('schemas'):
            ft = 'schema.py'
        os.symlink \
            ( os.path.abspath (os.path.join (srcdir, f))
            , os.path.join (self.dirname, ft)
            )
    config.RDBMS_BACKEND = self.backend
    self.config.save(os.path.join(self.dirname, 'config.ini'))
    tracker = instance.open(self.dirname)
    if tracker.exists():
        tracker.nuke()
    # NOTE(review): tracker.init is passed the RDBMS password (used as the
    # admin password here) -- confirm this is intended for the test setup
    tracker.init(password.Password(self.config.RDBMS_PASSWORD))
    self.tracker = tracker
    # LDAP Config
    config = self.tracker.config
    self.base_dn = 'OU=example,DC=example,DC=com'
    ldap_settings = dict \
        ( uri = 'ldap://do.not.care:389'
        , bind_dn = 'CN=system,OU=test'
        , password = '******'
        , base_dn = self.base_dn
        , update_ldap = 'True'
        , update_roundup = 'True'
        , objectclass = 'user'
        , ad_domains = 'ds1.internal'
        , no_starttls = 'False'
        , do_not_sync_roundup_properties = ''
        , do_not_sync_ldap_properties = ''
        , allowed_dn_suffix_by_domain = 'ext1.internal:OU=External'
        )
    limit_settings = dict \
        ( picture_sync_size = '9k'
        #, picture_quality = '80'
        )
    # register the LDAP/LIMIT options on the tracker's extension config
    config.ext = UserConfig()
    for k in ldap_settings:
        o = Option(config.ext, 'LDAP', k)
        config.ext.add_option(o)
        config.ext['LDAP_' + k.upper()] = ldap_settings[k]
    for k in limit_settings:
        o = Option(config.ext, 'LIMIT', k)
        config.ext.add_option(o)
        config.ext['LIMIT_' + k.upper()] = limit_settings[k]
    # Override before call to setup_ldap if necessary
    self.aux_ldap_parameters = {}
port += 100 config['TRACKER_WEB'] = 'http://%s:%s/demo/'%(hostname, port) # write the config config['INSTANT_REGISTRATION'] = 1 # FIXME: Move template-specific demo initialization into the templates. if template == 'responsive': config['STATIC_FILES'] = "static" if template == 'jinja2': config['TEMPLATE_ENGINE'] = 'jinja2' config['STATIC_FILES'] = "static" config.save(os.path.join(home, config.INI_FILE)) # open the tracker and initialise tracker = instance.open(home) tracker.init(password.Password('admin')) # add the "demo" user db = tracker.open('admin') # FIXME: Move tracker-specific demo initialization into the tracker templates. if template == 'minimal': db.user.create(username='******', password=password.Password('demo'), roles='User') else: db.user.create(username='******', password=password.Password('demo'), realname='Demo User', roles='User') db.commit() db.close() def run_demo(home): """Run the demo tracker instance from its ``home`` directory"""
class Database(rdbms_common.Database):
    """SQLite backend for the roundup hyperdb (legacy, Python 2 era).

    Maps hyperdb property types to SQLite column types and conversion
    callables, and provides connection handling with a busy-retry
    handler for locked databases.
    """
    # char to use for positional arguments
    if sqlite_version in (2,3):
        arg = '?'
    else:
        arg = '%s'

    # used by some code to switch styles of query
    implements_intersect = 1

    # hyperdb type -> SQLite column type
    hyperdb_to_sql_datatypes = {
        hyperdb.String : 'VARCHAR(255)',
        hyperdb.Date : 'VARCHAR(30)',
        hyperdb.Link : 'INTEGER',
        hyperdb.Interval : 'VARCHAR(255)',
        hyperdb.Password : '******',
        hyperdb.Boolean : 'BOOLEAN',
        hyperdb.Number : 'REAL',
    }
    # hyperdb type -> callable converting a Python value for SQL storage
    hyperdb_to_sql_value = {
        hyperdb.String : str,
        hyperdb.Date : lambda x: x.serialise(),
        hyperdb.Link : int,
        hyperdb.Interval : str,
        hyperdb.Password : str,
        hyperdb.Boolean : int,
        hyperdb.Number : lambda x: x,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }
    # hyperdb type -> callable converting a stored SQL value back to Python
    sql_to_hyperdb_value = {
        hyperdb.String : lambda x: isinstance(x, unicode) and x.encode('utf8') or str(x),
        hyperdb.Date : lambda x: date.Date(str(x)),
        hyperdb.Link : str, # XXX numeric ids
        hyperdb.Interval : date.Interval,
        hyperdb.Password : lambda x: password.Password(encrypted=x),
        hyperdb.Boolean : int,
        hyperdb.Number : rdbms_common._num_cvt,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }

    def sqlite_busy_handler(self, data, table, count):
        """invoked whenever SQLite tries to access a database that
           is locked

           Returns 1 to ask SQLite to retry, 0 to give up after the
           30-second window expires.
        """
        if count == 1:
            # use a 30 second timeout (extraordinarily generous)
            # for handling locked database
            self._busy_handler_endtime = time.time() + 30
        elif time.time() > self._busy_handler_endtime:
            # timeout expired - no more retries
            return 0
        # sleep adaptively as retry count grows,
        # starting from about half a second
        time_to_sleep = 0.01 * (2 << min(5, count))
        time.sleep(time_to_sleep)
        return 1

    def sql_open_connection(self):
        '''Open a standard, non-autocommitting connection.

        pysqlite will automatically BEGIN TRANSACTION for us.

        Returns a (connection, cursor) pair.
        '''
        # make sure the database directory exists
        # database itself will be created by sqlite if needed
        if not os.path.isdir(self.config.DATABASE):
            os.makedirs(self.config.DATABASE)

        db = os.path.join(self.config.DATABASE, 'db')
        logging.getLogger('hyperdb').info('open database %r'%db)
        # set a 30 second timeout (extraordinarily generous) for handling
        # locked database
        if sqlite_version == 1:
            # pysqlite v1 API: install our retry handler directly
            conn = sqlite.connect(db=db)
            conn.db.sqlite_busy_handler(self.sqlite_busy_handler)
        else:
            conn = sqlite.connect(db, timeout=30)
            conn.row_factory = sqlite.Row
        cursor = conn.cursor()
        return (conn, cursor)

    def open_connection(self):
        # ensure files are group readable and writable
        os.umask(self.config.UMASK)
        (self.conn, self.cursor) = self.sql_open_connection()

        try:
            self.load_dbschema()
        except sqlite.DatabaseError, error:
            # a missing schema table means this is a brand-new database:
            # bootstrap the schema/ids tables; any other error is fatal
            if str(error) != 'no such table: schema':
                raise
            self.init_dbschema()
            self.sql('create table schema (schema varchar)')
            self.sql('create table ids (name varchar, num integer)')
            self.sql('create index ids_name_idx on ids(name)')
            self.create_version_2_tables()
    def add_new_columns_v2(self):
        '''While we're adding the actor column, we need to update the
        tables to have the correct datatypes.

        For every class: re-create its multilink tables, then copy the
        main class table's rows through the type converters into a
        freshly created table, and finally rebuild the journal table.
        '''
        for klass in self.classes.values():
            cn = klass.classname
            properties = klass.getprops()
            old_spec = self.database_schema['tables'][cn]

            # figure the non-Multilink properties to copy over
            propnames = ['activity', 'creation', 'creator']

            # figure actions based on data type
            for name, s_prop in old_spec[1]:
                # s_prop is a repr() string of a hyperdb type object
                if s_prop.find('Multilink') == -1:
                    if name in properties:
                        propnames.append(name)
                    continue
                tn = '%s_%s'%(cn, name)

                if name in properties:
                    # grabe the current values
                    sql = 'select linkid, nodeid from %s'%tn
                    self.sql(sql)
                    rows = self.cursor.fetchall()

                # drop the old table
                self.drop_multilink_table_indexes(cn, name)
                sql = 'drop table %s'%tn
                self.sql(sql)

                if name in properties:
                    # re-create and populate the new table
                    self.create_multilink_table(klass, name)
                    sql = '''insert into %s (linkid, nodeid) values
                        (%s, %s)'''%(tn, self.arg, self.arg)
                    for linkid, nodeid in rows:
                        self.sql(sql, (int(linkid), int(nodeid)))

            # figure the column names to fetch
            fetch = ['_%s'%name for name in propnames]

            # select the data out of the old table
            fetch.append('id')
            fetch.append('__retired__')
            fetchcols = ','.join(fetch)
            sql = 'select %s from _%s'%(fetchcols, cn)
            self.sql(sql)

            # unserialise the old data
            olddata = []
            propnames = propnames + ['id', '__retired__']
            cols = []
            # 'first' flag: column names are collected only on the first row
            first = 1
            for entry in self.cursor.fetchall():
                l = []
                olddata.append(l)
                for i in range(len(propnames)):
                    name = propnames[i]
                    v = entry[i]

                    if name in ('id', '__retired__'):
                        if first:
                            cols.append(name)
                        l.append(int(v))
                        continue
                    if first:
                        cols.append('_' + name)
                    prop = properties[name]

                    # deserialise the old stored value into its hyperdb type
                    if isinstance(prop, hyperdb.Date) and v is not None:
                        v = date.Date(v)
                    elif isinstance(prop, hyperdb.Interval) and v is not None:
                        v = date.Interval(v)
                    elif isinstance(prop, hyperdb.Password) and v is not None:
                        v = password.Password(encrypted=v)
                    elif isinstance(prop, hyperdb.Integer) and v is not None:
                        v = int(v)
                    elif (isinstance(prop, hyperdb.Boolean) or
                            isinstance(prop, hyperdb.Number)) and v is not None:
                        v = float(v)

                    # convert to new MySQL data type
                    prop = properties[name]
                    if v is not None:
                        e = self.to_sql_value(prop.__class__)(v)
                    else:
                        e = None
                    l.append(e)

                    # Intervals store the seconds value too
                    if isinstance(prop, hyperdb.Interval):
                        if first:
                            cols.append('__' + name + '_int__')
                        if v is not None:
                            l.append(v.as_seconds())
                        else:
                            l.append(e)
                first = 0

            self.drop_class_table_indexes(cn, old_spec[0])

            # drop the old table
            self.sql('drop table _%s'%cn)

            # create the new table
            self.create_class_table(klass)

            # do the insert of the old data
            args = ','.join([self.arg for x in cols])
            cols = ','.join(cols)
            sql = 'insert into _%s (%s) values (%s)'%(cn, cols, args)
            for entry in olddata:
                self.sql(sql, tuple(entry))

            # now load up the old journal data to migrate it
            cols = ','.join('nodeid date tag action params'.split())
            sql = 'select %s from %s__journal'%(cols, cn)
            self.sql(sql)

            # data conversions
            olddata = []
            for nodeid, journaldate, journaltag, action, params in \
                    self.cursor.fetchall():
                #nodeid = int(nodeid)
                journaldate = date.Date(journaldate)
                #params = eval(params)
                olddata.append((nodeid, journaldate, journaltag, action,
                    params))

            # drop journal table and indexes
            self.drop_journal_table_indexes(cn)
            sql = 'drop table %s__journal'%cn
            self.sql(sql)

            # re-create journal table
            self.create_journal_table(klass)
            dc = self.to_sql_value(hyperdb.Date)
            for nodeid, journaldate, journaltag, action, params in olddata:
                self.save_journal(cn, cols, nodeid, dc(journaldate),
                    journaltag, action, params)

            # make sure the normal schema update code doesn't try to
            # change things
            self.database_schema['tables'][cn] = klass.schema()
class Database(rdbms_common.Database): """Sqlite DB backend implementation attributes: dbtype: holds the value for the type of db. It is used by indexer to identify the database type so it can import the correct indexer module when using native text search mode. """ # char to use for positional arguments if sqlite_version in (2, 3): arg = '?' else: arg = '%s' dbtype = "sqlite" # used by some code to switch styles of query implements_intersect = 1 # used in generic backend to determine if db supports # 'DOUBLE PRECISION' for floating point numbers. Note that sqlite # already has double precision as its standard 'REAL' type. So this # is set to False here. implements_double_precision = False hyperdb_to_sql_datatypes = { hyperdb.String: 'VARCHAR(255)', hyperdb.Date: 'VARCHAR(30)', hyperdb.Link: 'INTEGER', hyperdb.Interval: 'VARCHAR(255)', hyperdb.Password: '******', hyperdb.Boolean: 'BOOLEAN', hyperdb.Number: 'REAL', hyperdb.Integer: 'INTEGER', } hyperdb_to_sql_value = { hyperdb.String: str, hyperdb.Date: lambda x: x.serialise(), hyperdb.Link: int, hyperdb.Interval: str, hyperdb.Password: str, hyperdb.Boolean: int, hyperdb.Integer: int, hyperdb.Number: lambda x: x, hyperdb.Multilink: lambda x: x, # used in journal marshalling } sql_to_hyperdb_value = { hyperdb.String: uany2s, hyperdb.Date: lambda x: date.Date(str(x)), hyperdb.Link: str, # XXX numeric ids hyperdb.Interval: date.Interval, hyperdb.Password: lambda x: password.Password(encrypted=x), hyperdb.Boolean: int, hyperdb.Integer: int, hyperdb.Number: rdbms_common._num_cvt, hyperdb.Multilink: lambda x: x, # used in journal marshalling } # We're using DBM for managing session info and one-time keys: # For SQL database storage of this info we would need two concurrent # connections to the same database which SQLite doesn't support def getSessionManager(self): if not self.Session: self.Session = Sessions(self) return self.Session def getOTKManager(self): if not self.Otk: self.Otk = OneTimeKeys(self) return self.Otk 
    def sqlite_busy_handler(self, data, table, count):
        """invoked whenever SQLite tries to access a database that
           is locked

           Returns 1 to ask SQLite to retry, 0 to give up once the
           configured timeout window has expired.
        """
        now = time.time()
        if count == 1:
            # Timeout for handling locked database (default 30s)
            self._busy_handler_endtime = now + self.config.RDBMS_SQLITE_TIMEOUT
        elif now > self._busy_handler_endtime:
            # timeout expired - no more retries
            return 0
        # sleep adaptively as retry count grows,
        # starting from about half a second
        time_to_sleep = 0.01 * (2 << min(5, count))
        time.sleep(time_to_sleep)
        return 1

    def sql_open_connection(self):
        """Open a standard, non-autocommitting connection.

        pysqlite will automatically BEGIN TRANSACTION for us.

        Returns a (connection, cursor) pair.
        """
        # make sure the database directory exists
        # database itself will be created by sqlite if needed
        if not os.path.isdir(self.config.DATABASE):
            os.makedirs(self.config.DATABASE)

        db = os.path.join(self.config.DATABASE, 'db')
        logging.getLogger('roundup.hyperdb').info('open database %r' % db)
        # set timeout (30 second default is extraordinarily generous)
        # for handling locked database
        if sqlite_version == 1:
            # pysqlite v1 API: install our retry handler directly
            conn = sqlite.connect(db=db)
            conn.db.sqlite_busy_handler(self.sqlite_busy_handler)
        else:
            conn = sqlite.connect(db, timeout=self.config.RDBMS_SQLITE_TIMEOUT)
            conn.row_factory = sqlite.Row

        # pysqlite2 / sqlite3 want us to store Unicode in the db but
        # that's not what's been done historically and it's definitely
        # not what the other backends do, so we'll stick with UTF-8
        if sqlite_version in (2, 3):
            conn.text_factory = str

        cursor = conn.cursor()
        return (conn, cursor)

    def open_connection(self):
        # ensure files are group readable and writable
        os.umask(self.config.UMASK)

        (self.conn, self.cursor) = self.sql_open_connection()

        try:
            self.load_dbschema()
        except sqlite.DatabaseError as error:
            # a missing schema table means this is a brand-new database:
            # bootstrap the schema/ids tables; any other error is fatal
            if str(error) != 'no such table: schema':
                raise
            self.init_dbschema()
            self.sql('create table schema (schema varchar)')
            self.sql('create table ids (name varchar, num integer)')
            self.sql('create index ids_name_idx on ids(name)')
            self.create_version_2_tables()

    def create_version_2_tables(self):
        # one-time keys and web session storage
        self.sql('create table otks (otk_key varchar, '
                 'otk_value varchar, otk_time integer)')
        self.sql('create index otks_key_idx on otks(otk_key)')
        self.sql('create table sessions (session_key varchar, '
                 'session_time integer, session_value varchar)')
        self.sql('create index sessions_key_idx on '
                 'sessions(session_key)')

        # full-text indexing store
        self.sql(
            'CREATE TABLE __textids (_class varchar, '
            '_itemid varchar, _prop varchar, _textid integer primary key) ')
        self.sql('CREATE TABLE __words (_word varchar, '
                 '_textid integer)')
        self.sql('CREATE INDEX words_word_ids ON __words(_word)')
        self.sql('CREATE INDEX words_by_id ON __words (_textid)')
        self.sql('CREATE UNIQUE INDEX __textids_by_props ON '
                 '__textids (_class, _itemid, _prop)')
        sql = 'insert into ids (name, num) values (%s,%s)' % (self.arg,
                                                              self.arg)
        self.sql(sql, ('__textids', 1))

    def add_new_columns_v2(self):
        # update existing tables to have the new actor column
        tables = self.database_schema['tables']
        for classname, spec in self.classes.items():
            if classname in tables:
                dbspec = tables[classname]
                self.update_class(spec, dbspec, force=1, adding_v2=1)
                # we've updated - don't try again
                tables[classname] = spec.schema()

    def fix_version_3_tables(self):
        # NOOP - no restriction on column length here
        pass

    def update_class(self, spec, old_spec, force=0, adding_v2=0):
        """ Determine the differences between the current spec and the
            database version of the spec, and update where necessary.

            If 'force' is true, update the database anyway.

            SQLite doesn't have ALTER TABLE, so we have to copy and
            regenerate the tables with the new schema.

            Returns 0 when nothing changed, 1 when the table was rebuilt.
        """
        new_spec = spec.schema()
        new_spec[1].sort()
        old_spec[1].sort()
        if not force and new_spec == old_spec:
            # no changes
            return 0

        logging.getLogger('roundup.hyperdb').info('update_class %s' %
                                                  spec.classname)

        # detect multilinks that have been removed, and drop their table
        old_has = {}
        for name, prop in old_spec[1]:
            old_has[name] = 1
            if name in spec.properties or not isinstance(
                    prop, hyperdb.Multilink):
                continue
            # it's a multilink, and it's been removed - drop the old
            # table. First drop indexes.
            self.drop_multilink_table_indexes(spec.classname, name)
            # NOTE(review): this interpolates 'prop' (the property object)
            # into the table name rather than 'name' -- looks suspicious;
            # the multilink table is created as '<class>_<propname>'.
            # Confirm before changing.
            sql = 'drop table %s_%s' % (spec.classname, prop)
            self.sql(sql)

        # now figure how we populate the new table
        if adding_v2:
            fetch = ['_activity', '_creation', '_creator']
        else:
            fetch = ['_actor', '_activity', '_creation', '_creator']
        properties = spec.getprops()
        for propname, x in new_spec[1]:
            prop = properties[propname]
            if isinstance(prop, hyperdb.Multilink):
                if propname not in old_has:
                    # we need to create the new table
                    self.create_multilink_table(spec, propname)
                elif force:
                    tn = '%s_%s' % (spec.classname, propname)
                    # grabe the current values
                    sql = 'select linkid, nodeid from %s' % tn
                    self.sql(sql)
                    rows = self.cursor.fetchall()

                    # drop the old table
                    self.drop_multilink_table_indexes(spec.classname,
                                                      propname)
                    sql = 'drop table %s' % tn
                    self.sql(sql)

                    # re-create and populate the new table
                    self.create_multilink_table(spec, propname)
                    sql = """insert into %s (linkid, nodeid) values
                        (%s, %s)""" % (tn, self.arg, self.arg)
                    for linkid, nodeid in rows:
                        self.sql(sql, (int(linkid), int(nodeid)))
            elif propname in old_has:
                # we copy this col over from the old table
                fetch.append('_' + propname)

        # select the data out of the old table
        fetch.append('id')
        fetch.append('__retired__')
        fetchcols = ','.join(fetch)
        cn = spec.classname
        sql = 'select %s from _%s' % (fetchcols, cn)
        self.sql(sql)
        olddata = self.cursor.fetchall()

        # TODO: update all the other index dropping code
        self.drop_class_table_indexes(cn, old_spec[0])

        # drop the old table
        self.sql('drop table _%s' % cn)

        # create the new table
        self.create_class_table(spec)

        if olddata:
            inscols = [
                'id', '_actor', '_activity', '_creation', '_creator',
                '__retired__'
            ]
            for propname, x in new_spec[1]:
                prop = properties[propname]
                if isinstance(prop, hyperdb.Multilink):
                    continue
                elif isinstance(prop, hyperdb.Interval):
                    # Intervals store an extra integer seconds column
                    inscols.append('_' + propname)
                    inscols.append('__' + propname + '_int__')
                elif propname in old_has:
                    # we copy this col over from the old table
                    inscols.append('_' + propname)

            # do the insert of the old data - the new columns will have
            # NULL values
            args = ','.join([self.arg for x in inscols])
            cols = ','.join(inscols)
            sql = 'insert into _%s (%s) values (%s)' % (cn, cols, args)
            for entry in olddata:
                d = []
                retired_id = None
                for name in inscols:
                    # generate the new value for the Interval int column
                    if name.endswith('_int__'):
                        name = name[2:-6]
                        if sqlite_version in (2, 3):
                            # sqlite Row raises IndexError for missing cols
                            try:
                                v = hyperdb.Interval(
                                    entry[name]).as_seconds()
                            except IndexError:
                                v = None
                        elif name in entry:
                            v = hyperdb.Interval(entry[name]).as_seconds()
                        else:
                            v = None
                    elif sqlite_version in (2, 3):
                        try:
                            v = entry[name]
                        except IndexError:
                            v = None
                    elif (sqlite_version == 1 and name in entry):
                        v = entry[name]
                    else:
                        v = None
                    if name == 'id':
                        retired_id = v
                    elif name == '__retired__' and retired_id and v not in [
                            '0', 0
                    ]:
                        # retired rows record their own id in __retired__
                        v = retired_id
                    d.append(v)
                self.sql(sql, tuple(d))

        return 1

    def sql_close(self):
        """ Squash any error caused by us already having closed the
            connection.
        """
        try:
            self.conn.close()
        except sqlite.ProgrammingError as value:
            if str(value) != 'close failed - Connection is closed.':
                raise

    def sql_rollback(self):
        """ Squash any error caused by us having closed the connection (and
            therefore not having anything to roll back)
        """
        try:
            self.conn.rollback()
        except sqlite.ProgrammingError as value:
            if str(value) != 'rollback failed - Connection is closed.':
                raise

    def __repr__(self):
        return '<roundlite 0x%x>' % id(self)

    def sql_commit(self):
        """ Actually commit to the database.

        Ignore errors if there's nothing to commit.
        """
        try:
            self.conn.commit()
        except sqlite.DatabaseError as error:
            if str(error) != 'cannot commit - no transaction is active':
                raise
        # open a new cursor for subsequent work
        self.cursor = self.conn.cursor()

    def sql_index_exists(self, table_name, index_name):
        # returns 1/0 depending on whether the named index exists
        self.sql('pragma index_list(%s)' % table_name)
        for entry in self.cursor.fetchall():
            if entry[1] == index_name:
                return 1
        return 0

    # old-skool id generation
    def newid(self, classname):
        """ Generate a new id for the given class """

        # Prevent other processes from reading while we increment.
        # Otherwise multiple processes can end up with the same
        # new id and hilarity results.
        #
        # Defeat pysqlite's attempts to do locking by setting
        # isolation_level to None. Pysqlite can commit
        # on it's own even if we don't want it to end the transaction.
        # If we rewrite to use another sqlite library like apsw we
        # don't have to deal with this autocommit/autotransact foolishness.
        self.conn.isolation_level = None

        # Manage the transaction locks manually.
        self.sql("BEGIN IMMEDIATE")

        # get the next ID
        sql = 'select num from ids where name=%s' % self.arg
        self.sql(sql, (classname, ))
        newid = int(self.cursor.fetchone()[0])

        # leave the next larger number as the next newid
        sql = 'update ids set num=num+1 where name=%s' % self.arg
        vals = (classname, )
        self.sql(sql, vals)

        # reset pysqlite's auto transact stuff to default since the
        # rest of the code expects it.
        self.conn.isolation_level = ''

        # commit writing the data, clearing locks for other processes
        # and create a new cursor to the database.
        self.sql_commit()

        # return as string
        return str(newid)

    def setid(self, classname, setid):
        """ Set the id counter: used during import of database

        We add one to make it behave like the sequences in postgres.
        """
        sql = 'update ids set num=%s where name=%s' % (self.arg, self.arg)
        vals = (int(setid) + 1, classname)
        self.sql(sql, vals)

    def clear(self):
        rdbms_common.Database.clear(self)

        # set the id counters to 0 (setid adds one) so we start at 1
        for cn in self.classes.keys():
            self.setid(cn, 0)

    def create_class(self, spec):
        rdbms_common.Database.create_class(self, spec)
        # seed the per-class id counter
        sql = 'insert into ids (name, num) values (%s, %s)' % (self.arg,
                                                               self.arg)
        vals = (spec.classname, 1)
        self.sql(sql, vals)

    if sqlite_version in (2, 3):
        def load_journal(self, classname, cols, nodeid):
            """We need to turn the sqlite3.Row into a tuple so it can be
            unpacked"""
            l = rdbms_common.Database.load_journal(self, classname, cols,
                                                   nodeid)
            cols = range(5)
            return [[row[col] for col in cols] for row in l]
def import_xml(tracker_home, xml_file, file_dir):
    """ Generate Roundup tracker import files based on the tracker schema,
    sf.net xml export and downloaded files from sf.net.

    Parses the sf.net artifact XML, maps sf.net users/categories/priorities
    onto the tracker's schema, and writes CSV import files for the user,
    keyword, issue, msg and file classes plus the issue journal.
    """
    tracker = instance.open(tracker_home)
    db = tracker.open('admin')

    resolved = db.status.lookup('resolved')
    unread = db.status.lookup('unread')
    # fixed: this previously looked up 'unread' again, which made the
    # unread -> chatting transition below a silent no-op
    chatting = db.status.lookup('chatting')

    critical = db.priority.lookup('critical')
    urgent = db.priority.lookup('urgent')
    bug = db.priority.lookup('bug')
    feature = db.priority.lookup('feature')
    wish = db.priority.lookup('wish')

    adminuid = db.user.lookup('admin')
    anonuid = db.user.lookup('anonymous')

    root = ElementTree.parse(xml_file).getroot()

    def to_date(ts):
        # sf.net timestamps are epoch seconds; convert to roundup Date
        return date.Date(time.gmtime(float(ts)))

    # parse out the XML
    artifacts = []
    categories = set()
    users = set()
    add_files = set()
    remove_files = set()
    for artifact in root.find('artifacts'):
        d = {}
        op = {}    # synthesised "opening post" message for the description
        artifacts.append(d)
        for field in artifact.findall('field'):
            name = field.get('name')
            if name == 'artifact_messages':
                for message in field.findall('message'):
                    l = d.setdefault('messages', [])
                    m = {}
                    l.append(m)
                    for field in message.findall('field'):
                        name = field.get('name')
                        if name == 'adddate':
                            m[name] = to_date(field.text)
                        else:
                            m[name] = field.text
                        if name == 'user_name':
                            users.add(field.text)
            elif name == 'artifact_history':
                for event in field.findall('history'):
                    l = d.setdefault('history', [])
                    e = {}
                    l.append(e)
                    for field in event.findall('field'):
                        name = field.get('name')
                        if name == 'entrydate':
                            e[name] = to_date(field.text)
                        else:
                            e[name] = field.text
                        if name == 'mod_by':
                            users.add(field.text)
                    # track file add/delete events so we only import files
                    # that are still attached
                    if e['field_name'] == 'File Added':
                        add_files.add(e['old_value'].split(':')[0])
                    elif e['field_name'] == 'File Deleted':
                        remove_files.add(e['old_value'].split(':')[0])
            elif name == 'details':
                op['body'] = field.text
            elif name == 'submitted_by':
                op['user_name'] = field.text
                d[name] = field.text
                users.add(field.text)
            elif name == 'open_date':
                thedate = to_date(field.text)
                op['adddate'] = thedate
                d[name] = thedate
            else:
                d[name] = field.text
        categories.add(d['category'])
        # the issue description becomes the first message
        if 'body' in op:
            l = d.setdefault('messages', [])
            l.insert(0, op)

    add_files -= remove_files

    # create users
    userd = {'nobody': '2'}
    users.remove('nobody')
    data = [
        {'id': '1', 'username': '******',
         'password': password.Password('admin'),
         'roles': 'Admin', 'address': '*****@*****.**'},
        {'id': '2', 'username': '******', 'roles': 'Anonymous'},
    ]
    for n, user in enumerate(list(users)):
        userd[user] = n+3
        data.append({'id': str(n+3), 'username': user, 'roles': 'User',
                     'address': '*****@*****.**'%user})
    write_csv(db.user, data)
    users = userd

    # create categories (mapped onto roundup keywords)
    categoryd = {'None': None}
    categories.remove('None')
    data = []
    for n, category in enumerate(list(categories)):
        categoryd[category] = n
        data.append({'id': str(n), 'name': category})
    write_csv(db.keyword, data)
    categories = categoryd

    # create issues
    issue_data = []
    file_data = []
    message_data = []
    issue_journal = []
    message_id = 0
    for artifact in artifacts:
        d = {}
        d['id'] = artifact['artifact_id']
        d['title'] = artifact['summary']
        d['assignedto'] = users[artifact['assigned_to']]
        if d['assignedto'] == '2':
            # 'nobody' on sf.net means unassigned here
            d['assignedto'] = None
        d['creation'] = artifact['open_date']
        activity = artifact['open_date']
        d['creator'] = users[artifact['submitted_by']]
        actor = d['creator']
        if categories[artifact['category']]:
            d['keyword'] = [categories[artifact['category']]]
        issue_journal.append((
            d['id'], d['creation'].get_tuple(), d['creator'], "'create'", {}
        ))
        # map sf.net numeric priority onto the tracker's priority values
        p = int(artifact['priority'])
        if artifact['artifact_type'] == 'Feature Requests':
            if p > 3:
                d['priority'] = feature
            else:
                d['priority'] = wish
        else:
            if p > 7:
                d['priority'] = critical
            elif p > 5:
                d['priority'] = urgent
            elif p > 3:
                d['priority'] = bug
            else:
                d['priority'] = feature
        s = artifact['status']
        if s == 'Closed':
            d['status'] = resolved
        elif s == 'Deleted':
            d['status'] = resolved
            d['is retired'] = True
        else:
            d['status'] = unread

        nosy = set()
        for message in artifact.get('messages', []):
            authid = users[message['user_name']]
            if not message['body']:
                continue
            body = convert_message(message['body'], message_id)
            if not body:
                continue
            m = {'content': body, 'author': authid,
                 'date': message['adddate'],
                 'creation': message['adddate'], }
            message_data.append(m)
            if authid not in (None, '2'):
                nosy.add(authid)
            activity = message['adddate']
            actor = authid
            # issues with messages beyond the description are 'chatting'
            if d['status'] == unread:
                d['status'] = chatting

        # add import message
        # NOTE(review): 'today' is not defined in this function --
        # presumably a module-level `today = date.Date()`; confirm.
        m = {'content': 'IMPORT FROM SOURCEFORGE', 'author': '1',
             'date': today, 'creation': today}
        message_data.append(m)

        # sort messages and assign ids
        # NOTE(review): message_data accumulates across ALL artifacts, so
        # this re-sorts/re-ids every previously seen message and links them
        # all to the current issue -- looks like it should operate on a
        # per-artifact list. Behaviour preserved; verify against known-good
        # import output before changing.
        d['messages'] = []
        message_data.sort(key=lambda a: a['date'])
        for message in message_data:
            message_id += 1
            message['id'] = str(message_id)
            d['messages'].append(message_id)

        d['nosy'] = list(nosy)

        files = []
        for event in artifact.get('history', []):
            if event['field_name'] == 'File Added':
                fid, name = event['old_value'].split(':', 1)
                if fid in add_files:
                    files.append(fid)
                    name = name.strip()
                    # best-effort read of the downloaded attachment;
                    # a missing/unreadable file is recorded as placeholder
                    # content (was a bare `except:` -- narrowed to IOError)
                    try:
                        with open(os.path.join(file_dir, fid), 'rb') as f:
                            content = f.read()
                    except IOError:
                        content = 'content missing'
                    file_data.append({
                        'id': fid,
                        'creation': event['entrydate'],
                        'creator': users[event['mod_by']],
                        'name': name,
                        'type': mimetypes.guess_type(name)[0],
                        'content': content,
                    })
                continue
            elif event['field_name'] == 'close_date':
                action = "'set'"
                info = {'status': unread}
            elif event['field_name'] == 'summary':
                action = "'set'"
                info = {'title': event['old_value']}
            else:
                # not an interesting / translatable event
                continue
            row = [d['id'], event['entrydate'].get_tuple(),
                   users[event['mod_by']], action, info]
            if event['entrydate'] > activity:
                activity = event['entrydate']
            issue_journal.append(row)
        d['files'] = files
        d['activity'] = activity
        d['actor'] = actor
        issue_data.append(d)

    write_csv(db.issue, issue_data)
    write_csv(db.msg, message_data)
    write_csv(db.file, file_data)

    with open('/tmp/imported/issue-journals.csv', 'w') as f:
        writer = csv.writer(f, colon_separated)
        writer.writerows(issue_journal)
def handle(self):
    """Handle password reset requests.

    Presence of either "name" or "address" generates email. Presence of
    "otk" performs the reset.

    On a valid OTK: generates a new random password, stores it on the
    user (re-opening the db as "admin" if needed), destroys the OTK,
    and emails the new password to the user's registered address.
    """
    otks = self.db.getOTKManager()

    if self.form.has_key('otk'):
        # pull the rego information out of the otk database
        otk = self.form['otk'].value
        uid = otks.get(otk, 'uid', default=None)
        if uid is None:
            self.client.error_message.append(
                self._("Invalid One Time Key!\n"
                       "(a Mozilla bug may cause this message "
                       "to show up erroneously, please check your email)"))
            return

        # re-open the database as "admin"
        # (required to have permission to set another user's password)
        if self.user != 'admin':
            self.client.opendb('admin')
            self.db = self.client.db
            # re-fetch the OTK manager from the newly opened db
            otks = self.db.getOTKManager()

        # change the password
        newpw = password.generatePassword()

        cl = self.db.user
        # XXX we need to make the "default" page be able to display errors!
        try:
            # set the password
            cl.set(uid, password=password.Password(newpw))
            # clear the props from the otk database
            otks.destroy(otk)
            self.db.commit()
        except (ValueError, KeyError), message:
            self.client.error_message.append(str(message))
            return

        # user info
        address = self.db.user.get(uid, 'address')
        name = self.db.user.get(uid, 'username')

        # send the email
        tracker_name = self.db.config.TRACKER_NAME
        subject = 'Password reset for %s' % tracker_name
        body = '''
The password has been reset for username "%(name)s".

Your password is now: %(password)s
''' % {'name': name, 'password': newpw}
        # standard_message returns falsy on send failure; error already
        # reported by the client in that case
        if not self.client.standard_message([address], subject, body):
            return

        self.client.ok_message.append(
            self._('Password reset and email sent to %s') % address)
        return
def create(journaltag, create=True, debug=False):
    """Create a test database using the classic template schema.

    journaltag: the user name recorded in journal entries.
    create: when true, also create the initial 'joe' test user.
    debug: passed through to new_config().

    Returns the opened Database instance.
    """
    db = Database(new_config(debug), journaltag)

    # load standard schema
    schema = os.path.join(os.path.dirname(__file__),
                          '../share/roundup/templates/classic/schema.py')
    vars = dict(globals())
    vars['db'] = db
    execfile(schema, vars)
    initial_data = os.path.join(os.path.dirname(__file__),
                                '../share/roundup/templates/classic/initial_data.py')
    vars = dict(db=db, admin_email='*****@*****.**',
                adminpw=password.Password('sekrit'))
    execfile(initial_data, vars)

    # load standard detectors
    thisdir = os.path.dirname(__file__)
    dirname = os.path.join(thisdir,
                           '../share/roundup/templates/classic/detectors')
    for fn in os.listdir(dirname):
        if not fn.endswith('.py'):
            continue
        vars = {}
        execfile(os.path.join(dirname, fn), vars)
        # each detector module exposes an init(db) entry point
        vars['init'](db)

    vars = {}
    execfile(os.path.join(thisdir, "tx_Source_detector.py"), vars)
    vars['init'](db)

    # NOTE: the following triple-quoted blocks are dead string statements
    # kept as a record of the previous hand-built test schema.
    '''
    status = Class(db, "status", name=String())
    status.setkey("name")

    priority = Class(db, "priority", name=String(), order=String())
    priority.setkey("name")

    keyword = Class(db, "keyword", name=String(), order=String())
    keyword.setkey("name")

    user = Class(db, "user", username=String(), password=Password(),
        assignable=Boolean(), age=Number(), roles=String(), address=String(),
        supervisor=Link('user'),realname=String(),alternate_addresses=String())
    user.setkey("username")

    file = FileClass(db, "file", name=String(), type=String(),
        comment=String(indexme="yes"), fooz=Password())
    file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))

    issue = IssueClass(db, "issue", title=String(indexme="yes"),
        status=Link("status"), nosy=Multilink("user"), deadline=Date(),
        foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
        priority=Link('priority'), spam=Multilink('msg'),
        feedback=Link('msg'))

    stuff = Class(db, "stuff", stuff=String())

    session = Class(db, 'session', title=String())

    msg = FileClass(db, "msg", date=Date(),
        author=Link("user", do_journal='no'),
        files=Multilink('file'), inreplyto=String(),
        messageid=String(),
        summary=String(), content=String(),
        recipients=Multilink("user", do_journal='no')
        )
    '''
    if create:
        db.user.create(username="******", roles='User',
                       password=password.Password('sekrit'),
                       address='*****@*****.**')
        db.security.addPermissionToRole('User', 'Email Access')
        '''
        db.security.addPermission(name='Register', klass='user')
        db.security.addPermissionToRole('User', 'Web Access')
        db.security.addPermissionToRole('Anonymous', 'Email Access')
        db.security.addPermissionToRole('Anonymous', 'Register', 'user')
        for cl in 'issue', 'file', 'msg', 'keyword':
            db.security.addPermissionToRole('User', 'View', cl)
            db.security.addPermissionToRole('User', 'Edit', cl)
            db.security.addPermissionToRole('User', 'Create', cl)
        for cl in 'priority', 'status':
            db.security.addPermissionToRole('User', 'View', cl)
        '''
    return db