def get_session(self, url, stypes):
    sessions = self.db.openid_session.filter(None, {'url': url})
    for session_id in sessions:
        # Match may not have been exact
        if self.db.openid_session.get(session_id, 'url') != url:
            continue
        expires = self.db.openid_session.get(session_id, 'expires')
        if expires > date.Date('.') + date.Interval("1:00"):
            # valid for another hour
            return self.db.openid_session.getnode(session_id)
    now = date.Date('.')
    session_data = openid2rp.associate(stypes, url)
    # check whether a session has expired a day ago
    sessions = self.db.openid_session.filter(None, {'expires': 'to -1d'})
    if sessions:
        session = self.db.openid_session.getnode(sessions[0])
        session.assoc_handle = session_data['assoc_handle']
    else:
        session_id = self.db.openid_session.create(
            assoc_handle=session_data['assoc_handle'])
        session = self.db.openid_session.getnode(session_id)
    session.url = url
    session.mac_key = session_data['mac_key']
    session.expires = now + date.Interval(int(session_data['expires_in']))
    self.db.commit()
    return session
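# Usage sketch (the endpoint URL and service types are hypothetical): the
# association is cached in openid_session and only re-negotiated via
# openid2rp.associate() once less than an hour of validity remains.
session = self.get_session('https://openid.example.org/endpoint',
                           ['http://specs.openid.net/auth/2.0/server'])
assoc_handle = session.assoc_handle   # sent along with the checkid request
mac_key = session.mac_key             # used to verify the signed response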
def testEmptyDateSet(self):
    nodeid = self.db.test.create(date=date.Date('.'))
    self.assertEqual(self.parseForm({'date': ''}, 'test', nodeid),
        ({('test', nodeid): {'date': None}}, []))
    nodeid = self.db.test.create(date=date.Date('1970-01-01.00:00:00'))
    self.assertEqual(self.parseForm({'date': ' '}, 'test', nodeid),
        ({('test', nodeid): {'date': None}}, []))
def lastUserActivity(self):
    if ':lastactivity' in self.form:
        d = date.Date(self.form[':lastactivity'].value)
    elif '@lastactivity' in self.form:
        d = date.Date(self.form['@lastactivity'].value)
    else:
        return None
    d.second = int(d.second)
    return d
def lastUserActivity(self):
    if self.form.has_key(':lastactivity'):
        d = date.Date(self.form[':lastactivity'].value)
    elif self.form.has_key('@lastactivity'):
        d = date.Date(self.form['@lastactivity'].value)
    else:
        return None
    d.second = int(d.second)
    return d
def processorder(db, cl, nodeid, oldvalues):
    ''' For now let's just process the order files received from HBO.... '''
    log_stream = StringIO()
    logging.basicConfig(stream=log_stream, level=logging.INFO)
    if cl.get(nodeid, 'files'):
        if oldvalues and oldvalues['files']:
            new_file = [x for x in cl.get(nodeid, 'files')
                        if x not in oldvalues['files']]
        else:
            new_file = cl.get(nodeid, 'files')
        if new_file:
            try:
                logging.info("Processing file:" + db.file.get(new_file[0], 'name'))
                content = db.file.get(new_file[0], 'content')
                csv_reader = csv.DictReader(content.splitlines()[1:])
                for row in csv_reader:
                    if not db.episode.filter(None, {'title': row['Title']}):
                        episode_id = db.episode.create(
                            title=row['Title'],
                            duration=int(float(row['Length'])),
                            barcode=row['Barcode'],
                            screenerid=row['Screener Id'],
                            deadline=date.Date(row['Deadline']),
                            client=cl.get(nodeid, 'partner'))
                        logging.info('%s ... Episode%s created'
                                     % (row['Title'], episode_id))
                        task_id = db.issue.create(supplier='TPM',
                                                  episode=episode_id,
                                                  tasktype='csf',
                                                  status='new')
                        logging.info('Task%s created to collect source files'
                                     % (task_id))
                    else:
                        # filter() returns a list of matching ids; use the
                        # first match as the existing episode
                        episode_id = db.episode.filter(
                            None, {'title': row['Title']})[0]
                        logging.info('%s ... Episode%s exists, creation skipped.'
                                     % (row['Title'], episode_id))
                    if not db.project.filter(None, {'episode': episode_id,
                                                    'supplier': cl.get(nodeid, 'partner'),
                                                    'projecttype': row['Category'],
                                                    'language': row['Language']}):
                        project_id = db.project.create(episode=episode_id,
                                                       projecttype=row['Category'],
                                                       language=row['Language'],
                                                       order=nodeid,
                                                       status='new')
                        logging.info('%s ... Project%s created'
                                     % (row['Title'], project_id))
            except Exception as e:
                logging.error(traceback.format_exc())
            msg = {}
            msg['content'] = log_stream.getvalue()
            msg['date'] = date.Date('.')
            msg['author'] = str(db.getuid())
            msg_id = db.msg.create(**msg)
            log_stream.close()
            all_messages = [msg_id]
            if cl.get(nodeid, 'messages'):
                all_messages = all_messages + cl.get(nodeid, 'messages')
            cl.set(nodeid, messages=all_messages)
            db.commit()
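# A hypothetical order file in the layout processorder() expects: one
# preamble line (dropped by splitlines()[1:]) followed by the header row
# consumed by csv.DictReader, then one row per episode. Column names are
# taken from the code above; the values are made up.
SAMPLE_ORDER = (
    "HBO order export (preamble line, ignored)\n"
    "Title,Length,Barcode,Screener Id,Deadline,Category,Language\n"
    "Example Episode,52.0,BC00017,SCR-4711,2024-06-30,subtitling,de\n"
)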
def testSetDate(self):
    self.assertEqual(self.parseForm({'date': '2003-01-01'}),
        ({('test', None): {'date': date.Date('2003-01-01')}}, []))
    nodeid = self.db.test.create(date=date.Date('2003-01-01'))
    self.assertEqual(
        self.parseForm({'date': '2003-01-01'}, 'test', nodeid),
        ({('test', nodeid): {}}, []))
def handle(self):
    ''' Perform some action. No return value is required. '''
    db = self.client.db
    if not self.client.session_api.get('oauth_token'):
        print 'oauth is %s' % (self.client.session_api.get('oauth_token'))
    else:
        print 'Do stuff if '
        print 'oauth is %s' % (self.client.session_api.get('oauth_token'))
    # print db.config.ext['QA_RECIPIENTS']
    # print type(self.db.config.ext['DATAUG_OAUTH2_SCOPE'].split(","))
    # print self.db.config.ext['DATAUG_OAUTH2_CLIENT_ID']
    # print self.db.config.ext['DATAUG_OAUTH2_AUTHORIZATION_BASE_URL']
    print db.user.list()
    today = date.Date()
    props = {'username': '******' + str(today)}
    passwd = password.generatePassword(100)
    # print str(today)
    print props
    userid = db.user.create(**props)
    self.db.user.set(userid, roles=self.db.config['NEW_WEB_USER_ROLES'])
    self.db.user.set(userid, password=password.Password(passwd))
    db.commit()
    print "New password %s" % (passwd)
    print "%s?@action=login1" % (self.client.base)
    print "db classname %s type %s" % (db.user.__class__.__name__, type(db.user))
    self.client.add_ok_message(self._('You are logged out'))
def confirm_registration(self, otk):
    props = self.getOTKManager().getall(otk)
    for propname, proptype in self.user.getprops().items():
        value = props.get(propname, None)
        if value is None:
            pass
        elif isinstance(proptype, hyperdb.Date):
            props[propname] = date.Date(value)
        elif isinstance(proptype, hyperdb.Interval):
            props[propname] = date.Interval(value)
        elif isinstance(proptype, hyperdb.Password):
            props[propname] = password.Password(encrypted=value)

    # tag new user creation with 'admin'
    self.journaltag = 'admin'

    # create the new user
    cl = self.user
    props['roles'] = self.config.NEW_WEB_USER_ROLES
    userid = cl.create(**props)

    # clear the props from the otk database
    self.getOTKManager().destroy(otk)
    self.commit()
    return userid
def refreeze(freezelist):
    d = date.Date('2005-12-31')
    for f in reversed(freezelist):
        if f.date != d:
            db.daily_record_freeze.set(f.id, frozen=False)
    for f in freezelist:
        if f.date != d:
            db.daily_record_freeze.set(f.id, frozen=True)
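# Minimal sketch (schema assumed from the function above) of building the
# freezelist argument from the tracker and applying the re-freeze:
ids = db.daily_record_freeze.filter(None, {}, sort=[('+', 'date')])
freezelist = [db.daily_record_freeze.getnode(i) for i in ids]
refreeze(freezelist)
db.commit()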
def doSaveJournal(self, classname, nodeid, action, params, creator,
                  creation):
    if creator is None:
        creator = self.getuid()
    if creation is None:
        creation = date.Date()
    self.journals.setdefault(classname, {}).setdefault(nodeid, []).append(
        (nodeid, creation, creator, action, params))
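# Illustrative shape of one in-memory journal entry as appended above
# (values are made up): (nodeid, creation, creator, action, params)
example_entry = ('42', date.Date('2024-01-15.10:30:00'), '3', 'set',
                 {'status': '2'})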
def add_milestones(db, cl, nodeid, new_values):
    """auditor on release.create
    XXX: not sure if this should be in an auditor ???

    - creates the milestones and attaches them to the newly created release.
    - set release's `status` to M000
    """
    milestones = \
        [ ("M000", "+ 0d",     "Release Planning has started")
        , ("M100", "+ 2m 15d", "Release Planning completed, Feature Freeze")
        , ("M200", "+ 3m 20d", "Design completed")
        , ("M210", "+ 3m 30d", "Check for Customer Applications to be "
                               "used as Testcases")
        , ("M300", "+ 6m 20d", "Implementation completed")
        , ("M310", "+ 7m ",    "TC Spec & TC Implementation completed")
        , ("M400", "+ 8m 15d", "Integration Test completed; Beta Release")
        , ("M410", "+ 8m 20d", "Documentation Completed")
        , ("M490", "+ 9m ",    "Bugfixing completed")
        , ("M500", "+ 9m 10d", "Test by Services completed; Production "
                               "Release")
        , ("M600", "+10m",     "Shipment completed")
        ]
    order = 1
    ms_ids = []
    today = date.Date(".").pretty(format="%Y-%m-%d")
    today = date.Date(today)  # to start today at 00:00 and not somewhere in
                              # the day
    for name, interval, desc in milestones:
        planned = today + date.Interval(interval)
        ms = db.milestone.create(name=name, description=desc, order=order,
                                 planned=planned, release=nodeid)
        ms_ids.append(ms)
        order += 1
    new_values["milestones"] = ms_ids
    # set status to M000
    new_values["status"] = ms_ids[0]
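# Quick illustration of the Interval arithmetic used for the planned dates
# above (the base date is an example only):
base = date.Date('2024-01-01')
planned_m100 = base + date.Interval('+ 2m 15d')   # roughly 2024-03-16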
def store_nonce(self, query):
    '''Store a nonce in the database.'''
    if 'openid.response_nonce' in query:
        nonce = query['openid.response_nonce'][0]
        stamp = openid2rp.parse_nonce(nonce)
        # Consume nonce; reuse expired nonces
        old = self.db.openid_nonce.filter(None, {'created': ';.-1d'})
        stamp = date.Date(stamp)
        if old:
            self.db.openid_nonce.set(old[0], created=stamp, nonce=nonce)
        else:
            self.db.openid_nonce.create(created=stamp, nonce=nonce)
        self.db.commit()
def import_data_18(db, user, dep, olo):
    sd = dict(months=0.0, required_overtime=0, weekly=1)
    otp = db.overtime_period.filter(None, sd)
    assert len(otp) == 1
    otp = otp[0]
    db.user_dynamic.create \
        ( hours_fri         = 8.0
        , hours_sun         = 0.0
        , additional_hours  = 40.0
        , hours_wed         = 8.0
        , vacation_yearly   = 30.0
        , all_in            = 0
        , valid_from        = date.Date ("2018-07-02.00:00:00")
        , durations_allowed = 0
        , hours_tue         = 8.0
        , weekly_hours      = 40.0
        , hours_mon         = 8.0
        , hours_thu         = 8.0
        , vacation_day      = 1.0
        , booking_allowed   = 1
        , supp_weekly_hours = 40.0
        , valid_to          = date.Date ("2019-01-03.00:00:00")
        , weekend_allowed   = 0
        , travel_full       = 1
        , vacation_month    = 1.0
        , hours_sat         = 0.0
        , department        = dep
        , org_location      = olo
        , overtime_period   = otp
        , user              = user
        , vac_aliq          = '2'
        )
    vcorr = db.vacation_correction.create \
        ( user     = user
        , date     = date.Date ('2018-01-01')
        , absolute = 1
        , days     = 0.0
        )
    db.commit()
def set_closer(db, cl, nodeid, new_values):
    """auditor on defect's set

    when status changes to one of closed-*:
    - `closer` is set to the current user
    - `closed` is set to current time
    """
    status_id = new_values.get("status")
    if status_id:
        id_min = db.defect_status.lookup("closed")
        id_max = db.defect_status.lookup("closed-rejected")
        if status_id >= id_min and status_id <= id_max:
            new_values["closer"] = db.getuid()
            new_values["closed"] = date.Date(".")
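# Sketch of how such an auditor is typically registered in a Roundup
# detector module (the 'defect' class name is assumed from the code above):
def init(db):
    db.defect.audit('set', set_closer)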
def __init__ (self, opt) :
    self.opt = opt
    tracker  = instance.open (opt.dir)
    self.db  = tracker.open ('admin')
    now      = date.Date ('.')
    exp      = now + date.Interval ('%s days' % opt.days)
    wps = self.db.time_wp.filter \
        ( None
        , dict (time_end = '.;%s' % exp.pretty ('%Y-%m-%d'))
        , sort=[('+', 'responsible')]
        )
    self.wps      = [self.db.time_wp.getnode (i) for i in wps]
    self.messages = {}
    self.build_mails ()
def pending_approval (db, userid, itemid) :
    """ Users are allowed to edit a message if a pending approval
        from them is linked to the PR.
    """
    if not linked_pr (db, userid, itemid) :
        return False
    if open_or_approving (db, userid, itemid) :
        return True
    # Also allow for reject because the message might be attached twice.
    # We allow this only for some time (5 min after last change)
    st_reject = db.pr_status.lookup ('rejected')
    pr = db.purchase_request.getnode (itemid)
    if pr.status != st_reject :
        return False
    now = date.Date ('.')
    if pr.activity + date.Interval ('00:05:00') > now :
        return True
    return False
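# Sketch of registering this check function as a Roundup permission in the
# tracker's schema (the permission name, class and role are assumptions):
p = db.security.addPermission \
    ( name        = 'Edit'
    , klass       = 'msg'
    , check       = pending_approval
    , description = 'User may edit messages of PRs pending their approval'
    )
db.security.addPermissionToRole ('User', p)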
def create(db, **issue):
    # first we need to create a 'msg' node containing the message
    msg = {}
    # check if the message was passed on the command line
    if issue.has_key('messages'):
        # from command line
        msg['content'] = issue['messages']
    else:
        msg['content'] = ' No Message Provided '
    msg['author'] = db.getuid()
    msg['date'] = date.Date('.')

    # create the 'msg' node
    issue['messages'] = db.msg.create(**msg)  # will be put in list in next part

    # resolve linked and multilinked properties
    properties = db.issue.getprops()
    for key in issue.keys():
        if properties.has_key(key):
            if isinstance(properties[key], hyperdb.Link):
                if not issue[key].isdigit():
                    # resolve linked property names into node ids
                    klass = db.getclass(properties[key].classname)
                    issue[key] = klass.lookup(issue[key])
            elif isinstance(properties[key], hyperdb.Multilink):
                # resolve multilinked property names into node ids
                klass = db.getclass(properties[key].classname)
                links = []
                for item in issue[key].split(','):
                    if not item.isdigit():
                        nodeid = klass.lookup(item)
                    else:
                        nodeid = item
                    if not nodeid in links:
                        links.append(nodeid)
                issue[key] = links

    # next we create the issue itself
    issue_id = db.issue.create(**issue)
    print "Created issue%s" % issue_id
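# Hypothetical invocation (property names and values depend on the tracker
# schema); the helper resolves names like 'critical' to node ids via lookup():
create(db, title='Crash on startup', priority='critical',
       nosy='admin,alice', messages='Traceback attached below')
db.commit()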
def confirm_registration(self, otk):
    props = self.getOTKManager().getall(otk)
    for propname, proptype in self.user.getprops().items():
        value = props.get(propname, None)
        if value is None:
            pass
        elif isinstance(proptype, hyperdb.Date):
            props[propname] = date.Date(value)
        elif isinstance(proptype, hyperdb.Interval):
            props[propname] = date.Interval(value)
        elif isinstance(proptype, hyperdb.Password):
            props[propname] = password.Password(encrypted=value)

    # tag new user creation with 'admin'
    self.journaltag = 'admin'

    # create the new user
    cl = self.user
    props['roles'] = self.config.NEW_WEB_USER_ROLES
    try:
        # ASSUME:: ValueError raised during create due to key value
        #   conflict. I can use the message in the exception to determine
        #   when I should intercept the exception with a more friendly
        #   error message. If i18n is used to translate the original
        #   exception message this will fail and the translated text
        #   (probably unfriendly) will be used.
        userid = cl.create(**props)
    except ValueError as e:
        username = props['username']
        # Try to make the error message less cryptic to the user.
        if str(e) == 'node with key "%s" exists' % username:
            raise ValueError(_("Username '%s' already exists." % username))
        else:
            raise

    # clear the props from the otk database
    self.getOTKManager().destroy(otk)
    # commit cl.create (and otk changes)
    self.commit()
    return userid
def getjournal(self, classname, nodeid):
    # our journal result
    res = []

    # add any journal entries for transactions not committed to the
    # database
    for method, args in self.transactions:
        if method != self.doSaveJournal:
            continue
        (cache_classname, cache_nodeid, cache_action, cache_params,
         cache_creator, cache_creation) = args
        if cache_classname == classname and cache_nodeid == nodeid:
            if not cache_creator:
                cache_creator = self.getuid()
            if not cache_creation:
                cache_creation = date.Date()
            res.append((cache_nodeid, cache_creation, cache_creator,
                        cache_action, cache_params))
    try:
        res += self.journals.get(classname, {})[nodeid]
    except KeyError:
        if res:
            return res
        raise IndexError(nodeid)
    return res
def __init__ \
    ( self
    , user     = None
    , tpath    = None
    , oname    = None
    , toplevel = None
    , verbose  = None
    ) :
    if verbose is not None :
        self.VERBOSE = int (verbose)
    if self.VERBOSE > 1 :
        self.debug ("Pygantt_XML, Version", self.VERSION)
    self.user     = user  or self.DEFAULT_USER
    self.tpath    = tpath or self.DEFAULT_PATH
    self.oname    = oname or self.DEFAULT_FILE
    self.toplevel = eval (toplevel or self.DEFAULT_ISSUE)
    self.now      = date.Date (".")
    if self.VERBOSE > 1 :
        self.debug ("Roundup user        = %s" % self.user)
        self.debug ("Path to tracker     = %s" % self.tpath)
        self.debug ("Name of output file = %s" % self.oname)
        self.debug ("Considering toplevel issues %s ..." % self.toplevel)
        self.debug ("Launching tracker ...")
    self.tracker = instance.open (self.tpath)
    sys.path.insert (1, os.path.join (self.tpath, "lib"))
    import user_dynamic
    self.get_user_dynamic  = user_dynamic.get_user_dynamic
    self.last_user_dynamic = user_dynamic.last_user_dynamic
    self.weekly_hours      = user_dynamic.weekly_hours
    if self.VERBOSE > 1 :
        self.debug ("Loading roundup data base ...")
    self.db       = self.tracker.open (self.user)
    self.s_closed = self.db.status.lookup ("closed")
    self.s_test   = self.db.status.lookup ("testing")
    self.s_susp   = self.db.status.lookup ("suspended")
class Database(rdbms_common.Database): """Sqlite DB backend implementation attributes: dbtype: holds the value for the type of db. It is used by indexer to identify the database type so it can import the correct indexer module when using native text search mode. """ # char to use for positional arguments if sqlite_version in (2, 3): arg = '?' else: arg = '%s' dbtype = "sqlite" # used by some code to switch styles of query implements_intersect = 1 # used in generic backend to determine if db supports # 'DOUBLE PRECISION' for floating point numbers. Note that sqlite # already has double precision as its standard 'REAL' type. So this # is set to False here. implements_double_precision = False hyperdb_to_sql_datatypes = { hyperdb.String: 'VARCHAR(255)', hyperdb.Date: 'VARCHAR(30)', hyperdb.Link: 'INTEGER', hyperdb.Interval: 'VARCHAR(255)', hyperdb.Password: '******', hyperdb.Boolean: 'BOOLEAN', hyperdb.Number: 'REAL', hyperdb.Integer: 'INTEGER', } hyperdb_to_sql_value = { hyperdb.String: str, hyperdb.Date: lambda x: x.serialise(), hyperdb.Link: int, hyperdb.Interval: str, hyperdb.Password: str, hyperdb.Boolean: int, hyperdb.Integer: int, hyperdb.Number: lambda x: x, hyperdb.Multilink: lambda x: x, # used in journal marshalling } sql_to_hyperdb_value = { hyperdb.String: uany2s, hyperdb.Date: lambda x: date.Date(str(x)), hyperdb.Link: str, # XXX numeric ids hyperdb.Interval: date.Interval, hyperdb.Password: lambda x: password.Password(encrypted=x), hyperdb.Boolean: int, hyperdb.Integer: int, hyperdb.Number: rdbms_common._num_cvt, hyperdb.Multilink: lambda x: x, # used in journal marshalling } # We're using DBM for managing session info and one-time keys: # For SQL database storage of this info we would need two concurrent # connections to the same database which SQLite doesn't support def getSessionManager(self): if not self.Session: self.Session = Sessions(self) return self.Session def getOTKManager(self): if not self.Otk: self.Otk = OneTimeKeys(self) return self.Otk def sqlite_busy_handler(self, data, table, count): """invoked whenever SQLite tries to access a database that is locked""" now = time.time() if count == 1: # Timeout for handling locked database (default 30s) self._busy_handler_endtime = now + self.config.RDBMS_SQLITE_TIMEOUT elif now > self._busy_handler_endtime: # timeout expired - no more retries return 0 # sleep adaptively as retry count grows, # starting from about half a second time_to_sleep = 0.01 * (2 << min(5, count)) time.sleep(time_to_sleep) return 1 def sql_open_connection(self): """Open a standard, non-autocommitting connection. pysqlite will automatically BEGIN TRANSACTION for us. 
""" # make sure the database directory exists # database itself will be created by sqlite if needed if not os.path.isdir(self.config.DATABASE): os.makedirs(self.config.DATABASE) db = os.path.join(self.config.DATABASE, 'db') logging.getLogger('roundup.hyperdb').info('open database %r' % db) # set timeout (30 second default is extraordinarily generous) # for handling locked database if sqlite_version == 1: conn = sqlite.connect(db=db) conn.db.sqlite_busy_handler(self.sqlite_busy_handler) else: conn = sqlite.connect(db, timeout=self.config.RDBMS_SQLITE_TIMEOUT) conn.row_factory = sqlite.Row # pysqlite2 / sqlite3 want us to store Unicode in the db but # that's not what's been done historically and it's definitely # not what the other backends do, so we'll stick with UTF-8 if sqlite_version in (2, 3): conn.text_factory = str cursor = conn.cursor() return (conn, cursor) def open_connection(self): # ensure files are group readable and writable os.umask(self.config.UMASK) (self.conn, self.cursor) = self.sql_open_connection() try: self.load_dbschema() except sqlite.DatabaseError as error: if str(error) != 'no such table: schema': raise self.init_dbschema() self.sql('create table schema (schema varchar)') self.sql('create table ids (name varchar, num integer)') self.sql('create index ids_name_idx on ids(name)') self.create_version_2_tables() def create_version_2_tables(self): self.sql('create table otks (otk_key varchar, ' 'otk_value varchar, otk_time integer)') self.sql('create index otks_key_idx on otks(otk_key)') self.sql('create table sessions (session_key varchar, ' 'session_time integer, session_value varchar)') self.sql('create index sessions_key_idx on ' 'sessions(session_key)') # full-text indexing store self.sql( 'CREATE TABLE __textids (_class varchar, ' '_itemid varchar, _prop varchar, _textid integer primary key) ') self.sql('CREATE TABLE __words (_word varchar, ' '_textid integer)') self.sql('CREATE INDEX words_word_ids ON __words(_word)') self.sql('CREATE INDEX words_by_id ON __words (_textid)') self.sql('CREATE UNIQUE INDEX __textids_by_props ON ' '__textids (_class, _itemid, _prop)') sql = 'insert into ids (name, num) values (%s,%s)' % (self.arg, self.arg) self.sql(sql, ('__textids', 1)) def add_new_columns_v2(self): # update existing tables to have the new actor column tables = self.database_schema['tables'] for classname, spec in self.classes.items(): if classname in tables: dbspec = tables[classname] self.update_class(spec, dbspec, force=1, adding_v2=1) # we've updated - don't try again tables[classname] = spec.schema() def fix_version_3_tables(self): # NOOP - no restriction on column length here pass def update_class(self, spec, old_spec, force=0, adding_v2=0): """ Determine the differences between the current spec and the database version of the spec, and update where necessary. If 'force' is true, update the database anyway. SQLite doesn't have ALTER TABLE, so we have to copy and regenerate the tables with the new schema. """ new_spec = spec.schema() new_spec[1].sort() old_spec[1].sort() if not force and new_spec == old_spec: # no changes return 0 logging.getLogger('roundup.hyperdb').info('update_class %s' % spec.classname) # detect multilinks that have been removed, and drop their table old_has = {} for name, prop in old_spec[1]: old_has[name] = 1 if name in spec.properties or not isinstance( prop, hyperdb.Multilink): continue # it's a multilink, and it's been removed - drop the old # table. First drop indexes. 
self.drop_multilink_table_indexes(spec.classname, name) sql = 'drop table %s_%s' % (spec.classname, prop) self.sql(sql) # now figure how we populate the new table if adding_v2: fetch = ['_activity', '_creation', '_creator'] else: fetch = ['_actor', '_activity', '_creation', '_creator'] properties = spec.getprops() for propname, x in new_spec[1]: prop = properties[propname] if isinstance(prop, hyperdb.Multilink): if propname not in old_has: # we need to create the new table self.create_multilink_table(spec, propname) elif force: tn = '%s_%s' % (spec.classname, propname) # grabe the current values sql = 'select linkid, nodeid from %s' % tn self.sql(sql) rows = self.cursor.fetchall() # drop the old table self.drop_multilink_table_indexes(spec.classname, propname) sql = 'drop table %s' % tn self.sql(sql) # re-create and populate the new table self.create_multilink_table(spec, propname) sql = """insert into %s (linkid, nodeid) values (%s, %s)""" % (tn, self.arg, self.arg) for linkid, nodeid in rows: self.sql(sql, (int(linkid), int(nodeid))) elif propname in old_has: # we copy this col over from the old table fetch.append('_' + propname) # select the data out of the old table fetch.append('id') fetch.append('__retired__') fetchcols = ','.join(fetch) cn = spec.classname sql = 'select %s from _%s' % (fetchcols, cn) self.sql(sql) olddata = self.cursor.fetchall() # TODO: update all the other index dropping code self.drop_class_table_indexes(cn, old_spec[0]) # drop the old table self.sql('drop table _%s' % cn) # create the new table self.create_class_table(spec) if olddata: inscols = [ 'id', '_actor', '_activity', '_creation', '_creator', '__retired__' ] for propname, x in new_spec[1]: prop = properties[propname] if isinstance(prop, hyperdb.Multilink): continue elif isinstance(prop, hyperdb.Interval): inscols.append('_' + propname) inscols.append('__' + propname + '_int__') elif propname in old_has: # we copy this col over from the old table inscols.append('_' + propname) # do the insert of the old data - the new columns will have # NULL values args = ','.join([self.arg for x in inscols]) cols = ','.join(inscols) sql = 'insert into _%s (%s) values (%s)' % (cn, cols, args) for entry in olddata: d = [] retired_id = None for name in inscols: # generate the new value for the Interval int column if name.endswith('_int__'): name = name[2:-6] if sqlite_version in (2, 3): try: v = hyperdb.Interval(entry[name]).as_seconds() except IndexError: v = None elif name in entry: v = hyperdb.Interval(entry[name]).as_seconds() else: v = None elif sqlite_version in (2, 3): try: v = entry[name] except IndexError: v = None elif (sqlite_version == 1 and name in entry): v = entry[name] else: v = None if name == 'id': retired_id = v elif name == '__retired__' and retired_id and v not in [ '0', 0 ]: v = retired_id d.append(v) self.sql(sql, tuple(d)) return 1 def sql_close(self): """ Squash any error caused by us already having closed the connection. """ try: self.conn.close() except sqlite.ProgrammingError as value: if str(value) != 'close failed - Connection is closed.': raise def sql_rollback(self): """ Squash any error caused by us having closed the connection (and therefore not having anything to roll back) """ try: self.conn.rollback() except sqlite.ProgrammingError as value: if str(value) != 'rollback failed - Connection is closed.': raise def __repr__(self): return '<roundlite 0x%x>' % id(self) def sql_commit(self): """ Actually commit to the database. Ignore errors if there's nothing to commit. 
""" try: self.conn.commit() except sqlite.DatabaseError as error: if str(error) != 'cannot commit - no transaction is active': raise # open a new cursor for subsequent work self.cursor = self.conn.cursor() def sql_index_exists(self, table_name, index_name): self.sql('pragma index_list(%s)' % table_name) for entry in self.cursor.fetchall(): if entry[1] == index_name: return 1 return 0 # old-skool id generation def newid(self, classname): """ Generate a new id for the given class """ # Prevent other processes from reading while we increment. # Otherwise multiple processes can end up with the same # new id and hilarity results. # # Defeat pysqlite's attempts to do locking by setting # isolation_level to None. Pysqlite can commit # on it's own even if we don't want it to end the transaction. # If we rewrite to use another sqlite library like apsw we # don't have to deal with this autocommit/autotransact foolishness. self.conn.isolation_level = None # Manage the transaction locks manually. self.sql("BEGIN IMMEDIATE") # get the next ID sql = 'select num from ids where name=%s' % self.arg self.sql(sql, (classname, )) newid = int(self.cursor.fetchone()[0]) # leave the next larger number as the next newid sql = 'update ids set num=num+1 where name=%s' % self.arg vals = (classname, ) self.sql(sql, vals) # reset pysqlite's auto transact stuff to default since the # rest of the code expects it. self.conn.isolation_level = '' # commit writing the data, clearing locks for other processes # and create a new cursor to the database. self.sql_commit() # return as string return str(newid) def setid(self, classname, setid): """ Set the id counter: used during import of database We add one to make it behave like the sequences in postgres. """ sql = 'update ids set num=%s where name=%s' % (self.arg, self.arg) vals = (int(setid) + 1, classname) self.sql(sql, vals) def clear(self): rdbms_common.Database.clear(self) # set the id counters to 0 (setid adds one) so we start at 1 for cn in self.classes.keys(): self.setid(cn, 0) def create_class(self, spec): rdbms_common.Database.create_class(self, spec) sql = 'insert into ids (name, num) values (%s, %s)' % (self.arg, self.arg) vals = (spec.classname, 1) self.sql(sql, vals) if sqlite_version in (2, 3): def load_journal(self, classname, cols, nodeid): """We need to turn the sqlite3.Row into a tuple so it can be unpacked""" l = rdbms_common.Database.load_journal(self, classname, cols, nodeid) cols = range(5) return [[row[col] for col in cols] for row in l]
def import_data_11(db, user):
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-03')
        )
    db.time_record.create \
        ( daily_record  = dr
        , start         = '11:00'
        , end           = '13:00'
        , work_location = '1'
        , wp            = '4'
        )
    db.time_record.create \
        ( daily_record  = dr
        , start         = '08:00'
        , end           = '11:00'
        , work_location = '1'
        , wp            = '4'
        )
    db.time_record.create \
        ( daily_record  = dr
        , start         = '14:00'
        , end           = '16:00'
        , work_location = '1'
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-04')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-05')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-06')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-07')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-08')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-09')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-10')
        )
    db.time_record.create \
        ( daily_record  = dr
        , duration      = 1.0
        , work_location = '1'
        , wp            = '1'
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-11')
        )
    db.time_record.create \
        ( daily_record  = dr
        , duration      = 2.0
        , work_location = '1'
        , wp            = '3'
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-12')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-13')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-14')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-15')
        )
    dr = db.daily_record.create \
        ( user = user
        , date = date.Date ('2013-06-16')
        )
    db.commit()
class Database(rdbms_common.Database): # char to use for positional arguments if sqlite_version in (2,3): arg = '?' else: arg = '%s' # used by some code to switch styles of query implements_intersect = 1 hyperdb_to_sql_datatypes = { hyperdb.String : 'VARCHAR(255)', hyperdb.Date : 'VARCHAR(30)', hyperdb.Link : 'INTEGER', hyperdb.Interval : 'VARCHAR(255)', hyperdb.Password : '******', hyperdb.Boolean : 'BOOLEAN', hyperdb.Number : 'REAL', } hyperdb_to_sql_value = { hyperdb.String : str, hyperdb.Date : lambda x: x.serialise(), hyperdb.Link : int, hyperdb.Interval : str, hyperdb.Password : str, hyperdb.Boolean : int, hyperdb.Number : lambda x: x, hyperdb.Multilink : lambda x: x, # used in journal marshalling } sql_to_hyperdb_value = { hyperdb.String : lambda x: isinstance(x, unicode) and x.encode('utf8') or str(x), hyperdb.Date : lambda x: date.Date(str(x)), hyperdb.Link : str, # XXX numeric ids hyperdb.Interval : date.Interval, hyperdb.Password : lambda x: password.Password(encrypted=x), hyperdb.Boolean : int, hyperdb.Number : rdbms_common._num_cvt, hyperdb.Multilink : lambda x: x, # used in journal marshalling } def sqlite_busy_handler(self, data, table, count): """invoked whenever SQLite tries to access a database that is locked""" if count == 1: # use a 30 second timeout (extraordinarily generous) # for handling locked database self._busy_handler_endtime = time.time() + 30 elif time.time() > self._busy_handler_endtime: # timeout expired - no more retries return 0 # sleep adaptively as retry count grows, # starting from about half a second time_to_sleep = 0.01 * (2 << min(5, count)) time.sleep(time_to_sleep) return 1 def sql_open_connection(self): '''Open a standard, non-autocommitting connection. pysqlite will automatically BEGIN TRANSACTION for us. ''' # make sure the database directory exists # database itself will be created by sqlite if needed if not os.path.isdir(self.config.DATABASE): os.makedirs(self.config.DATABASE) db = os.path.join(self.config.DATABASE, 'db') logging.getLogger('hyperdb').info('open database %r'%db) # set a 30 second timeout (extraordinarily generous) for handling # locked database if sqlite_version == 1: conn = sqlite.connect(db=db) conn.db.sqlite_busy_handler(self.sqlite_busy_handler) else: conn = sqlite.connect(db, timeout=30) conn.row_factory = sqlite.Row cursor = conn.cursor() return (conn, cursor) def open_connection(self): # ensure files are group readable and writable os.umask(self.config.UMASK) (self.conn, self.cursor) = self.sql_open_connection() try: self.load_dbschema() except sqlite.DatabaseError, error: if str(error) != 'no such table: schema': raise self.init_dbschema() self.sql('create table schema (schema varchar)') self.sql('create table ids (name varchar, num integer)') self.sql('create index ids_name_idx on ids(name)') self.create_version_2_tables()
def parse(self, create=0, num_re=re.compile(r'^\d+$')): """ Item properties and their values are edited with html FORM variables and their values. You can: - Change the value of some property of the current item. - Create a new item of any class, and edit the new item's properties, - Attach newly created items to a multilink property of the current item. - Remove items from a multilink property of the current item. - Specify that some properties are required for the edit operation to be successful. In the following, <bracketed> values are variable, "@" may be either ":" or "@", and other text "required" is fixed. Most properties are specified as form variables: <propname> - property on the current context item <designator>"@"<propname> - property on the indicated item (for editing related information) Designators name a specific item of a class. <classname><N> Name an existing item of class <classname>. <classname>"-"<N> Name the <N>th new item of class <classname>. If the form submission is successful, a new item of <classname> is created. Within the submitted form, a particular designator of this form always refers to the same new item. Once we have determined the "propname", we look at it to see if it's special: @required The associated form value is a comma-separated list of property names that must be specified when the form is submitted for the edit operation to succeed. When the <designator> is missing, the properties are for the current context item. When <designator> is present, they are for the item specified by <designator>. The "@required" specifier must come before any of the properties it refers to are assigned in the form. @remove@<propname>=id(s) or @add@<propname>=id(s) The "@add@" and "@remove@" edit actions apply only to Multilink properties. The form value must be a comma-separate list of keys for the class specified by the simple form variable. The listed items are added to (respectively, removed from) the specified property. @link@<propname>=<designator> If the edit action is "@link@", the simple form variable must specify a Link or Multilink property. The form value is a comma-separated list of designators. The item corresponding to each designator is linked to the property given by simple form variable. These are collected up and returned in all_links. None of the above (ie. just a simple form value) The value of the form variable is converted appropriately, depending on the type of the property. For a Link('klass') property, the form value is a single key for 'klass', where the key field is specified in dbinit.py. For a Multilink('klass') property, the form value is a comma-separated list of keys for 'klass', where the key field is specified in dbinit.py. Note that for simple-form-variables specifiying Link and Multilink properties, the linked-to class must have a key field. For a String() property specifying a filename, the file named by the form value is uploaded. This means we try to set additional properties "filename" and "type" (if they are valid for the class). Otherwise, the property is set to the form value. For Date(), Interval(), Boolean(), and Number(), Integer() properties, the form value is converted to the appropriate Any of the form variables may be prefixed with a classname or designator. 
Two special form values are supported for backwards compatibility: @note This is equivalent to:: @link@messages=msg-1 msg-1@content=value except that in addition, the "author" and "date" properties of "msg-1" are set to the userid of the submitter, and the current time, respectively. @file This is equivalent to:: @link@files=file-1 file-1@content=value The String content value is handled as described above for file uploads. If "multiple" is turned on for file uploads in the html template, multiple links are generated:: @link@files=file-2 file-2@content=value ... depending on how many files the user has attached. If both the "@note" and "@file" form variables are specified, the action:: @link@msg-1@files=file-1 is also performed. If "multiple" is specified this is carried out for each of the attached files. We also check that FileClass items have a "content" property with actual content, otherwise we remove them from all_props before returning. The return from this method is a dict of (classname, id): properties ... this dict _always_ has an entry for the current context, even if it's empty (ie. a submission for an existing issue that doesn't result in any changes would return {('issue','123'): {}}) The id may be None, which indicates that an item should be created. """ # some very useful variables db = self.db form = self.form if not hasattr(self, 'FV_SPECIAL'): # generate the regexp for handling special form values classes = '|'.join(db.classes.keys()) # specials for parsePropsFromForm # handle the various forms (see unit tests) self.FV_SPECIAL = re.compile(self.FV_LABELS % classes, re.VERBOSE) self.FV_DESIGNATOR = re.compile(r'(%s)([-\d]+)' % classes) # these indicate the default class / item default_cn = self.classname default_cl = self.db.classes[default_cn] default_nodeid = self.nodeid # we'll store info about the individual class/item edit in these all_required = {} # required props per class/item all_props = {} # props to set per class/item got_props = {} # props received per class/item all_propdef = {} # note - only one entry per class all_links = [] # as many as are required # we should always return something, even empty, for the context all_props[(default_cn, default_nodeid)] = {} keys = form.keys() timezone = db.getUserTimezone() # sentinels for the :note and :file props have_note = have_file = 0 # extract the usable form labels from the form matches = [] for key in keys: m = self.FV_SPECIAL.match(key) if m: matches.append((key, m.groupdict())) # now handle the matches for key, d in matches: if d['classname']: # we got a designator cn = d['classname'] cl = self.db.classes[cn] nodeid = d['id'] propname = d['propname'] elif d['note']: # the special note field cn = 'msg' cl = self.db.classes[cn] nodeid = '-1' propname = 'content' all_links.append( (default_cn, default_nodeid, 'messages', [('msg', '-1')])) have_note = 1 elif d['file']: # the special file field cn = default_cn cl = default_cl nodeid = default_nodeid propname = 'files' else: # default cn = default_cn cl = default_cl nodeid = default_nodeid propname = d['propname'] # the thing this value relates to is... this = (cn, nodeid) # skip implicit create if this isn't a create action if not create and nodeid is None: continue # get more info about the class, and the current set of # form props for it if cn not in all_propdef: all_propdef[cn] = cl.getprops() propdef = all_propdef[cn] if this not in all_props: all_props[this] = {} props = all_props[this] if this not in got_props: got_props[this] = {} # is this a link command? 
if d['link']: value = [] for entry in self.extractFormList(form[key]): m = self.FV_DESIGNATOR.match(entry) if not m: raise FormError( self._('link "%(key)s" ' 'value "%(entry)s" not a designator') % locals()) value.append((m.group(1), m.group(2))) # get details of linked class lcn = m.group(1) lcl = self.db.classes[lcn] lnodeid = m.group(2) if lcn not in all_propdef: all_propdef[lcn] = lcl.getprops() if (lcn, lnodeid) not in all_props: all_props[(lcn, lnodeid)] = {} if (lcn, lnodeid) not in got_props: got_props[(lcn, lnodeid)] = {} # make sure the link property is valid if (not isinstance(propdef[propname], hyperdb.Multilink) and not isinstance(propdef[propname], hyperdb.Link)): raise FormError( self._('%(class)s %(property)s ' 'is not a link or multilink property') % { 'class': cn, 'property': propname }) all_links.append((cn, nodeid, propname, value)) continue # detect the special ":required" variable if d['required']: for entry in self.extractFormList(form[key]): m = self.FV_SPECIAL.match(entry) if not m: raise FormError( self._('The form action claims to ' 'require property "%(property)s" ' 'which doesn\'t exist') % {'property': propname}) if m.group('classname'): this = (m.group('classname'), m.group('id')) entry = m.group('propname') if this not in all_required: all_required[this] = [] all_required[this].append(entry) continue # see if we're performing a special multilink action mlaction = 'set' if d['remove']: mlaction = 'remove' elif d['add']: mlaction = 'add' # does the property exist? if propname not in propdef: if mlaction != 'set': raise FormError( self._('You have submitted a %(action)s ' 'action for the property "%(property)s" ' 'which doesn\'t exist') % { 'action': mlaction, 'property': propname }) # the form element is probably just something we don't care # about - ignore it continue proptype = propdef[propname] # Get the form value. This value may be a MiniFieldStorage # or a list of MiniFieldStorages. value = form[key] # handle unpacking of the MiniFieldStorage / list form value if d['file']: assert isinstance(proptype, hyperdb.Multilink) # value is a file upload... we *always* handle multiple # files here (html5) if not isinstance(value, type([])): value = [value] elif isinstance(proptype, hyperdb.Multilink): value = self.extractFormList(value) else: # multiple values are not OK if isinstance(value, type([])): raise FormError( self._('You have submitted more than one ' 'value for the %s property') % propname) # value might be a single file upload if not getattr(value, 'filename', None): value = value.value.strip() # now that we have the props field, we need a teensy little # extra bit of help for the old :note field... 
if d['note'] and value: props['author'] = self.db.getuid() props['date'] = date.Date() # handle by type now if isinstance(proptype, hyperdb.Password): if not value: # ignore empty password values continue if d['confirm']: # ignore the "confirm" password value by itself continue for key, d in matches: if d['confirm'] and d['propname'] == propname: confirm = form[key] break else: raise FormError( self._('Password and confirmation text ' 'do not match')) if isinstance(confirm, type([])): raise FormError( self._('You have submitted more than one ' 'value for the %s property') % propname) if value != confirm.value: raise FormError( self._('Password and confirmation text ' 'do not match')) try: value = password.Password(value, scheme=proptype.scheme, config=self.db.config) except hyperdb.HyperdbValueError as msg: raise FormError(msg) elif d['file']: # This needs to be a Multilink and is checked above fcn = 'file' fcl = self.db.classes[fcn] fpropname = 'content' if fcn not in all_propdef: all_propdef[fcn] = fcl.getprops() fpropdef = all_propdef[fcn] have_file = [] for n, v in enumerate(value): if not hasattr(v, 'filename'): raise FormError(self._('Not a file attachment')) # skip if the upload is empty if not v.filename: continue fnodeid = str(-(n + 1)) have_file.append(fnodeid) fthis = (fcn, fnodeid) if fthis not in all_props: all_props[fthis] = {} fprops = all_props[fthis] all_links.append( (cn, nodeid, 'files', [('file', fnodeid)])) fprops['content'] = self.parse_file(fpropdef, fprops, v) value = None nodeid = None elif isinstance(proptype, hyperdb.Multilink): # convert input to list of ids try: l = hyperdb.rawToHyperdb(self.db, cl, nodeid, propname, value) except hyperdb.HyperdbValueError as msg: raise FormError(msg) # now use that list of ids to modify the multilink if mlaction == 'set': value = l else: # we're modifying the list - get the current list of ids if propname in props: existing = props[propname] elif nodeid and not nodeid.startswith('-'): existing = cl.get(nodeid, propname, []) else: existing = [] # now either remove or add if mlaction == 'remove': # remove - handle situation where the id isn't in # the list for entry in l: try: existing.remove(entry) except ValueError: raise FormError( self._('property ' '"%(propname)s": "%(value)s" ' 'not currently in list') % { 'propname': propname, 'value': entry }) else: # add - easy, just don't dupe for entry in l: if entry not in existing: existing.append(entry) value = existing # Sort the value in the same order used by # Multilink.from_raw. value.sort(key=int) elif value == '' or value == b'': # other types should be None'd if there's no value value = None else: # handle all other types try: # Try handling file upload if (isinstance(proptype, hyperdb.String) and hasattr(value, 'filename') and value.filename is not None): value = self.parse_file(propdef, props, value) else: value = hyperdb.rawToHyperdb(self.db, cl, nodeid, propname, value) except hyperdb.HyperdbValueError as msg: raise FormError(msg) # register that we got this property if isinstance(proptype, hyperdb.Multilink): if value != []: got_props[this][propname] = 1 elif value is not None: got_props[this][propname] = 1 # get the old value if nodeid and not nodeid.startswith('-'): try: existing = cl.get(nodeid, propname) except KeyError: # this might be a new property for which there is # no existing value if propname not in propdef: raise except IndexError as message: raise FormError(str(message)) # make sure the existing multilink is sorted. 
We must # be sure to use the same sort order in all places, # since we want to compare values with "=" or "!=". # The canonical order (given in Multilink.from_raw) is # by the numeric value of the IDs. if isinstance(proptype, hyperdb.Multilink): existing.sort(key=int) # "missing" existing values may not be None if not existing: if isinstance(proptype, hyperdb.String): # some backends store "missing" Strings as # empty strings if existing == self.db.BACKEND_MISSING_STRING: existing = None elif isinstance(proptype, hyperdb.Number) or \ isinstance(proptype, hyperdb.Integer): # some backends store "missing" Numbers as 0 :( if existing == self.db.BACKEND_MISSING_NUMBER: existing = None elif isinstance(proptype, hyperdb.Boolean): # likewise Booleans if existing == self.db.BACKEND_MISSING_BOOLEAN: existing = None # if changed, set it if value != existing: props[propname] = value else: # don't bother setting empty/unset values if value is None: continue elif isinstance(proptype, hyperdb.Multilink) and value == []: continue elif isinstance(proptype, hyperdb.String) and value == '': continue props[propname] = value # check to see if we need to specially link files to the note if have_note and have_file: for fid in have_file: all_links.append(('msg', '-1', 'files', [('file', fid)])) # see if all the required properties have been supplied s = [] for thing, required in all_required.items(): # register the values we got got = got_props.get(thing, {}) for entry in required[:]: if entry in got: required.remove(entry) # If a user doesn't have edit permission for a given property, # but the property is already set in the database, we don't # require a value. if not (create or nodeid is None): for entry in required[:]: if not self.db.security.hasPermission( 'Edit', self.client.userid, self.classname, entry): cl = self.db.classes[self.classname] if cl.get(nodeid, entry) is not None: required.remove(entry) # any required values not present? if not required: continue # tell the user to entry the values required s.append( self.ngettext( 'Required %(class)s property %(property)s not supplied', 'Required %(class)s properties %(property)s not supplied', len(required)) % { 'class': self._(thing[0]), 'property': ', '.join(map(self.gettext, required)) }) if s: raise FormError('\n'.join(s)) # When creating a FileClass node, it should have a non-empty content # property to be created. When editing a FileClass node, it should # either have a non-empty content property or no property at all. In # the latter case, nothing will change. for (cn, id), props in list(all_props.items()): if id is not None and id.startswith('-') and not props: # new item (any class) with no content - ignore del all_props[(cn, id)] elif isinstance(self.db.classes[cn], hyperdb.FileClass): # three cases: # id references existng file. If content is empty, # remove content from form so we don't wipe # existing file contents. # id is -1, -2 ... I.E. a new file. # if content is not defined remove all fields that # reference that file. # if content is defined, let it pass through even if # content is empty. Yes people can upload/create # empty files. if 'content' in props: if id is not None and \ not id.startswith('-') and \ not props['content']: # This is an existing file with emtpy content # value in the form. del props['content'] else: # this is a new file without any content property. 
if id is not None and id.startswith('-'): del all_props[(cn, id)] # if this is a new file with content (even 0 length content) # allow it through and create the zero length file. return all_props, all_links
def import_data_7(db, user): dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-15') ) db.time_record.create \ ( daily_record = dr , start = '09:00' , end = '12:15' , work_location = '1' , wp = '4' ) db.time_record.create \ ( daily_record = dr , start = '13:30' , end = '17:45' , work_location = '1' , wp = '4' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-16') ) db.time_record.create \ ( daily_record = dr , start = '07:45' , end = '12:15' , work_location = '1' , wp = '5' ) db.time_record.create \ ( daily_record = dr , start = '12:45' , end = '16:00' , work_location = '1' , wp = '4' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-17') ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-18') ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-19') ) db.time_record.create \ ( daily_record = dr , start = '08:00' , end = '12:00' , work_location = '1' , wp = '4' ) db.time_record.create \ ( daily_record = dr , start = '12:30' , end = '15:30' , work_location = '1' , wp = '5' ) db.time_record.create \ ( daily_record = dr , start = '15:30' , end = '18:30' , work_location = '1' , wp = '6' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-20') ) db.time_record.create \ ( daily_record = dr , start = '08:00' , end = '12:30' , work_location = '1' , wp = '5' ) db.time_record.create \ ( daily_record = dr , start = '13:00' , end = '17:45' , work_location = '1' , wp = '4' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-21') ) db.time_record.create \ ( daily_record = dr , start = '07:45' , end = '13:15' , work_location = '1' , wp = '6' ) db.time_record.create \ ( daily_record = dr , start = '13:45' , end = '16:15' , work_location = '1' , wp = '7' ) db.time_record.create \ ( daily_record = dr , start = '16:15' , end = '18:15' , work_location = '1' , wp = '5' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-22') ) db.time_record.create \ ( daily_record = dr , start = '08:15' , end = '12:00' , work_location = '1' , wp = '4' ) db.time_record.create \ ( daily_record = dr , start = '13:00' , end = '16:00' , work_location = '1' , wp = '6' ) db.time_record.create \ ( daily_record = dr , start = '16:00' , end = '18:30' , work_location = '1' , wp = '5' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-23') ) db.time_record.create \ ( daily_record = dr , start = '07:30' , end = '09:00' , work_location = '1' , wp = '8' ) db.time_record.create \ ( daily_record = dr , start = '09:30' , end = '12:00' , work_location = '1' , wp = '4' ) db.time_record.create \ ( daily_record = dr , start = '12:30' , end = '15:00' , work_location = '1' , wp = '5' ) db.time_record.create \ ( daily_record = dr , start = '15:00' , end = '17:15' , work_location = '1' , wp = '6' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-24') ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-25') ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-26') ) db.time_record.create \ ( daily_record = dr , start = '08:15' , end = '12:15' , work_location = '1' , wp = '4' ) db.time_record.create \ ( daily_record = dr , start = '12:45' , end = '14:15' , work_location = '1' , wp = '6' ) db.time_record.create \ ( daily_record = dr , start = '14:15' , end = '17:45' , work_location = '1' , wp = '5' ) dr = db.daily_record.create \ ( user = user , date = date.Date ('2012-11-27') ) 
db.time_record.create(daily_record=dr, start='08:15', end='13:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='13:30', end='16:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='16:00', end='18:00', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-11-28'))
db.time_record.create(daily_record=dr, start='08:15', end='10:15', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='10:15', end='12:15', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='12:45', end='14:45', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='14:45', end='17:45', work_location='1', wp='4')
dr = db.daily_record.create(user=user, date=date.Date('2012-11-29'))
db.time_record.create(daily_record=dr, start='08:15', end='13:15', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='13:45', end='17:45', work_location='1', wp='4')
dr = db.daily_record.create(user=user, date=date.Date('2012-11-30'))
db.time_record.create(daily_record=dr, start='07:00', end='12:15', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='12:45', end='16:00', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-01'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-02'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-03'))
db.time_record.create(daily_record=dr, start='08:15', end='12:15', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='12:45', end='15:45', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='15:45', end='17:45', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-04'))
db.time_record.create(daily_record=dr, start='08:15', end='12:15', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='12:45', end='15:45', work_location='1', wp='8')
db.time_record.create(daily_record=dr, start='15:45', end='18:15', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-05'))
db.time_record.create(daily_record=dr, start='08:15', end='11:15', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='11:15', end='12:15', work_location='1', wp='7')
db.time_record.create(daily_record=dr, start='12:45', end='16:30', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='16:30', end='18:30', work_location='1', wp='4')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-06'))
db.time_record.create(daily_record=dr, start='08:15', end='12:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='13:00', end='17:30', work_location='1', wp='8')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-07'))
db.time_record.create(daily_record=dr, start='08:15', end='12:00', work_location='1', wp='8')
db.time_record.create(daily_record=dr, start='12:30', end='16:30', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='16:30', end='17:45', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-08'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-09'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-10'))
db.time_record.create(daily_record=dr, start='08:15', end='10:45', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='10:45', end='12:45', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='13:15', end='18:15', time_activity='10', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-11'))
db.time_record.create(daily_record=dr, start='08:15', end='12:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='13:00', end='15:45', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='15:45', end='17:00', time_activity='10', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='17:00', end='18:00', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-12'))
db.time_record.create(daily_record=dr, start='09:00', end='12:30', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='13:00', end='17:00', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='17:00', end='21:15', time_activity='10', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-13'))
db.time_record.create(daily_record=dr, start='07:45', end='09:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='09:00', end='10:30', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='10:30', end='13:15', work_location='1', wp='5')
db.time_record.create(daily_record=dr, start='14:15', end='16:00', work_location='1', wp='8')
db.time_record.create(daily_record=dr, start='16:00', end='18:00', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='18:00', end='20:00', work_location='1', wp='5')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-14'))
db.time_record.create(daily_record=dr, start='08:00', end='12:45', work_location='1', wp='6')
db.time_record.create(daily_record=dr, start='13:15', end='15:00', work_location='1', wp='4')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-15'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-16'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-17'))
db.time_record.create(daily_record=dr, start='08:15', end='12:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='13:00', end='16:00', work_location='1', wp='4')
db.time_record.create(daily_record=dr, start='16:00', end='17:30', work_location='1', wp='8')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-18'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-19'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-20'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-21'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-22'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-23'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-24'))
db.time_record.create(daily_record=dr, start='08:00', end='12:00', work_location='5', wp='1')
db.time_record.create(daily_record=dr, start='13:00', end='16:45', work_location='5', wp='2')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-25'))
db.time_record.create(daily_record=dr, duration=7.75, work_location='5', wp='1')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-26'))
db.time_record.create(daily_record=dr, duration=7.75, work_location='5', wp='1')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-27'))
db.time_record.create(daily_record=dr, duration=7.75, work_location='5', wp='2')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-28'))
db.time_record.create(daily_record=dr, duration=7.5, work_location='5', wp='2')
dr = db.daily_record.create(user=user, date=date.Date('2012-12-29'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-30'))
dr = db.daily_record.create(user=user, date=date.Date('2012-12-31'))
db.time_record.create(daily_record=dr, start='08:00', end='12:00', work_location='5', wp='1')
db.time_record.create(daily_record=dr, start='13:00', end='16:45', work_location='5', wp='2')
dr = db.daily_record.create(user=user, date=date.Date('2013-01-01'))
db.time_record.create(daily_record=dr, duration=7.75, work_location='5', wp='1')
dr = db.daily_record.create(user=user, date=date.Date('2013-01-02'))
dr = db.daily_record.create(user=user, date=date.Date('2013-01-03'))
dr = db.daily_record.create(user=user, date=date.Date('2013-01-04'))
dr = db.daily_record.create(user=user, date=date.Date('2013-01-05'))
dr = db.daily_record.create(user=user, date=date.Date('2013-01-06'))
db.commit()
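# A minimal sketch, not part of the original fixture, of how the records created
# above could be read back for a quick sanity check. It assumes the same open
# `db` and the same `user` id as the fixture and uses only the hyperdb
# filter/get calls already seen in this code.
for dr_id in db.daily_record.filter(None, {'user': user}):
    day = db.daily_record.get(dr_id, 'date')
    recs = db.time_record.filter(None, {'daily_record': dr_id})
    print("%s: %d time record(s)" % (day, len(recs)))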
def to_date(ts):
    # Convert a Unix timestamp (seconds since the epoch, possibly given as a
    # string) into a Roundup date.Date via a UTC struct_time.
    return date.Date(time.gmtime(float(ts)))
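# Hypothetical usage of to_date(), assuming the stdlib `time` module and
# roundup's `date` module are imported as above; the input value is simply the
# current clock time and is not taken from the original code.
now_as_date = to_date(time.time())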
def handle(self):
    """Performs an edit of all of a class' items in one go.

    The "rows" CGI var defines the CSV-formatted entries for the class. New
    nodes are identified by the ID 'X' (or any other non-existent ID) and
    removed lines are retired.
    """
    cl = self.db.classes[self.classname]
    idlessprops = cl.getprops(protected=0).keys()
    idlessprops.sort()
    props = ['id'] + idlessprops

    # do the edit
    rows = StringIO.StringIO(self.form['rows'].value)
    reader = csv.reader(rows)
    found = {}
    line = 0
    for values in reader:
        line += 1
        if line == 1:
            continue
        # skip property names header
        if values == props:
            continue

        # extract the nodeid
        nodeid, values = values[0], values[1:]
        found[nodeid] = 1

        # see if the node exists
        if nodeid in ('x', 'X') or not cl.hasnode(nodeid):
            exists = 0
        else:
            exists = 1

        # confirm correct weight
        if len(idlessprops) != len(values):
            self.client.error_message.append(
                self._('Not enough values on line %(line)s') % {'line': line})
            return

        # extract the new values
        d = {}
        for name, value in zip(idlessprops, values):
            prop = cl.properties[name]
            value = value.strip()
            # only add the property if it has a value
            if value:
                # if it's a multilink, split it
                if isinstance(prop, hyperdb.Multilink):
                    value = value.split(':')
                elif isinstance(prop, hyperdb.Password):
                    value = password.Password(value)
                elif isinstance(prop, hyperdb.Interval):
                    value = date.Interval(value)
                elif isinstance(prop, hyperdb.Date):
                    value = date.Date(value)
                elif isinstance(prop, hyperdb.Boolean):
                    value = value.lower() in ('yes', 'true', 'on', '1')
                elif isinstance(prop, hyperdb.Number):
                    value = float(value)
                d[name] = value
            elif exists:
                # nuke the existing value
                if isinstance(prop, hyperdb.Multilink):
                    d[name] = []
                else:
                    d[name] = None

        # perform the edit
        if exists:
            # edit existing
            cl.set(nodeid, **d)
        else:
            # new node
            found[cl.create(**d)] = 1

    # retire the removed entries
    for nodeid in cl.list():
        if not found.has_key(nodeid):
            cl.retire(nodeid)

    # all OK
    self.db.commit()
    self.client.ok_message.append(self._('Items edited OK'))
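# An illustrative, hypothetical "rows" payload for the handler above, assuming a
# class with properties 'title' and 'status' (neither the class nor the values
# come from the original code). The first line is the property-name header
# ('id' followed by the remaining properties in sorted order), existing items
# are addressed by their id, and 'X' marks a row to be created.
example_rows = (
    "id,status,title\n"
    "1,2,Fix the login form\n"       # update existing item 1
    "X,1,Write release notes\n"      # create a new item
)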
partner.create(name="HBO", partnertype="ContentOwner")
partner.create(name="AltaMedia", partnertype="ProdHouse")
partner.create(name="Mediatranslations", partnertype="ProdHouse")
series = db.getclass('series')
series.create(title="Breaking Bad S01", client="Netflix")
series.create(title="Megborulva", client="HBO")
episode = db.getclass("episode")
episode.create(title="Pilot", series="1", duration=90, order=1,
               status='1', priority='4', deadline=date.Date('2019-02-01'))
episode.create(title="Cat's in the Bag...", series="1", duration=90, order=2,
               status='1', priority='4')
episode.create(title="...And the Bag's in the River", series="1", duration=90,
               order=3, status='1', priority='4')
episode.create(title="Cancer Man", series="1", duration=90,
def main():
    # most ldap info is now fetched from extensions/config.ini
    parser = ArgumentParser()
    parser.add_argument("file", help="CSV import file")
    parser.add_argument("-d", "--database-directory", dest="database_directory",
                        help="Directory of the roundup installation", default='.')
    parser.add_argument('-D', '--delimiter', dest='delimiter',
                        help='CSV delimiter character (tab)', default='\t')
    parser.add_argument("-f", "--field", dest="fields",
                        help="Fields to update in dyn. user, e.g. sap_cc or department;"
                             " can be specified multiple times",
                        action='append', default=[])
    parser.add_argument("-N", "--new", help="Date of new dynamic user",
                        default='2017-10-01')
    parser.add_argument("-u", "--update", help="Update roundup",
                        default=False, action='store_true')
    parser.add_argument("-v", "--verbose", help="Verbose messages",
                        default=False, action='store_true')
    args = parser.parse_args()
    tracker = instance.open(args.database_directory)
    db = tracker.open('admin')
    sys.path.insert(1, os.path.join(args.database_directory, 'lib'))
    import user_dynamic

    # Resolve these once, before the loop: the 'valid' status is needed by the
    # per-line user lookups below, and the target date never changes.
    dt = date.Date(args.new)
    st = db.user_status.lookup('valid')

    r = Reader(args.file)
    d = DictReader(r, delimiter=args.delimiter)
    for line in d:
        if 'username' in line:
            try:
                user = db.user.getnode(db.user.lookup(line['username']))
            except KeyError:
                print("User not found: %s" % line['username'])
                continue
            sn = user.lastname
            fn = user.firstname
            username = user.username
        else:
            sn = line['Surname'].decode('utf-8')
            fn = line['First name'].decode('utf-8')
            if not sn or not fn:
                print("Name empty: %(sn)s %(fn)s" % locals())
                continue
            users = db.user.filter(None, dict(firstname=fn, lastname=sn, status=st))
            if not users and ' ' in fn:
                fn = fn.split(' ', 1)[0]
                users = db.user.filter(None, dict(firstname=fn, lastname=sn, status=st))
            if not users:
                print("User not found: %(sn)s %(fn)s" % locals())
                continue
            if len(users) != 1:
                uu = []
                for u in users:
                    user = db.user.getnode(u)
                    if (user.firstname.decode('utf-8') != fn
                            or user.lastname.decode('utf-8') != sn):
                        continue
                    uu.append(u)
                users = uu
            if len(users) != 1:
                print(users, fn, sn)
            assert len(users) == 1
            user = db.user.getnode(users[0])
            if (user.firstname.decode('utf-8') != fn
                    or user.lastname.decode('utf-8') != sn):
                print(user.firstname, user.lastname, fn, sn)
            username = user.username

        # Get user dynamic record
        dyn = user_dynamic.get_user_dynamic(db, user.id, dt)
        if not dyn:
            print("No dyn. user record: %(username)s" % locals())
            continue
        if dyn.valid_to:
            print("Dyn. user record limited: %(username)s" % locals())
            continue
        if dyn.valid_from > dt:
            print("Dyn. record starts after date: %(username)s" % locals())
            continue
        if not dyn.vacation_yearly:
            print("No yearly vacation: %(username)s" % locals())
            continue
        do_create = True
        if dyn.valid_from == dt:
            do_create = False
        update = {}
        try:
            key = ''
            for k in fieldmap:
                f = fieldmap[k]
                if f in args.fields and k in line:
                    key = line[k].strip()
                    if f in item_map:
                        key = item_map[f].get(key, key)
                    cn = dyn.cl.properties[f].classname
                    cls = db.getclass(cn)
                    item = cls.lookup(key)
                    if dyn[f] != item:
                        update[f] = item
        except KeyError:
            print("%(f)s not found: %(key)s: %(username)s" % locals())
            continue
        if update:
            if do_create:
                fields = user_dynamic.dynuser_copyfields
                param = dict((i, dyn[i]) for i in fields)
                param['valid_from'] = dt
                param.update(update)
                if args.update:
                    id = db.user_dynamic.create(**param)
                    if args.verbose:
                        print("CREATED: %s" % id)
                else:
                    if args.verbose:
                        print("user_dynamic-create: %s" % param)
            else:
                if args.update:
                    db.user_dynamic.set(dyn.id, **update)
                else:
                    if args.verbose:
                        print("user_dynamic-update: %s %s %s" % (update, fn, sn))

    if args.update:
        db.commit()
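# A hypothetical invocation of the import script above; the script name, tracker
# path and CSV file name are made up for illustration. -f selects which
# dynamic-user fields to sync, -v prints what would change, and -u actually
# writes the changes instead of doing a dry run:
#
#   python import_dynuser.py -d /srv/roundup/tracker -f sap_cc -f department \
#       -u -v users.csv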
And you're done!
"""
import sys, os, csv, time, mimetypes

try:
    import cElementTree as ElementTree
except ImportError:
    from elementtree import ElementTree

from roundup import instance, hyperdb, date, support, password
from roundup.anypy import http_, urllib_
from roundup.anypy.strings import s2b, us2s

today = date.Date('.')

DL_URL = 'http://sourceforge.net/tracker/download.php?group_id=%(group_id)s&atid=%(atid)s&aid=%(aid)s'

def get_url(aid):
    """ so basically we have to jump through hoops, given an artifact id, to
    figure what the URL should be to access that artifact, and hence any
    attached files."""
    # first we hit this URL...
    conn = http_.client.HTTPConnection("sourceforge.net")
    conn.request("GET", "/support/tracker.php?aid=%s" % aid)
    response = conn.getresponse()
    # which should respond with a redirect to the correct url which has the
    # magic "group_id" and "atid" values in it that we need
    assert response.status == 302, 'response code was %s' % response.status
    location = response.getheader('location')
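# A minimal sketch, not part of the original script, of how the group_id and
# atid could be pulled out of the redirect location and plugged into DL_URL.
# The example location string and the helper name are made up; the original
# continuation of get_url() may parse the URL differently. The stdlib parsers
# are imported directly here rather than through roundup's anypy wrappers.
try:
    from urllib.parse import urlparse, parse_qs   # Python 3
except ImportError:
    from urlparse import urlparse, parse_qs       # Python 2

def _ids_from_location(location):
    # e.g. 'http://sourceforge.net/tracker/?group_id=1234&atid=5678&func=detail&aid=999'
    params = parse_qs(urlparse(location).query)
    return params['group_id'][0], params['atid'][0]

# group_id, atid = _ids_from_location(location)
# file_url = DL_URL % {'group_id': group_id, 'atid': atid, 'aid': aid}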