def test_migration_error_with_removed_language(self):
    """Data update must fail when the DB references an unknown language code."""
    session = Store(create_database(GLSettings.db_uri))
    session.add(EnabledLanguage('zyx'))
    session.commit()
    session.close()
    self.assertRaises(Exception, migration.perform_data_update, self.db_file)
def do_statspollute(dbfile): # source gl_database = create_database("sqlite:%s" % dbfile) source_store = Store(gl_database) stats = source_store.find(models.Stats) counter = 0 for s in stats: source_store.remove(s) counter += 1 print "removed %d entry in stats" % counter counter = 0 # 21 days in the past for past_hours in xrange(24 * 7 * 3): past_hours += 4 when = utc_past_date(hours=past_hours) newstat = models.Stats() newstat.freemb = randint(1000, 1050) newstat.year = when.isocalendar()[0] newstat.week = when.isocalendar()[1] level = round((randint(0, 1000) / 240.0), 1) - 2 def random_pollution(): return int(randint(0,11) + (5 * level)) activity_fake = { 'successfull_logins': random_pollution(), 'failed_logins': random_pollution(), 'started_submissions': random_pollution(), 'completed_submissions': random_pollution(), 'uploaded_files': int(randint(0,11) + (5 * level)), 'appended_files': random_pollution(), 'wb_comments': random_pollution(), 'wb_messages': random_pollution(), 'receiver_comments': random_pollution(), 'receiver_messages': random_pollution() } for k, v in activity_fake.iteritems(): if v < 0: activity_fake[k] = 0 newstat.start = when newstat.summary = activity_fake counter += 1 source_store.add(newstat) print "Committing %d stats" % counter source_store.commit()
def testCreate(self):
    """A newly added MusicTrack belongs to the store and can be fetched back."""
    store = Store(self.db)
    track = models.MusicTrack()
    track.title = u"The Beautiful Ones"
    store.add(track)
    store.commit()
    self.assertTrue(Store.of(track) is store)
    fetched = store.find(
        models.MusicTrack,
        models.MusicTrack.title == u"The Beautiful Ones").one()
    self.assertTrue(track is fetched)
def testCreate(self):
    """A newly added MusicAlbum belongs to the store and can be fetched back."""
    store = Store(self.db)
    album = models.MusicAlbum()
    album.title = u"The Lady Dance"
    store.add(album)
    store.commit()
    self.assertTrue(Store.of(album) is store)
    fetched = store.find(
        models.MusicAlbum,
        models.MusicAlbum.title == u"The Lady Dance").one()
    self.assertTrue(album is fetched)
def testCreate(self):
    """A newly added VideoFile belongs to the store and can be fetched back."""
    store = Store(self.db)
    video = models.VideoFile()
    video.filename = u"/home/user/foo.mpg"
    store.add(video)
    store.commit()
    self.assertTrue(Store.of(video) is store)
    fetched = store.find(
        models.VideoFile,
        models.VideoFile.filename == u"/home/user/foo.mpg").one()
    self.assertTrue(video is fetched)
def testCreate(self):
    """A newly added PhotoAlbum belongs to the store and can be fetched back."""
    store = Store(self.db)
    album = models.PhotoAlbum()
    album.title = u"Photo Album Title"
    album.description = u"This is a photo description"
    store.add(album)
    store.commit()
    self.assertTrue(Store.of(album) is store)
    fetched = store.find(
        models.PhotoAlbum,
        models.PhotoAlbum.title == u"Photo Album Title").one()
    self.assertTrue(album is fetched)
def testCreate(self):
    """A newly added PhotoImage belongs to the store and can be fetched back."""
    store = Store(self.db)
    image = models.PhotoImage()
    image.filename = u"/home/user/photo.jpg"
    store.add(image)
    store.commit()
    self.assertTrue(Store.of(image) is store)
    fetched = store.find(
        models.PhotoImage,
        models.PhotoImage.filename == u"/home/user/photo.jpg").one()
    self.assertTrue(image is fetched)
def testCreate(self):
    """A newly added MusicPlaylist belongs to the store and can be fetched back."""
    store = Store(self.db)
    playlist = models.MusicPlaylist()
    playlist.title = u"The Ultimate Heavy Metal Goth Emo"
    store.add(playlist)
    store.commit()
    self.assertTrue(Store.of(playlist) is store)
    fetched = store.find(
        models.MusicPlaylist,
        models.MusicPlaylist.title == u"The Ultimate Heavy Metal Goth Emo").one()
    self.assertTrue(playlist is fetched)
def testCreate(self):
    '''A newly added MusicAlbum belongs to the store and can be fetched back.'''
    store = Store(self.db)
    album = models.MusicAlbum()
    album.title = u'The Lady Dance'
    store.add(album)
    store.commit()
    self.assertTrue(Store.of(album) is store)
    fetched = store.find(
        models.MusicAlbum,
        models.MusicAlbum.title == u'The Lady Dance').one()
    self.assertTrue(album is fetched)
def testCreate(self):
    '''A newly added MusicTrack belongs to the store and can be fetched back.'''
    store = Store(self.db)
    track = models.MusicTrack()
    track.title = u'The Beautiful Ones'
    store.add(track)
    store.commit()
    self.assertTrue(Store.of(track) is store)
    fetched = store.find(
        models.MusicTrack,
        models.MusicTrack.title == u'The Beautiful Ones').one()
    self.assertTrue(track is fetched)
def testCreate(self):
    '''A newly added MusicPlaylist belongs to the store and can be fetched back.'''
    store = Store(self.db)
    playlist = models.MusicPlaylist()
    playlist.title = u'The Ultimate Heavy Metal Goth Emo'
    store.add(playlist)
    store.commit()
    self.assertTrue(Store.of(playlist) is store)
    fetched = store.find(
        models.MusicPlaylist,
        models.MusicPlaylist.title == u'The Ultimate Heavy Metal Goth Emo').one()
    self.assertTrue(playlist is fetched)
def testCreate(self):
    '''A newly added PhotoImage belongs to the store and can be fetched back.'''
    store = Store(self.db)
    image = models.PhotoImage()
    image.filename = u'/home/user/photo.jpg'
    store.add(image)
    store.commit()
    self.assertTrue(Store.of(image) is store)
    fetched = store.find(
        models.PhotoImage,
        models.PhotoImage.filename == u'/home/user/photo.jpg').one()
    self.assertTrue(image is fetched)
def testCreate(self):
    '''A newly added VideoFile belongs to the store and can be fetched back.'''
    store = Store(self.db)
    video = models.VideoFile()
    video.filename = u'/home/user/foo.mpg'
    store.add(video)
    store.commit()
    self.assertTrue(Store.of(video) is store)
    fetched = store.find(
        models.VideoFile,
        models.VideoFile.filename == u'/home/user/foo.mpg').one()
    self.assertTrue(video is fetched)
def testCreate(self):
    '''A newly added PhotoAlbum belongs to the store and can be fetched back.'''
    store = Store(self.db)
    album = models.PhotoAlbum()
    album.title = u'Photo Album Title'
    album.description = u'This is a photo description'
    store.add(album)
    store.commit()
    self.assertTrue(Store.of(album) is store)
    fetched = store.find(
        models.PhotoAlbum,
        models.PhotoAlbum.title == u'Photo Album Title').one()
    self.assertTrue(album is fetched)
def getStore(url, create=False):
    """Open and return a Storm Store for *url*.

    Registers the "sqlitefk" scheme (sqlite with foreign-key support)
    before connecting.  When *create* is true, the statements found in
    the adjacent schema.sql (separated by blank lines) are executed and
    an initial Meta row with key u"created" is committed.

    :param url: Storm database URL
    :param create: when true, build the schema before returning
    :return: a connected Store
    """
    # register new Storm scheme
    register_scheme("sqlitefk", ForeignKeysSQLite)
    store = Store(create_database(url))
    if create:
        schema_path = os.path.join(os.path.dirname(__file__), "schema.sql")
        # open() + context manager instead of the deprecated file()
        # builtin, which leaked the file handle (it was never closed).
        with open(schema_path, "r") as schema_file:
            statements = schema_file.read().split("\n\n")
        for cmd in statements:
            store.execute(cmd)
        version = Meta()
        version.key = u"created"
        store.add(version)
        store.commit()
    return store
class TestChangeTracker(object):
    # Tests for ChangeTracker/ChangeHistory: every write to a validated
    # attribute should be recorded as a row in the ``history`` table.

    class A(object):
        # Minimal Storm model whose textval/intval writes go through a
        # ChangeTracker validator bound to the "history" audit table.
        __storm_table__ = 'testob'
        changehistory = ChangeHistory.configure("history")
        clt = ChangeTracker(changehistory)
        id = Int(primary=1)
        textval = Unicode(validator=clt)
        intval = Int(validator=clt)

    def setUp(self):
        # Fresh in-memory sqlite store with the audit table and the
        # model table created by hand.
        database = create_database('sqlite:')
        self.store = Store(database)
        self.store.execute("""
            CREATE table history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                ref_class VARCHAR(200),
                ref_pk VARCHAR(200),
                ref_attr VARCHAR(200),
                new_value VARCHAR(200),
                ctime DATETIME,
                cuser INT
            )
        """)
        self.store.execute("""
            CREATE TABLE testob (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                textval VARCHAR(200),
                intval INT,
                dateval DATETIME
            )""")

    def tearDown(self):
        # Discard everything written during the test.
        self.store.rollback()

    def test_calls_next_validator(self):
        # The tracker must chain to next_validator and store its result.
        clt = ChangeTracker(ChangeHistory.configure("history"),
                            next_validator=lambda ob, attr, v: v * 2)

        class B(self.A):
            textval = Unicode(validator=clt)

        b = B()
        b.textval = u'bork'
        assert b.textval == u'borkbork'

    def test_adds_log_entries(self):
        # Two writes to the same attribute -> two audit rows, in order.
        class B(self.A):
            clt = ChangeTracker(ChangeHistory.configure("history"))
            textval = Unicode(validator=clt)

        b = self.store.add(B())
        b.textval = u'pointless'
        b.textval = u'aimless'
        changes = list(self.store.find(b.clt.change_cls))
        assert_equal(len(changes), 2)
        assert_equal(changes[0].new_value, 'pointless')
        assert_equal(changes[1].new_value, 'aimless')

    def test_value_type_preserved(self):
        # The audit row keeps the python type of the recorded value.
        a = self.store.add(self.A())
        a.textval = u'one'
        a.intval = 1
        changes = list(self.store.find(a.clt.change_cls))
        assert_equal(type(changes[0].new_value), unicode)
        assert_equal(type(changes[1].new_value), int)

    def test_ctime_set(self):
        # The change timestamp falls between test start and "now".
        start = datetime.now()
        a = self.store.add(self.A())
        a.textval = u'x'
        changes = list(self.store.find(a.clt.change_cls))
        assert_equal(type(changes[0].ctime), datetime)
        assert start < changes[0].ctime < datetime.now()

    def test_cuser_set(self):
        # A configured getuser callback supplies the audit row's cuser.
        def getuser():
            return u'Fred'
        history = ChangeHistory.configure("history", getuser=getuser,
                                          usertype=Unicode)

        class B(self.A):
            textval = Unicode(validator=ChangeTracker(history))

        b = self.store.add(B())
        b.textval = u'foo'
        changes = self.store.find(history)
        assert_equal(changes[0].cuser, u'Fred')

    def test_changes_for_returns_change_history(self):
        # changes_for(ob) filters the audit trail to the given instance.
        a = self.store.add(self.A())
        b = self.store.add(self.A())
        a.id = 1
        a.textval = u'one'
        a.textval = u'two'
        b.id = 2
        b.textval = u'ein'
        b.textval = u'zwei'
        assert_equal([c.new_value for c in a.changehistory.changes_for(a)],
                     [u'one', u'two'])
        assert_equal([c.new_value for c in a.changehistory.changes_for(b)],
                     [u'ein', u'zwei'])
x = self.x, y = self.y ) def __repr__( self ): return '<Point2D x:%s, y:%s>' % ( self.x, self.y ) database = create_database('sqlite://:memory:') store = Store(database) store.execute('''CREATE TABLE point2d ( id INTEGER PRIMARY KEY, x INTEGER, y INTEGER )''') p1 = Point2D(10,10) p2 = Point2D(10,20) p3 = Point2D(20,20) p4 = Point2D(20,10) store.add( p1 ) store.add( p2 ) store.add( p3 ) store.add( p4 ) points = store.find(Point2D, Point2D.x == 10) print points[0] + points[1]
class IssuesLog():
    # Base class for issue-log builders: replays every recorded change of
    # every issue into a log table, one row per state.  Subclasses supply
    # the backend-specific SQL and field mapping via the abstract methods.

    def __init__(self):
        self._connect()
        # it is not incremental so we first drop the table
        self._drop_db()
        self._create_db()

    def _connect(self):
        # Connection parameters come from the project-wide Config object.
        opts = Config()
        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def _create_db(self):
        self.store.execute(self._get_sql_create())

    def _drop_db(self):
        self.store.execute(self._get_sql_drop())

    def _get_people_id(self, email):
        """
        Gets the id of an user
        """
        # Lookup by email first, then by user_id; on total miss the
        # person is inserted on the fly.
        try:
            p = self.store.find(DBPeople, DBPeople.email == email).one()
            return p.id
        except (AttributeError, NotOneError):
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def _get_sql_drop(self):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _get_sql_create(self):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def _copy_issue_ext(self, aux, db_ilog):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    # TODO: reuse _copy_standard_values
    def _copy_issue(self, db_ilog):
        """
        This method returns a copy of the DB*Log object
        """
        aux = self._get_dbissues_object(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.change_id = db_ilog.change_id
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        # Delegate backend-specific fields to the subclass.
        aux = self._copy_issue_ext(aux, db_ilog)
        return aux

    def _assign_values(self, db_ilog, field, value):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in order to get
        the initial state of the bug
        """
        # NOTE(review): SQL is built by string interpolation; issue_id
        # comes from our own DB, but parameterized queries would be safer.
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes " +
                                    "WHERE issue_id=%s" % (db_ilog.issue_id))
        for f in fields:
            # Oldest recorded old_value of each field == its initial value.
            values = self.store.execute(
                "SELECT old_value FROM changes WHERE issue_id=%s AND \
                field=\"%s\" ORDER BY changed_on LIMIT 1"
                % (db_ilog.issue_id, f[0]))
            for v in values:
                db_ilog = self._assign_values(db_ilog, f[0], v[0])
        # Initial status does not have a real change
        db_ilog.change_id = 0
        return db_ilog

    def _get_dbissues_object(self, issue_name, tracker_id):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _copy_standard_values(self, issue, issue_log):
        """
        Copy the standard values from the issue object to the issue_log
        object
        """
        issue_log.issue_id = issue.id
        issue_log.type = issue.type
        issue_log.summary = issue.summary
        issue_log.description = issue.description
        issue_log.status = issue.status
        issue_log.resolution = issue.resolution
        issue_log.priority = issue.priority
        issue_log.submitted_by = issue.submitted_by
        issue_log.date = issue.submitted_on
        issue_log.assigned_to = issue.assigned_to
        return issue_log

    def _print_final_msg(self):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _get_changes(self, issue_id):
        aux = self.store.execute("SELECT id, field, new_value, changed_by, \
        changed_on FROM changes where issue_id=%s" % (issue_id))
        return aux

    def _post_history(self, issue_id):
        """
        Abstract method for inserting extra data usign full issue history
        """
        pass

    def run(self):
        # Replay every issue: insert its initial state, then one log row
        # per recorded change.
        ndone = 0
        issues = self.store.find(DBIssue)
        total = str(issues.count())
        print("[IssuesLog] Total issues to analyze: " + str(issues.count()))
        for i in issues:
            if (ndone % 1000 == 0):
                print("[IssuesLog] Analyzed " + str(ndone) + "/" + str(total))
            db_ilog = self._get_dbissues_object(i.issue, i.tracker_id)
            db_ilog = self._copy_standard_values(i, db_ilog)
            final_status = db_ilog.status
            db_ilog = self._build_initial_state(db_ilog)
            self.store.add(db_ilog)
            self.store.flush()
            # the code below gets all the changes and insert a row per change
            changes = self._get_changes(db_ilog.issue_id)
            for ch in changes:
                change_id = ch[0]
                field = ch[1]
                new_value = ch[2]
                changed_by = ch[3]
                date = ch[4]
                # we need a new object to be inserted in the database
                db_ilog = self._copy_issue(db_ilog)
                db_ilog.date = date
                db_ilog.change_id = change_id
                db_ilog.submitted_by = changed_by
                db_ilog = self._assign_values(db_ilog, field, new_value)
                try:
                    self.store.add(db_ilog)
                    self.store.flush()
                # NOTE(review): bare except hides any failure kind; it
                # should at least be narrowed to Exception.
                except:
                    # self.store.rollback() # is this useful in this context?
                    traceback.print_exc()
            ##self._post_history(db_ilog, final_status)
            self.store.commit()
            ndone += 1
        self._print_final_msg()
class IssuesLog():
    # Issue-log builder for the Bugzilla and Jira backends: replays the
    # recorded changes of every issue into a per-backend log table, one
    # row per issue state.  Incremental: resumes after the last logged
    # change date.

    def __init__(self, backend_name):
        # backend_name selects the behavior everywhere: 'bg' or 'jira'.
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        # Connection parameters come from the project-wide Config object.
        opts = Config()
        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        print("self.backend_name = %s" % (self.backend_name))
        if self.backend_is_bugzilla():
            self.store.execute(__sql_table_bugzilla__)
        elif self.backend_is_jira():
            self.store.execute(__sql_table_jira__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of DBBugzilla/JiraIssuesLog object
        """
        if self.backend_is_bugzilla():
            aux = DBBugzillaIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            # Standard issue fields.
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to
            #aux = DBBugzillaIssuesLog (db_ilog.issue_id)
            # Bugzilla-specific fields.
            aux.alias = db_ilog.alias
            aux.delta_ts = db_ilog.delta_ts
            aux.reporter_accessible = db_ilog.reporter_accessible
            aux.cclist_accessible = db_ilog.cclist_accessible
            aux.classification_id = db_ilog.classification_id
            aux.classification = db_ilog.classification
            aux.product = db_ilog.product
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.rep_platform = db_ilog.rep_platform
            aux.op_sys = db_ilog.op_sys
            aux.dup_id = db_ilog.dup_id
            aux.bug_file_loc = db_ilog.bug_file_loc
            aux.status_whiteboard = db_ilog.status_whiteboard
            aux.target_milestone = db_ilog.target_milestone
            aux.votes = db_ilog.votes
            aux.everconfirmed = db_ilog.everconfirmed
            aux.qa_contact = db_ilog.qa_contact
            aux.estimated_time = db_ilog.estimated_time
            aux.remaining_time = db_ilog.remaining_time
            aux.actual_time = db_ilog.actual_time
            aux.deadline = db_ilog.deadline
            aux.keywords = db_ilog.keywords
            aux.cc = db_ilog.cc
            aux.group_bugzilla = db_ilog.group_bugzilla
            aux.flag = db_ilog.flag
            return aux
        elif self.backend_is_jira():
            aux = DBJiraIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            # Standard issue fields.
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to
            # Jira-specific fields.
            aux.link = db_ilog.link
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.issue_key = db_ilog.issue_key
            aux.environment = db_ilog.environment
            aux.project = db_ilog.project
            aux.project_key = db_ilog.project_key
            aux.security = db_ilog.security
            return aux

    def get_people_id(self, email, tracker_id):
        """
        Gets the id of an user
        """
        p = self.store.find(DBPeople, DBPeople.email == email).one()
        ##
        ## the code below was created ad-hoc for KDE solid
        ##
        try:
            return p.id
        except AttributeError:
            # Fall back to matching on user_id; insert a new person on
            # total miss.
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email, tracker_id)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def get_last_change_date(self):
        """
        This method gets the date of the last change included in the log table
        """
        if self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog)
            aux = result.order_by(Desc(DBBugzillaIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        elif self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog)
            aux = result.order_by(Desc(DBJiraIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        # Empty log table (or unknown backend): nothing logged yet.
        return None

    def get_issues_changed_since(self, date):
        """
        This method fetchs the issues changes since date
        """
        #SELECT DISTINCT(issues.id) FROM issues, changes
        #WHERE issues.id = changes.issue_id
        #AND (issues.submitted_on >= '2012-02-28 12:34:44'
        #  OR changes.changed_on >= '2012-02-28 12:34:44');
        result = self.store.find(DBIssue,
                                 DBChange.issue_id == DBIssue.id,
                                 Or(DBIssue.submitted_on > date,
                                    DBChange.changed_on > date
                                    )).group_by(DBIssue.id)
        return result

    def get_previous_state(self, issue_id):
        """
        This method returns a db_ilog object with the last row found in
        the log table
        """
        db_ilog = None
        if self.backend_is_jira():
            rows = self.store.find(DBJiraIssuesLog,
                                   DBJiraIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBJiraIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBJiraIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.issue_key = aux.issue_key
                db_ilog.link = aux.link
                db_ilog.environment = aux.environment
                db_ilog.security = aux.security
                db_ilog.updated = aux.updated
                db_ilog.version = aux.version
                db_ilog.component = aux.component
                db_ilog.votes = aux.votes
                db_ilog.project = aux.project
                db_ilog.project_id = aux.project_id
                db_ilog.project_key = aux.project_key
        else:  # elif self.backend_is_bugzilla():
            rows = self.store.find(DBBugzillaIssuesLog,
                                   DBBugzillaIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBBugzillaIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBBugzillaIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.alias = aux.alias
                db_ilog.delta_ts = aux.delta_ts
                db_ilog.reporter_accessible = aux.reporter_accessible
                db_ilog.cclist_accessible = aux.cclist_accessible
                db_ilog.classification_id = aux.classification_id
                db_ilog.classification = aux.classification
                db_ilog.product = aux.product
                db_ilog.component = aux.component
                db_ilog.version = aux.version
                db_ilog.rep_platform = aux.rep_platform
                db_ilog.op_sys = aux.op_sys
                db_ilog.dup_id = aux.dup_id
                db_ilog.bug_file_loc = aux.bug_file_loc
                db_ilog.status_whiteboard = aux.status_whiteboard
                db_ilog.target_milestone = aux.target_milestone
                db_ilog.votes = aux.votes
                db_ilog.everconfirmed = aux.everconfirmed
                db_ilog.qa_contact = aux.qa_contact
                db_ilog.estimated_time = aux.estimated_time
                db_ilog.remaining_time = aux.remaining_time
                db_ilog.actual_time = aux.actual_time
                db_ilog.deadline = aux.deadline
                db_ilog.keywords = aux.keywords
                db_ilog.cc = aux.cc
                db_ilog.group_bugzilla = aux.group_bugzilla
                db_ilog.flag = aux.flag
        return db_ilog

    def issue_is_new(self, issue_id):
        """
        This method returns True if the issue is not logged in the log table
        """
        if self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog,
                                     DBJiraIssuesLog.issue_id == issue_id)
        elif self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog,
                                     DBBugzillaIssuesLog.issue_id == issue_id)
        return (result.count() == 0)

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in order to get
        the initial state of the bug
        """
        # NOTE(review): SQL built by string interpolation; parameterized
        # queries would be safer even for internal ids.
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes\
        where issue_id=%s" % (db_ilog.issue_id))
        for f in fields:
            # Oldest recorded old_value of a field == its initial value.
            value = self.store.execute("SELECT old_value FROM changes \
            WHERE issue_id=%s AND field=\"%s\" ORDER BY changed_on LIMIT 1"
                % (db_ilog.issue_id, f[0]))
            for v in value:
                if self.backend_is_bugzilla():
                    # Bugzilla section
                    #
                    if f[0] in bg_issues_links:
                        table_field = bg_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0],
                                self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'alias':
                            db_ilog.alias = v[0]
                        elif table_field == 'reporter_accessible':
                            db_ilog.reporter_accessible = v[0]
                        elif table_field == 'cclist_accessible':
                            db_ilog.cclist_accessible = v[0]
                        elif table_field == 'product':
                            db_ilog.product = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'version':
                            db_ilog.version = v[0]
                        elif table_field == 'rep_platform':
                            db_ilog.rep_platform = v[0]
                        elif table_field == 'op_sys':
                            db_ilog.op_sys = v[0]
                        elif table_field == 'bug_file_loc':
                            db_ilog.bug_file_loc = v[0]
                        elif table_field == 'status_whiteboard':
                            db_ilog.status_whiteboard = v[0]
                        elif table_field == 'target_milestone':
                            db_ilog.target_milestone = v[0]
                        elif table_field == 'votes':
                            db_ilog.votes = v[0]
                        elif table_field == 'everconfirmed':
                            db_ilog.everconfirmed = v[0]
                        elif table_field == 'qa_contact':
                            db_ilog.qa_contact = v[0]
                        elif table_field == 'keywords':
                            # NOTE(review): 'Keywords' (capital K) does not
                            # match the 'keywords' attribute used everywhere
                            # else — looks like a latent bug; confirm against
                            # the DBBugzillaIssuesLog model.
                            db_ilog.Keywords = v[0]
                        elif table_field == 'cc':
                            db_ilog.cc = v[0]
                if self.backend_is_jira():
                    # Jira section
                    #
                    if f[0] in jira_issues_links:
                        table_field = jira_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(v[0])
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'link':
                            db_ilog.link = v[0]
                        elif table_field == 'environment':
                            db_ilog.environment = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'description':
                            db_ilog.description = v[0]
                        elif table_field == 'security':
                            db_ilog.security = v[0]
        return db_ilog

    def backend_is_bugzilla(self):
        return self.backend_name == 'bg'

    def backend_is_jira(self):
        return self.backend_name == 'jira'

    def get_last_values(self, issue_row):
        # Build a log object from the current issues row plus its
        # backend-specific extension row.
        i = issue_row
        db_ilog = None
        if self.backend_is_bugzilla():
            db_ilog_bugzilla = DBBugzillaIssuesLog(i.issue, i.tracker_id)
            db_ilog_bugzilla.issue_id = i.id
            db_ilog_bugzilla.type = i.type
            db_ilog_bugzilla.summary = i.summary
            db_ilog_bugzilla.description = i.description
            db_ilog_bugzilla.status = i.status
            db_ilog_bugzilla.resolution = i.resolution
            db_ilog_bugzilla.priority = i.priority
            db_ilog_bugzilla.submitted_by = i.submitted_by
            db_ilog_bugzilla.date = i.submitted_on
            db_ilog_bugzilla.assigned_to = i.assigned_to

            ib = self.store.find(DBBugzillaIssueExt, \
                DBBugzillaIssueExt.issue_id == db_ilog_bugzilla.issue_id).one()

            ####
            db_ilog_bugzilla.alias = ib.alias
            db_ilog_bugzilla.delta_ts = ib.delta_ts
            db_ilog_bugzilla.reporter_accessible = ib.reporter_accessible
            db_ilog_bugzilla.cclist_accessible = ib.cclist_accessible
            db_ilog_bugzilla.classification_id = ib.classification_id
            db_ilog_bugzilla.classification = ib.classification
            db_ilog_bugzilla.product = ib.product
            db_ilog_bugzilla.component = ib.component
            db_ilog_bugzilla.version = ib.version
            db_ilog_bugzilla.rep_platform = ib.rep_platform
            db_ilog_bugzilla.op_sys = ib.op_sys
            db_ilog_bugzilla.dup_id = ib.dup_id
            db_ilog_bugzilla.bug_file_loc = ib.bug_file_loc
            db_ilog_bugzilla.status_whiteboard = ib.status_whiteboard
            db_ilog_bugzilla.target_milestone = ib.target_milestone
            db_ilog_bugzilla.votes = ib.votes
            db_ilog_bugzilla.everconfirmed = ib.everconfirmed
            db_ilog_bugzilla.qa_contact = ib.qa_contact
            db_ilog_bugzilla.estimated_time = ib.estimated_time
            db_ilog_bugzilla.remaining_time = ib.remaining_time
            db_ilog_bugzilla.actual_time = ib.actual_time
            db_ilog_bugzilla.deadline = ib.deadline
            db_ilog_bugzilla.keywords = ib.keywords
            db_ilog_bugzilla.cc = ib.cc
            db_ilog_bugzilla.group_bugzilla = ib.group_bugzilla
            db_ilog_bugzilla.flag = ib.flag
            db_ilog = db_ilog_bugzilla
        elif self.backend_is_jira():
            db_ilog = DBJiraIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            ib = self.store.find(DBJiraIssueExt, \
                DBJiraIssueExt.issue_id == db_ilog.issue_id).one()

            db_ilog.issue_key = ib.issue_key
            db_ilog.link = ib.link
            db_ilog.environment = ib.environment
            db_ilog.security = ib.security
            db_ilog.updated = ib.updated
            db_ilog.version = ib.version
            db_ilog.component = ib.component
            db_ilog.votes = ib.votes
            db_ilog.project = ib.project
            db_ilog.project_id = ib.project_id
            db_ilog.project_key = ib.project_key
        return db_ilog

    def insert_new_bugs_created(self, date_from, date_to):
        """
        This method inserts an entry with the data of the creation time
        """
        if (not date_from) and (not date_to):
            issues = self.store.find(DBIssue)
        elif not date_from:
            issues = self.store.find(DBIssue, DBIssue.submitted_on < date_to)
        elif not date_to:
            issues = self.store.find(DBIssue, DBIssue.submitted_on > date_from)
        else:
            issues = self.store.find(DBIssue,
                                     And(DBIssue.submitted_on <= date_to,
                                         DBIssue.submitted_on > date_from))
        issues = issues.order_by(Asc(DBIssue.submitted_on))
        ## we store the initial data for each bug found
        for i in issues:
            db_ilog = self.get_last_values(i)  # from issues and change tables
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)
            printdbg("Issue #%s created at %s - date_from = %s - date_to = %s"
                     % (db_ilog.issue, db_ilog.date, date_from, date_to))

    def get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def run(self):
        # Incrementally replay changes made after the last logged date,
        # interleaving creation rows for bugs born between two changes.
        last_change_date = self.get_last_change_date()
        printdbg("Last change logged at %s" % (last_change_date))
        date_from = None
        date_to = None
        if last_change_date:
            changes = self.store.find(DBChange,
                                      DBChange.changed_on > last_change_date)
            date_from = last_change_date
        else:
            changes = self.store.find(DBChange)
        changes = changes.order_by(Asc(DBChange.changed_on))
        for ch in changes:
            # insert creation if needed
            date_to = ch.changed_on
            self.insert_new_bugs_created(date_from, date_to)
            date_from = date_to

            field = ch.field
            new_value = ch.new_value
            changed_by = ch.changed_by
            date = ch.changed_on
            issue_id = ch.issue_id
            #print("field = %s, new_value = %s, changed_by = %s, date = %s"
            #      % (field, new_value, str(changed_by), str(date)))
            db_ilog = self.get_previous_state(issue_id)
            printdbg("Issue #%s modified at %s" % (db_ilog.issue, date))
            if self.backend_is_bugzilla():
                # Bugzilla section
                #
                #
                if (field in bg_issues_links):
                    table_field = bg_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date
                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value,
                            self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'alias':
                        db_ilog.alias = new_value
                    elif table_field == 'reporter_accessible':
                        db_ilog.reporter_accessible = new_value
                    elif table_field == 'cclist_accessible':
                        db_ilog.cclist_accessible = new_value
                    elif table_field == 'product':
                        db_ilog.product = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'rep_platform':
                        db_ilog.rep_platform = new_value
                    elif table_field == 'op_sys':
                        db_ilog.op_sys = new_value
                    elif table_field == 'bug_file_loc':
                        db_ilog.bug_file_loc = new_value
                    elif table_field == 'status_whiteboard':
                        db_ilog.status_whiteboard = new_value
                    elif table_field == 'target_milestone':
                        db_ilog.target_milestone = new_value
                    elif table_field == 'votes':
                        db_ilog.votes = new_value
                    elif table_field == 'everconfirmed':
                        db_ilog.everconfirmed = new_value
                    elif table_field == 'qa_contact':
                        db_ilog.qa_contact = new_value
                    elif table_field == 'keywords':
                        # NOTE(review): 'Keywords' vs 'keywords' — see
                        # build_initial_state; likely a latent bug.
                        db_ilog.Keywords = new_value
                    elif table_field == 'cc':
                        db_ilog.cc = new_value
                    try:
                        self.store.add(db_ilog)
                    # NOTE(review): bare except hides any failure kind;
                    # narrowing to Exception would be safer.
                    except:
                        traceback.print_exc()
            elif self.backend_is_jira():
                # Jira section
                #
                #
                if (field in jira_issues_links):
                    table_field = jira_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date
                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value,
                            self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'description':
                        db_ilog.description = new_value
                    elif table_field == 'link':
                        db_ilog.link = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'security':
                        db_ilog.security = new_value
                    try:
                        self.store.add(db_ilog)
                    # NOTE(review): bare except — see note above.
                    except:
                        traceback.print_exc()
        # if there are changes, it stores the last bugs after the last
        # change. If there are no changes, insert all the created bugs
        self.insert_new_bugs_created(date_from, None)
        self.store.commit()
class TableReplacer(object):
    """
    Base class used by every database Updater.

    Opens the old and the new sqlite databases, creates the schema of the
    target version (from the final SQL schema file on the last hop, or from
    the per-version model classes otherwise) and provides the generic
    migrate_* helpers that copy rows table by table.
    """

    def __init__(self, table_history, old_db_file, new_db_file, start_ver):
        """
        :param table_history: dict mapping table name -> list with one model
               class per supported DB version; a None slot means "unchanged
               since the previous non-None slot" (see get_right_model).
        :param old_db_file: path of the database migrated from.
        :param new_db_file: path of the database migrated into.
        :param start_ver: version of the old database; the migration target
               is start_ver + 1.
        :raises TypeError: when a table_history entry has the wrong length.
        :raises IOError: when the final SQL schema file is not readable.
        """
        self.table_history = table_history
        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = " [%d => %d] " % (start_ver, start_ver + 1)

        # Every table must carry exactly one model slot per supported version.
        for k, v in table_history.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSettings.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            # Last hop: build the new DB straight from the shipped SQL schema.
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info,
                                                      GLSettings.db_schema_file))

            if not os.access(GLSettings.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSettings.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSettings.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        # Some statements legitimately fail (e.g. already
                        # existing tables); log and keep going.
                        log.msg('OperationalError in "{}"'.format(create_query))

            self.store_new.commit()
            return

        # Intermediate hop: generate the target schema from the model classes.
        for k, v in self.table_history.iteritems():
            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info,
                                                             create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        """Release both database stores."""
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        """Hook executed before the per-table migration; default no-op."""
        pass

    def epilogue(self):
        """Hook executed after the per-table migration; default no-op."""
        pass

    def get_right_model(self, table_name, version):
        """
        Return the model class describing `table_name` at `version`.

        A None slot means the table is unchanged, so the most recent earlier
        non-None definition is returned. Returns None when the table does not
        exist at (or before) the requested version.

        :raises NotImplementedError: for an unknown table name.
        :raises ValueError: when version exceeds DATABASE_VERSION.
        """
        table_index = version - FIRST_DATABASE_VERSION_SUPPORTED

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        # Walk backwards to the most recent non-None definition.
        # (bugfix: the loop previously tested `version >= 0` while only
        # decrementing table_index, so with an all-None prefix it could
        # underflow into Python negative wrap-around indexing.)
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # Table did not exist in any supported version.
        return None

    def get_right_sql_version(self, model_name, version):
        """
        Return the CREATE TABLE statement for `model_name` at `version`,
        or None when the table does not exist at that version.
        """
        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        return generateCreateQuery(modelobj)

    def _perform_copy_list(self, table_name):
        """Copy every row of `table_name` from the old DB into the new one."""
        objs_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()

        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, objs_count))

        old_objects = self.store_old.find(self.get_right_model(table_name,
                                                               self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed: copy attribute by attribute.
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        """Copy the single row of `table_name` from the old DB into the new one."""
        log.msg('{} default {} migration assistant'.format(self.debug_info,
                                                           table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name,
                                                           self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed: copy attribute by attribute.
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        Stats has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data: the default
        application data is loaded by the application and stored onto
        the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        Field has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldAttr(self):
        """
        FieldAttr has been created between 22 and 23!
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("FieldAttr")

    def migrate_FieldOption(self):
        """
        FieldOption has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        FieldField has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        Step has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        StepField has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        Anomalies has been created between 14 and 15!
        should be dropped before 22
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        EventLogs has been created between 15 and 16!
        should be dropped before 20
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
class TableReplacer:
    """
    This is the base class used by every Updater

    It opens the old and the new sqlite databases, creates the schema for
    the target version and exposes the generic migrate_* helpers copying
    rows table by table.
    """

    def __init__(self, old_db_file, new_db_file, start_ver):
        # Per-version model snapshots are imported lazily, inside __init__,
        # to avoid loading every update module at package import time.
        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = " [%d => %d] " % (start_ver, start_ver + 1)

        # One model class per supported DB version (5 onwards); a None slot
        # means "unchanged since the previous non-None slot" — see
        # get_right_model for the backward walk.
        self.table_history = {
            'Node': [Node_version_5, Node_version_6, Node_version_7, Node_version_9, None, Node_version_11, None, models.Node],
            'User': [User_version_5, User_version_9, None, None, None, models.User, None, None],
            'Context': [Context_version_6, None, Context_version_7, Context_version_8, Context_version_11, None, None, models.Context],
            'Receiver': [Receiver_version_7, None, None, Receiver_version_8, Receiver_version_9, models.Receiver, None, None],
            'ReceiverFile': [models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [Notification_version_7, None, None, Notification_version_8, models.Notification, None, None, None],
            'Comment': [Comment_version_5, models.Comment, None, None, None, None, None, None],
            'InternalTip': [InternalTip_version_10, None, None, None, None, None, models.InternalTip, None],
            'InternalFile': [InternalFile_version_7, None, None, InternalFile_version_10, None, None, models.InternalFile, None],
            'WhistleblowerTip': [models.WhistleblowerTip, None, None, None, None, None, None, None],
            'ReceiverTip': [models.ReceiverTip, None, None, None, None, None, None, None],
            'ReceiverInternalTip': [models.ReceiverInternalTip, None, None, None, None, None, None, None],
            'ReceiverContext': [models.ReceiverContext, None, None, None, None, None, None, None],
            'Message': [models.Message, None, None, None, None, None, None, None],
            'Stats': [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData': [ApplicationData_version_10, None, None, None, None, None, None, models.ApplicationData],
        }

        for k, v in self.table_history.iteritems():
            # +1 because count start from 0,
            # -5 because the relase 0,1,2,3,4 are not supported anymore
            # NOTE(review): the assert message interpolates DATABASE_VERSION,
            # not the (DATABASE_VERSION + 1 - 5) length actually checked.
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "I'm expecting a table with %d statuses (%s)" % (DATABASE_VERSION, k)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            # Last hop: build the new DB straight from the shipped SQL schema.
            print "%s Acquire SQL schema %s" % (self.debug_info, GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f.readlines()).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        # Some statements legitimately fail; log and continue.
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            return

        # return here and manage the migrant versions here:
        # Intermediate hop: generate the target schema from the model classes.
        for k, v in self.table_history.iteritems():
            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info, create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        """Release both database stores."""
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        # Hook executed before the per-table migration; default no-op.
        pass

    def epilogue(self):
        # Hook executed after the per-table migration; default no-op.
        pass

    def get_right_model(self, table_name, version):
        """
        Return the model class describing `table_name` at `version`,
        walking backwards through None slots to the most recent earlier
        definition.
        """
        # Version 5 is the first supported schema, hence the -5 offset.
        table_index = (version - 5)

        if not self.table_history.has_key(table_name):
            print "Not implemented usage of get_right_model %s (%s %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            # print "Immediate return %s = %s at version %d" % \
            #     ( table_name, self.table_history[table_name][table_index], version )
            return self.table_history[table_name][table_index]

        # else, it's none, and we've to take the previous valid version
        #
        # print "Requested version %d of %s need to be collected in the past" %\
        #     (version, table_name)

        # NOTE(review): this condition tests `version`, which is never
        # decremented — only table_index decreases — so with an all-None
        # prefix the index would underflow into negative (wrap-around)
        # indexing; confirm and consider `while table_index >= 0`.
        while version >= 0:
            if self.table_history[table_name][table_index]:
                # print ".. returning %s = %s" %\
                #     ( table_name, self.table_history[table_name][table_index] )
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This never want happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return: The SQL right for the stuff we've
        """
        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        # Copy every row of `table_name` from the old DB into the new one.
        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(
                self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for k, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        # Copy the single row of `table_name` from the old DB into the new one.
        print "%s default %s migration assistant" % (self.debug_info, table_name)

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for k, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")
class IssuesLog:
    """
    Rebuilds the full history of every issue: starting from the current
    state stored in the `issues` table, it reconstructs the initial state
    and then inserts one log row per recorded change.
    """

    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        """Open the output MySQL database configured in Config."""
        opts = Config()

        self.database = create_database(
            "mysql://" + opts.db_user_out + ":"
            + opts.db_password_out + "@"
            + opts.db_hostname_out + ":"
            + opts.db_port_out + "/"
            + opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        """Create the issues-log table (schema in __sql_table__)."""
        self.store.execute(__sql_table__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBIssueLog object, so the next
        change can be applied to the fresh copy before inserting it.
        """
        aux = DBIssuesLog(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        return aux

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in order to get
        the initial state of the bug: the `old_value` of the earliest
        change of a field is the value the bug was created with.
        """
        # NOTE(review): SQL is built by string interpolation; the ids come
        # from our own DB, but parameterized queries would still be safer.
        fields = self.store.execute(
            "SELECT DISTINCT(field) FROM changes where issue_id=%s"
            % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute(
                'SELECT old_value FROM changes WHERE issue_id=%s AND field="%s" ORDER BY changed_on LIMIT 1'
                % (db_ilog.issue_id, f[0]))
            for v in value:
                # Bugzilla section
                #
                if f[0] in bg_issues_links:
                    table_field = bg_issues_links[f[0]]
                    if table_field == "summary":
                        db_ilog.summary = v[0]
                    elif table_field == "priority":
                        db_ilog.priority = v[0]
                    elif table_field == "assigned_to":
                        db_ilog.assigned_to = v[0]
                    elif table_field == "status":
                        db_ilog.status = v[0]
                    elif table_field == "resolution":
                        db_ilog.resolution = v[0]

        return db_ilog

    def run(self):
        """Replay every issue's change history into the issues-log table."""
        issues = self.store.find(DBIssue)
        for i in issues:
            db_ilog = DBIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            # Rewind to the bug's creation-time values, then store them.
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)

            # the code below gets all the changes and insert a row per change
            changes = self.store.execute(
                "SELECT field, new_value, changed_by, changed_on FROM changes where issue_id=%s"
                % (db_ilog.issue_id))

            for ch in changes:
                field = ch[0]
                new_value = ch[1]
                changed_by = ch[2]
                date = ch[3]

                # Work on a fresh copy carrying the state so far.
                db_ilog = self.copy_issue(db_ilog)

                # Bugzilla section
                #
                if field in bg_issues_links:
                    table_field = bg_issues_links[field]
                    if table_field == "summary":
                        db_ilog.summary = new_value
                    elif table_field == "priority":
                        db_ilog.priority = new_value
                    elif table_field == "assigned_to":
                        # bugfix: this compared against "assignted_to" (typo),
                        # which never matched the "assigned_to" value used in
                        # build_initial_state, silently dropping assignee
                        # changes from the log.
                        db_ilog.assigned_to = new_value
                    elif table_field == "status":
                        db_ilog.status = new_value
                    elif table_field == "resolution":
                        db_ilog.resolution = new_value

                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    try:
                        self.store.add(db_ilog)
                    except Exception:
                        # Best-effort insert: keep processing the remaining
                        # changes, but narrow the handler (was a bare except)
                        # and leave a trace of the failure.
                        traceback.print_exc()

        self.store.commit()
class NCBITaxnomyInserter(object):
    # Loads NCBI division and taxonomy-division mappings from dump files
    # into the sqlite biodb database via Storm.
    # NOTE(review): "Taxnomy" is a typo for "Taxonomy"; kept as-is because
    # external callers may reference this class name.

    def __init__(self, divisions_file_path, taxonomy_divisions_file_path):
        # NCBI division ids worth importing; rows of any other division
        # are skipped by insert_taxonomy_divisions_from_file.
        self.included_divisions= {0:"Bacteria",3:"Phages",9:"Viruses",
                                  11:"Environmental samples",
                                  1:"Invertebrates",
                                  4:"Plants and Fungi"}
        self.divisions_file_path= divisions_file_path
        self.taxonomy_divisions_file_path= taxonomy_divisions_file_path
        self.__init_database()
        # Create the tables only when they do not exist yet.
        if not self.init_tables():
            self.create_tables()

    def __init_database(self):
        """
        creates the sqlite database instance and checks if the database exists in biodb.
        """
        database= create_database("sqlite:%s" % biodb_sql_db_path)
        print "Created storm database from %s." % biodb_sql_db_path
        self.store= Store(database)

    def init_tables(self):
        # Record the table names used throughout; returns 1 when the
        # taxonomy-division table already exists in sqlite's catalogue,
        # 0 otherwise.
        self.biodb_table= "biodb_ncbi"
        self.taxonomy_division_table = "biodb_ncbi_taxonomy_division"
        self.division_table= "biodb_ncbi_division"
        #### check if the db_name exists in the database
        table_list= [table[0] for table in self.store.execute('select tbl_name from SQLITE_MASTER')]
        return 0 if self.taxonomy_division_table not in table_list else 1

    def create_tables(self):
        # DDL is assembled from the fixed table-name attributes set in
        # init_tables (no external input involved).
        self.create_taxonomy_division_string='CREATE TABLE '+ self.taxonomy_division_table +' (taxonID INTEGER PRIMARY KEY, divisionID INTEGER, FOREIGN KEY (taxonID) REFERENCES '+ self.biodb_table+'(id), FOREIGN KEY (divisionID) REFERENCES '+ self.division_table +'(id) )'
        self.create_division_string='CREATE TABLE '+ self.division_table +' (id INTEGER PRIMARY KEY, name VARCHAR)'
        self.store.execute(self.create_taxonomy_division_string)
        self.store.execute(self.create_division_string)

    def insert_division(self, div_id, name):
        # Stage (not yet committed) one NCBIDivision row.
        div= NCBIDivision()
        div.id = int(div_id)
        div.name= unicode(name)
        self.store.add(div)

    def insert_taxonomy_division(self, taxon_id, div_id):
        # Stage (not yet committed) one taxon -> division link row.
        n_tax_div= NCBITaxonomyDivision()
        n_tax_div.taxonID= int(taxon_id)
        n_tax_div.divisionID= int(div_id)
        self.store.add(n_tax_div)

    def insert_divisions_from_file(self):
        # Parse the divisions dump line by line; `sep` is a module-level
        # field separator — presumably the NCBI dump delimiter; confirm
        # against the defining module. Commits after every row.
        with open(self.divisions_file_path) as div_file:
            for line in div_file:
                cols= line.rstrip('\n').split(sep)
                div_id= cols[0]
                name= cols[2]
                self.insert_division(div_id, name)
                self.store.commit()

    def insert_taxonomy_divisions_from_file(self):
        # Import only rows whose division id is in included_divisions;
        # commits per row and reports progress every 10000 insertions.
        i=0
        with open(self.taxonomy_divisions_file_path) as tax_div_file:
            for line in tax_div_file:
                cols= line.rstrip('\n').split(sep)
                division_id= int(cols[4].strip())
                if division_id in self.included_divisions:
                    tax_id= cols[0].strip()
                    self.insert_taxonomy_division(tax_id, division_id)
                    self.store.commit()
                    i+=1
                    if i % 10000 == 0:
                        print "%d taxa inserted!" %i
)''') persons = [ Person( name=u'Amy', age=52 ), Person( name=u'Bob', age=48 ), Person( name=u'Cat', age=23 ), Person( name=u'Dan', age=17 ), Person( name=u'Edd', age=77 ), Person( name=u'Fan', age=65 ), Person( name=u'Gin', age=27 ), Person( name=u'Hil', age=30 ), Person( name=u'Iri', age=62 ), Person( name=u'Jac', age=18 ) ] for person in persons: store.add(person) def getColumnNames( row ): columns = row._storm_columns names = [] for column in columns: names.append( columns[column].name ) return names # Custom function to save a csv file def writeClassToFile( class_, store, fileHandle ): # Get the data and determine the fieldnames rows = store.find(class_) headers = [x for x in rows[0].as_dict()]
class StormStorageBackend(StorageBackend):
    """Storage back-end based on the Storm ORM framework."""

    def __init__(self):
        # The store is attached later by set_config().
        self.store = None

    def set_config(self, **kwargs):
        """Set the configuration of this back-end.

        Reads 'uri', 'log_format' and 'log_level' keyword arguments; opens
        the database and configures a stream logger.
        """
        uri = kwargs['uri']
        database = create_database(uri)
        self.store = Store(database)
        self.logger = logging.getLogger('StormStorageBackend')
        handler = logging.StreamHandler()
        formatter = logging.Formatter(kwargs['log_format'])
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        # Resolve the level name (e.g. 'DEBUG') to its numeric constant.
        self.logger.setLevel(
            logging.__getattribute__(kwargs['log_level']))

    def create_node(self, node, jid, node_config):
        """Create a PubSub node with the given configuration.

        Creates the Node, NodeConfig, Affiliation and Subscription model
        for the given node.
        """
        self.logger.debug('Creating node %s for jid %s with config %s' %
                          (node, jid, node_config))
        new_node = Node(node)
        self.store.add(new_node)
        # Start from the default configuration and overlay the caller's
        # settings; deepcopy so DEFAULT_CONFIG itself is never mutated.
        config = copy.deepcopy(DEFAULT_CONFIG)
        config.update(node_config)
        for key, value in config.items():
            new_node_config = NodeConfig(node, key, value)
            new_node_config.updated = datetime.utcnow()
            self.store.add(new_node_config)
        # The creating JID becomes owner and is subscribed immediately.
        affiliation = Affiliation(node, jid, u'owner', datetime.utcnow())
        self.store.add(affiliation)
        subscription = Subscription(node, jid, jid, u'subscribed',
                                    datetime.utcnow())
        self.store.add(subscription)

    def create_channel(self, jid):
        """Create a channel for the given JID.

        Creates all the required PubSub nodes that constitute a channel,
        with the appropriate permissions.
        """
        self.logger.debug('Creating channel for %s' % jid)
        creation_date = unicode(datetime.utcnow().isoformat())
        self.create_node(u'/user/%s/posts' % jid, jid,
                         {u'channelType': u'personal',
                          u'creationDate': creation_date,
                          u'defaultAffiliation': u'publisher',
                          u'description': u'buddycloud channel for %s' % jid,
                          u'title': jid})
        self.create_node(u'/user/%s/geo/current' % jid, jid,
                         {u'creationDate': creation_date,
                          u'description': u'Where %s is at now' % jid,
                          u'title': u'%s Current Location' % jid})
        self.create_node(u'/user/%s/geo/next' % jid, jid,
                         {u'creationDate': creation_date,
                          u'description': u'Where %s intends to go' % jid,
                          u'title': u'%s Next Location' % jid})
        self.create_node(u'/user/%s/geo/previous' % jid, jid,
                         {u'creationDate': creation_date,
                          u'description': u'Where %s has been before' % jid,
                          u'title': u'%s Previous Location' % jid})
        self.create_node(u'/user/%s/status' % jid, jid,
                         {u'creationDate': creation_date,
                          u'description': u'M000D',
                          u'title': u'%s status updates' % jid})
        self.create_node(u'/user/%s/subscriptions' % jid, jid,
                         {u'creationDate': creation_date,
                          u'description': u'Browse my interests',
                          u'title': u'%s subscriptions' % jid})
        # Single commit covering every node created above.
        self.store.commit()

    def get_node(self, node):
        """Get the requested PubSub node."""
        self.logger.debug('Getting node %s' % node)
        the_node = self.store.get(Node, node)
        self.logger.debug('Returning node %s' % the_node)
        return the_node

    def get_nodes(self):
        """Get a list of all the available PubSub nodes."""
        self.logger.debug('Getting list of available nodes.')
        node_list = self.store.find(Node)
        self.logger.debug('Returning list of available node %s' % node_list)
        return node_list

    def add_item(self, node, item_id, item):
        """Add an item to the requested PubSub node."""
        new_item = Item(node, unicode(item_id), datetime.utcnow(), item)
        self.store.add(new_item)
        self.store.commit()

    def shutdown(self):
        """Shut down this storage module - flush, commit and close the store."""
        self.store.flush()
        self.store.commit()
        self.store.close()
# instantiate with: # instance = Person() # instance.name = u'John' # instance.age = 55 def __init__( self, name, age ): self.name = name self.age = age def __repr__( self ): return '<Person (name=%s, age=%s)>' % ( self.name, self.age, ) database = create_database('sqlite://:memory:') store = Store(database) store.execute('''CREATE TABLE person ( id INTEGER PRIMARY KEY, name VARCHAR, age INTEGER )''') amy = Person( name=u'Amy', age=52 ) bob = Person( name=u'Bob', age=48 ) store.add( amy ) store.add( bob ) person = store.find(Person, Person.name == u'Bob').one() print person
class TableReplacer(object):
    """
    This is the base class used by every Updater

    It opens the old and the new sqlite databases, creates the schema for
    the target version and exposes the generic migrate_* helpers copying
    rows table by table.
    """

    def __init__(self, old_db_file, new_db_file, start_ver):
        # Per-version model snapshots are imported lazily, inside __init__,
        # to avoid loading every update module at package import time.
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = " [%d => %d] " % (start_ver, start_ver + 1)

        # One model class per supported DB version (8 onwards); a None slot
        # means "unchanged since the previous non-None slot" — see
        # get_right_model for the backward walk.
        self.table_history = {
            'Node': [Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13, Node_v_14, Node_v_16, None, Node_v_17, Node_v_18, models.Node],
            'User': [User_v_9, None, User_v_14, None, None, None, None, models.User, None, None, None, None],
            'Context': [Context_v_8, Context_v_11, None, None, Context_v_12, Context_v_13, Context_v_14, models.Context, None, None, None, None],
            'Receiver': [Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None, None, Receiver_v_15, Receiver_v_16, models.Receiver, None, None],
            'ReceiverFile': [models.ReceiverFile, None, None, None, None, None, None, None, None, None, None, None],
            'Notification': [Notification_v_8, Notification_v_14, None, None, None, None, None, Notification_v_15, Notification_v_16, models.Notification, None, None],
            'Comment': [Comment_v_14, None, None, None, None, None, None, models.Comment, None, None, None, None],
            'InternalTip': [InternalTip_v_10, None, None, InternalTip_v_14, None, None, None, models.InternalTip, None, None, None, None],
            'InternalFile': [InternalFile_v_10, None, None, models.InternalFile, None, None, None, None, None, None, None, None],
            'WhistleblowerTip': [models.WhistleblowerTip, None, None, None, None, None, None, None, None, None, None, None],
            'ReceiverTip': [models.ReceiverTip, None, None, None, None, None, None, None, None, None, None, None],
            'ReceiverInternalTip': [models.ReceiverInternalTip, None, None, None, None, None, None, None, None, None, None, None],
            'ReceiverContext': [models.ReceiverContext, None, None, None, None, None, None, None, None, None, None, None],
            'Message': [models.Message, None, None, None, None, None, None, None, None, None, None, None],
            'Stats': [Stats_v_14, None, None, None, None, None, None, Stats_v_16, None, models.Stats, None, None],
            'ApplicationData': [models.ApplicationData, None, None, None, None, None, None, None, None, None, None, None],
            'Field': [models.Field, None, None, None, None, None, None, None, None, None, None, None],
            'FieldOption': [models.FieldOption, None, None, None, None, None, None, None, None, None, None, None],
            'FieldField': [models.FieldField, None, None, None, None, None, None, None, None, None, None, None],
            'Step': [models.Step, None, None, None, None, None, None, None, None, None, None, None],
            'StepField': [models.StepField, None, None, None, None, None, None, None, None, None, None, None],
            'Anomalies': [models.Anomalies, None, None, None, None, None, None, None, None, None, None, None],
            'EventLogs': [models.EventLogs, None, None, None, None, None, None, None, None, None, None, None],
        }

        for k, v in self.table_history.iteritems():
            # +1 because count start from 0,
            # -8 because the relase befor the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(
                    length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            # Last hop: build the new DB straight from the shipped SQL schema.
            log.msg('{} Acquire SQL schema {}'.format(
                self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        # Some statements legitimately fail; log and continue.
                        log.msg(
                            'OperationalError in "{}"'.format(create_query))

            self.store_new.commit()
            return

        # return here and manage the migrant versions here:
        # Intermediate hop: generate the target schema from the model classes.
        for k, v in self.table_history.iteritems():
            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(
                    self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        """Release both database stores."""
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        # Hook executed before the per-table migration; default no-op.
        pass

    def epilogue(self):
        # Hook executed after the per-table migration; default no-op.
        pass

    def get_right_model(self, table_name, version):
        """
        Return the model class describing `table_name` at `version`,
        walking backwards through None slots to the most recent earlier
        definition.
        """
        # Version 8 is the first supported schema, hence the -8 offset.
        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError(
                'Version supplied must be less or equal to {}'.format(
                    DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # else, it's none, and we've to take the previous valid version
        # NOTE(review): this condition tests `version`, which is never
        # decremented — only table_index decreases — so with an all-None
        # prefix the index would underflow into negative (wrap-around)
        # indexing; confirm and consider `while table_index >= 0`.
        while version >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This never want happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return: The SQL right for the stuff we've
        """
        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        # Copy every row of `table_name` from the old DB into the new one.
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).count()

        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        # Copy the single row of `table_name` from the old DB into the new one.
        log.msg('{} default {} migration assistant'.format(
            self.debug_info, table_name))

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate it the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        has been created between 15 and 16!
        """
        if self.start_ver < 16:
            return

        self._perform_copy_list("EventLogs")
person = Reference(person_id, Person.id) store.execute("CREATE TABLE person (id INTEGER PRIMARY KEY, name VARCHAR)") store.execute( "CREATE TABLE address (id INTEGER PRIMARY KEY, address VARCHAR, person_id INTEGER, " "FOREIGN KEY(person_id) REFERENCES person(id))") person = Person() person.name = u'person' print(person) print("%r, %r" % (person.id, person.name)) # None, u'person' # Notice that person.id is None since the Person instance is not attached to a valid database store yet. store.add(person) print("%r, %r" % (person.id, person.name)) # None, u'person' # Since the store hasn't flushed the Person instance into the sqlite database yet, person.id is still None. store.flush() print("%r, %r" % (person.id, person.name)) # 1, u'person' # Now the store has flushed the Person instance, we got an id value for person. address = Address() address.person = person address.address = 'address' print("%r, %r, %r" % (address.id, address.person, address.address))
class IssuesLog():
    """
    Keeps a full history of tracker issues: every change to an issue is
    stored as a complete snapshot row in a backend-specific issues-log
    table.

    Supported backends: Bugzilla ('bg') and Jira ('jira').
    """

    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        """Open a Storm store on the output database configured in Config."""
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        """Create the backend-specific log table, if it does not exist."""
        print("self.backend_name = %s" % (self.backend_name))
        if self.backend_is_bugzilla():
            self.store.execute(__sql_table_bugzilla__)
        elif self.backend_is_jira():
            self.store.execute(__sql_table_jira__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBBugzilla/JiraIssuesLog object
        """
        if self.backend_is_bugzilla():
            aux = DBBugzillaIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            # common fields
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to
            # Bugzilla-specific fields
            aux.alias = db_ilog.alias
            aux.delta_ts = db_ilog.delta_ts
            aux.reporter_accessible = db_ilog.reporter_accessible
            aux.cclist_accessible = db_ilog.cclist_accessible
            aux.classification_id = db_ilog.classification_id
            aux.classification = db_ilog.classification
            aux.product = db_ilog.product
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.rep_platform = db_ilog.rep_platform
            aux.op_sys = db_ilog.op_sys
            aux.dup_id = db_ilog.dup_id
            aux.bug_file_loc = db_ilog.bug_file_loc
            aux.status_whiteboard = db_ilog.status_whiteboard
            aux.target_milestone = db_ilog.target_milestone
            aux.votes = db_ilog.votes
            aux.everconfirmed = db_ilog.everconfirmed
            aux.qa_contact = db_ilog.qa_contact
            aux.estimated_time = db_ilog.estimated_time
            aux.remaining_time = db_ilog.remaining_time
            aux.actual_time = db_ilog.actual_time
            aux.deadline = db_ilog.deadline
            aux.keywords = db_ilog.keywords
            aux.cc = db_ilog.cc
            aux.group_bugzilla = db_ilog.group_bugzilla
            aux.flag = db_ilog.flag
            return aux

        elif self.backend_is_jira():
            aux = DBJiraIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            # common fields
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to
            # Jira-specific fields
            aux.link = db_ilog.link
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.issue_key = db_ilog.issue_key
            aux.environment = db_ilog.environment
            aux.project = db_ilog.project
            aux.project_key = db_ilog.project_key
            aux.security = db_ilog.security
            return aux

    def get_people_id(self, email, tracker_id):
        """
        Gets the id of a user, inserting a new People row when the email
        (or user id) is unknown.
        """
        p = self.store.find(DBPeople, DBPeople.email == email).one()
        ##
        ## the code below was created ad-hoc for KDE solid
        ##
        try:
            return p.id
        except AttributeError:
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email, tracker_id)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def get_last_change_date(self):
        """
        This method gets the date of the last change included in the log table
        """
        if self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog)
            aux = result.order_by(Desc(DBBugzillaIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        elif self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog)
            aux = result.order_by(Desc(DBJiraIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        return None

    def get_issues_changed_since(self, date):
        """
        This method fetches the issues changed since date
        """
        #SELECT DISTINCT(issues.id) FROM issues, changes
        #WHERE issues.id = changes.issue_id
        #AND (issues.submitted_on >= '2012-02-28 12:34:44'
        #  OR changes.changed_on >= '2012-02-28 12:34:44');
        result = self.store.find(
            DBIssue,
            DBChange.issue_id == DBIssue.id,
            Or(DBIssue.submitted_on > date,
               DBChange.changed_on > date)).group_by(DBIssue.id)
        return result

    def get_previous_state(self, issue_id):
        """
        This method returns a db_ilog object with the last row found in
        the log table
        """
        db_ilog = None
        if self.backend_is_jira():
            rows = self.store.find(DBJiraIssuesLog,
                                   DBJiraIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBJiraIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBJiraIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.issue_key = aux.issue_key
                db_ilog.link = aux.link
                db_ilog.environment = aux.environment
                db_ilog.security = aux.security
                db_ilog.updated = aux.updated
                db_ilog.version = aux.version
                db_ilog.component = aux.component
                db_ilog.votes = aux.votes
                db_ilog.project = aux.project
                db_ilog.project_id = aux.project_id
                db_ilog.project_key = aux.project_key
        else:  # elif self.backend_is_bugzilla():
            rows = self.store.find(DBBugzillaIssuesLog,
                                   DBBugzillaIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBBugzillaIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBBugzillaIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.alias = aux.alias
                db_ilog.delta_ts = aux.delta_ts
                db_ilog.reporter_accessible = aux.reporter_accessible
                db_ilog.cclist_accessible = aux.cclist_accessible
                db_ilog.classification_id = aux.classification_id
                db_ilog.classification = aux.classification
                db_ilog.product = aux.product
                db_ilog.component = aux.component
                db_ilog.version = aux.version
                db_ilog.rep_platform = aux.rep_platform
                db_ilog.op_sys = aux.op_sys
                db_ilog.dup_id = aux.dup_id
                db_ilog.bug_file_loc = aux.bug_file_loc
                db_ilog.status_whiteboard = aux.status_whiteboard
                db_ilog.target_milestone = aux.target_milestone
                db_ilog.votes = aux.votes
                db_ilog.everconfirmed = aux.everconfirmed
                db_ilog.qa_contact = aux.qa_contact
                db_ilog.estimated_time = aux.estimated_time
                db_ilog.remaining_time = aux.remaining_time
                db_ilog.actual_time = aux.actual_time
                db_ilog.deadline = aux.deadline
                db_ilog.keywords = aux.keywords
                db_ilog.cc = aux.cc
                db_ilog.group_bugzilla = aux.group_bugzilla
                db_ilog.flag = aux.flag
        return db_ilog

    def issue_is_new(self, issue_id):
        """
        This method returns True if the issue is not logged in the log table
        """
        if self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog,
                                     DBJiraIssuesLog.issue_id == issue_id)
        elif self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog,
                                     DBBugzillaIssuesLog.issue_id == issue_id)
        return (result.count() == 0)

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in order to get
        the initial state of the bug
        """
        # FIXME: SQL built via string interpolation — should use
        # parameterized queries to avoid injection/quoting issues.
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes\
        where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute("SELECT old_value FROM changes \
            WHERE issue_id=%s AND field=\"%s\" ORDER BY changed_on LIMIT 1"
                                       % (db_ilog.issue_id, f[0]))
            for v in value:
                if self.backend_is_bugzilla():
                    # Bugzilla section
                    #
                    if f[0] in bg_issues_links:
                        table_field = bg_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'alias':
                            db_ilog.alias = v[0]
                        elif table_field == 'reporter_accessible':
                            db_ilog.reporter_accessible = v[0]
                        elif table_field == 'cclist_accessible':
                            db_ilog.cclist_accessible = v[0]
                        elif table_field == 'product':
                            db_ilog.product = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'version':
                            db_ilog.version = v[0]
                        elif table_field == 'rep_platform':
                            db_ilog.rep_platform = v[0]
                        elif table_field == 'op_sys':
                            db_ilog.op_sys = v[0]
                        elif table_field == 'bug_file_loc':
                            db_ilog.bug_file_loc = v[0]
                        elif table_field == 'status_whiteboard':
                            db_ilog.status_whiteboard = v[0]
                        elif table_field == 'target_milestone':
                            db_ilog.target_milestone = v[0]
                        elif table_field == 'votes':
                            db_ilog.votes = v[0]
                        elif table_field == 'everconfirmed':
                            db_ilog.everconfirmed = v[0]
                        elif table_field == 'qa_contact':
                            db_ilog.qa_contact = v[0]
                        elif table_field == 'keywords':
                            # fixed: was db_ilog.Keywords (typo), which set
                            # a stray attribute instead of the column
                            db_ilog.keywords = v[0]
                        elif table_field == 'cc':
                            db_ilog.cc = v[0]

                if self.backend_is_jira():
                    # Jira section
                    #
                    if f[0] in jira_issues_links:
                        table_field = jira_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            # fixed: get_people_id() requires the tracker
                            # id as second argument (as in run()); the old
                            # one-argument call raised TypeError
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'link':
                            db_ilog.link = v[0]
                        elif table_field == 'environment':
                            db_ilog.environment = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'description':
                            db_ilog.description = v[0]
                        elif table_field == 'security':
                            db_ilog.security = v[0]

        return db_ilog

    def backend_is_bugzilla(self):
        return self.backend_name == 'bg'

    def backend_is_jira(self):
        return self.backend_name == 'jira'

    def get_last_values(self, issue_row):
        """
        Build a db_ilog snapshot from an issues row plus its backend
        extension row.
        """
        i = issue_row
        db_ilog = None
        if self.backend_is_bugzilla():
            db_ilog_bugzilla = DBBugzillaIssuesLog(i.issue, i.tracker_id)
            db_ilog_bugzilla.issue_id = i.id
            db_ilog_bugzilla.type = i.type
            db_ilog_bugzilla.summary = i.summary
            db_ilog_bugzilla.description = i.description
            db_ilog_bugzilla.status = i.status
            db_ilog_bugzilla.resolution = i.resolution
            db_ilog_bugzilla.priority = i.priority
            db_ilog_bugzilla.submitted_by = i.submitted_by
            db_ilog_bugzilla.date = i.submitted_on
            db_ilog_bugzilla.assigned_to = i.assigned_to

            ib = self.store.find(DBBugzillaIssueExt, \
                DBBugzillaIssueExt.issue_id == db_ilog_bugzilla.issue_id).one()

            ####
            db_ilog_bugzilla.alias = ib.alias
            db_ilog_bugzilla.delta_ts = ib.delta_ts
            db_ilog_bugzilla.reporter_accessible = ib.reporter_accessible
            db_ilog_bugzilla.cclist_accessible = ib.cclist_accessible
            db_ilog_bugzilla.classification_id = ib.classification_id
            db_ilog_bugzilla.classification = ib.classification
            db_ilog_bugzilla.product = ib.product
            db_ilog_bugzilla.component = ib.component
            db_ilog_bugzilla.version = ib.version
            db_ilog_bugzilla.rep_platform = ib.rep_platform
            db_ilog_bugzilla.op_sys = ib.op_sys
            db_ilog_bugzilla.dup_id = ib.dup_id
            db_ilog_bugzilla.bug_file_loc = ib.bug_file_loc
            db_ilog_bugzilla.status_whiteboard = ib.status_whiteboard
            db_ilog_bugzilla.target_milestone = ib.target_milestone
            db_ilog_bugzilla.votes = ib.votes
            db_ilog_bugzilla.everconfirmed = ib.everconfirmed
            db_ilog_bugzilla.qa_contact = ib.qa_contact
            db_ilog_bugzilla.estimated_time = ib.estimated_time
            db_ilog_bugzilla.remaining_time = ib.remaining_time
            db_ilog_bugzilla.actual_time = ib.actual_time
            db_ilog_bugzilla.deadline = ib.deadline
            db_ilog_bugzilla.keywords = ib.keywords
            db_ilog_bugzilla.cc = ib.cc
            db_ilog_bugzilla.group_bugzilla = ib.group_bugzilla
            db_ilog_bugzilla.flag = ib.flag
            db_ilog = db_ilog_bugzilla

        elif self.backend_is_jira():
            db_ilog = DBJiraIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            ib = self.store.find(DBJiraIssueExt, \
                DBJiraIssueExt.issue_id == db_ilog.issue_id).one()

            db_ilog.issue_key = ib.issue_key
            db_ilog.link = ib.link
            db_ilog.environment = ib.environment
            db_ilog.security = ib.security
            db_ilog.updated = ib.updated
            db_ilog.version = ib.version
            db_ilog.component = ib.component
            db_ilog.votes = ib.votes
            db_ilog.project = ib.project
            db_ilog.project_id = ib.project_id
            db_ilog.project_key = ib.project_key

        return db_ilog

    def insert_new_bugs_created(self, date_from, date_to):
        """
        This method inserts an entry with the data of the creation time
        """
        if (not date_from) and (not date_to):
            issues = self.store.find(DBIssue)
        elif not date_from:
            issues = self.store.find(DBIssue, DBIssue.submitted_on < date_to)
        elif not date_to:
            issues = self.store.find(DBIssue, DBIssue.submitted_on > date_from)
        else:
            issues = self.store.find(
                DBIssue,
                And(DBIssue.submitted_on <= date_to,
                    DBIssue.submitted_on > date_from))

        issues = issues.order_by(Asc(DBIssue.submitted_on))
        ## we store the initial data for each bug found
        for i in issues:
            db_ilog = self.get_last_values(i)  # from issues and change tables
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)
            printdbg(
                "Issue #%s created at %s - date_from = %s - date_to = %s"
                % (db_ilog.issue, db_ilog.date, date_from, date_to))

    def get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def run(self):
        """Replay every change recorded after the last logged one."""
        last_change_date = self.get_last_change_date()
        printdbg("Last change logged at %s" % (last_change_date))

        date_from = None
        date_to = None

        if last_change_date:
            changes = self.store.find(DBChange,
                                      DBChange.changed_on > last_change_date)
            date_from = last_change_date
        else:
            changes = self.store.find(DBChange)

        changes = changes.order_by(Asc(DBChange.changed_on))

        for ch in changes:
            # insert creation if needed
            date_to = ch.changed_on
            self.insert_new_bugs_created(date_from, date_to)
            date_from = date_to

            field = ch.field
            new_value = ch.new_value
            changed_by = ch.changed_by
            date = ch.changed_on
            issue_id = ch.issue_id

            db_ilog = self.get_previous_state(issue_id)

            printdbg("Issue #%s modified at %s" % (db_ilog.issue, date))

            if self.backend_is_bugzilla():
                # Bugzilla section
                #
                #
                if (field in bg_issues_links):
                    table_field = bg_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'alias':
                        db_ilog.alias = new_value
                    elif table_field == 'reporter_accessible':
                        db_ilog.reporter_accessible = new_value
                    elif table_field == 'cclist_accessible':
                        db_ilog.cclist_accessible = new_value
                    elif table_field == 'product':
                        db_ilog.product = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'rep_platform':
                        db_ilog.rep_platform = new_value
                    elif table_field == 'op_sys':
                        db_ilog.op_sys = new_value
                    elif table_field == 'bug_file_loc':
                        db_ilog.bug_file_loc = new_value
                    elif table_field == 'status_whiteboard':
                        db_ilog.status_whiteboard = new_value
                    elif table_field == 'target_milestone':
                        db_ilog.target_milestone = new_value
                    elif table_field == 'votes':
                        db_ilog.votes = new_value
                    elif table_field == 'everconfirmed':
                        db_ilog.everconfirmed = new_value
                    elif table_field == 'qa_contact':
                        db_ilog.qa_contact = new_value
                    elif table_field == 'keywords':
                        # fixed: was db_ilog.Keywords (typo) — keyword
                        # changes were silently dropped
                        db_ilog.keywords = new_value
                    elif table_field == 'cc':
                        db_ilog.cc = new_value

                    try:
                        self.store.add(db_ilog)
                    except Exception:  # narrowed from a bare except
                        traceback.print_exc()

            elif self.backend_is_jira():
                # Jira section
                #
                #
                if (field in jira_issues_links):
                    table_field = jira_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'description':
                        db_ilog.description = new_value
                    elif table_field == 'link':
                        db_ilog.link = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'security':
                        db_ilog.security = new_value

                    try:
                        self.store.add(db_ilog)
                    except Exception:  # narrowed from a bare except
                        traceback.print_exc()

        # if there are changes, it stores the last bugs after the last
        # change. If there are no changes, insert all the created bugs
        self.insert_new_bugs_created(date_from, None)
        self.store.commit()
class StormORM(ORM):
    """
    ORM implementation backed by the Storm library.
    """

    def __init__(self, uri=None, store=None):
        """
        @param uri: Database URI following storm rules.
        @param store: Storm store. When a uri is supplied a fresh store
            is created from it and takes precedence over the store
            argument.
        """
        from storm.locals import create_database, Store

        self.uri = uri
        self.store = store
        if self.uri:
            self.store = Store(create_database(self.uri))
        if not self.store:
            raise Exception('None storm store')
        self.attrParser = StormAttributeParser()

    def _getObject(self, csvType, csvStatement):
        """
        Resolve the target object(s) for a statement.

        @param csvType: The CSVType
        @param csvStatement: The CSVStatement
        @return: a freshly instantiated object for INSERT, or the
            object(s) looked up in the database for UPDATE/DELETE
            (None, a single object, or a list when several rows match).
        """
        model = csvType.type
        row_values = csvStatement.attributes

        if csvStatement.action in [DELETE, UPDATE]:
            if csvType.hasPrimaryKey:
                # direct primary-key lookup
                return self.store.get(model, row_values[csvType.primaryKey[0]])
            # no primary key: match on every declared key column
            predicate = And([Eq(model, name, row_values[pos])
                             for pos, name in csvType.keys.iteritems()])
            matches = self.store.find(model, predicate)
            total = matches.count()
            if total == 0:
                return None
            if total == 1:
                return matches.one()
            return [row for row in matches]
        elif csvStatement.action is INSERT:
            return model()

    def executeStatement(self, csvType, csvStatement):
        """
        Execute the csv statement described by the (csvType,
        csvStatement) pair against every matching object.

        @param csvType: The CSVType
        @param csvStatement: The CSVStatement
        @return: the number of statements executed; raises ValueError
            when no target object could be resolved.
        """
        target = self._getObject(csvType, csvStatement)

        if not target:
            raise ValueError('Statement return None in line %d: %s'
                             % (csvStatement.lineNumber,
                                csvStatement.lineContent))

        targets = target if type(target) is list else [target]

        executed = 0
        for item in targets:
            self._executeStatement(item, csvType, csvStatement)
            executed += 1
        return executed

    def _executeStatement(self, obj, csvType, csvStatement):
        """
        Apply a single csv statement to one object and commit.

        @param csvType: The CSVType
        @param csvStatement: The CSVStatement
        """
        values = csvStatement.attributes
        action = csvStatement.action

        if action is INSERT:
            # set key columns first, then the remaining attributes
            for pos, name in csvType.keys.iteritems():
                setattr(obj, name, values[pos])
            for pos, name in csvType.attributes.iteritems():
                setattr(obj, name, values[pos])
            self.store.add(obj)
        elif action is UPDATE:
            for pos, name in csvType.attributes.iteritems():
                setattr(obj, name, values[pos])
        elif action is DELETE:
            self.store.remove(obj)

        self.store.commit()
# Seed the demo database with a handful of Sale rows whose dates are
# spread over the past ~40 days (relative to `today`, defined earlier,
# outside this chunk).
for description, price, date in [
    ('Cup of coffee', 2.04, today - datetime.timedelta(1)),
    ('Chocolate bar', 1.85, today - datetime.timedelta(40)),
    ('Candy', 0.99, today - datetime.timedelta(30)),
    ('Grape Juice', 3.38, today - datetime.timedelta(23)),
    ('Ice tea', 1.25, today - datetime.timedelta(10)),
    ('Cookies', 0.85, today - datetime.timedelta(5)),
    ('Noogies', 1.45, today - datetime.timedelta(2)),
        ('Chocolate bar', 1.85, today)]:
    s = Sale()
    s.description=unicode(description)
    s.price=price
    s.date=date
    store.add(s)
store.flush()


class PurchaseViewer(gtk.Window):
    """GTK window showing the purchases through a search container."""

    def __init__(self):
        gtk.Window.__init__(self)
        self.set_title('Purchases')
        # NOTE(review): get_columns() and _create_filters() are not
        # visible in this chunk — presumably defined further down in the
        # class; confirm against the full file.
        self.search = SearchContainer(self.get_columns())
        self.search.set_summary_label('price')
        self.add(self.search)
        self._setup_searching()
        self._create_filters()

    def _setup_searching(self):
        # Hook the search container up to a storm-backed query executer
        # over the module-level `store`.
        self.query = StormQueryExecuter(store)
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """

    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18
        from globaleaks.db.update_19_20 import Node_v_19, Notification_v_19, Comment_v_19, Message_v_19, \
            InternalTip_v_19, ReceiverTip_v_19, InternalFile_v_19, ReceiverFile_v_19, Receiver_v_19, \
            Context_v_19

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = " [%d => %d] " % (start_ver, start_ver + 1)

        # For each table, the model class valid at each DB version from 8
        # up to DATABASE_VERSION; None means "unchanged since the previous
        # non-None entry" (see get_right_model).
        self.table_history = {
            'Node': [Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13, Node_v_14, Node_v_16, None, Node_v_17, Node_v_18, Node_v_19, models.Node],
            'User': [User_v_9, None, User_v_14, None, None, None, None, models.User, None, None, None, None, None],
            'Context': [Context_v_8, Context_v_11, None, None, Context_v_12, Context_v_13, Context_v_14, Context_v_19, None, None, None, None, models.Context],
            'Receiver': [Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None, None, Receiver_v_15, Receiver_v_16, Receiver_v_19, None, None, models.Receiver],
            'ReceiverFile': [ReceiverFile_v_19, None, None, None, None, None, None, None, None, None, None, None, models.ReceiverFile],
            'Notification': [Notification_v_8, Notification_v_14, None, None, None, None, None, Notification_v_15, Notification_v_16, Notification_v_19, None, None, models.Notification],
            'Comment': [Comment_v_14, None, None, None, None, None, None, Comment_v_19, None, None, None, None, models.Comment],
            'InternalTip': [InternalTip_v_10, None, None, InternalTip_v_14, None, None, None, InternalTip_v_19, None, None, None, None, models.InternalTip],
            'InternalFile': [InternalFile_v_10, None, None, InternalFile_v_19, None, None, None, None, None, None, None, None, models.InternalFile],
            'WhistleblowerTip': [models.WhistleblowerTip, None, None, None, None, None, None, None, None, None, None, None, None],
            'ReceiverTip': [ReceiverTip_v_19, None, None, None, None, None, None, None, None, None, None, None, models.ReceiverTip],
            'ReceiverInternalTip': [models.ReceiverInternalTip, None, None, None, None, None, None, None, None, None, None, None, None],
            'ReceiverContext': [models.ReceiverContext, None, None, None, None, None, None, None, None, None, None, None, None],
            'Message': [Message_v_19, None, None, None, None, None, None, None, None, None, None, None, models.Message],
            'Stats': [Stats_v_14, None, None, None, None, None, None, Stats_v_16, None, models.Stats, None, None, None],
            'ApplicationData': [models.ApplicationData, None, None, None, None, None, None, None, None, None, None, None, None],
            'Field': [models.Field, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldOption': [models.FieldOption, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldField': [models.FieldField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Step': [models.Step, None, None, None, None, None, None, None, None, None, None, None, None],
            'StepField': [models.StepField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Anomalies': [models.Anomalies, None, None, None, None, None, None, None, None, None, None, None, None],
            'EventLogs': [models.EventLogs, None, None, None, None, None, None, None, None, None, None, None, None],
        }

        for k, v in self.table_history.iteritems():
            # +1 because count starts from 0,
            # -8 because the releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))

            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        for k, v in self.table_history.iteritems():
            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        """Release both the old and the new store."""
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        # hook for subclasses, run before the migration
        pass

    def epilogue(self):
        # hook for subclasses, run after the migration
        pass

    def get_right_model(self, table_name, version):
        """
        Return the model class describing `table_name` at `version`,
        falling back to the most recent earlier definition when the
        exact version slot is None. Returns None when the table did not
        exist at or before the requested version.
        """
        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        # Walk backwards to the last version at which the table changed.
        # Fixed: the previous implementation looped on `version >= 0`
        # while decrementing only table_index, which never terminated by
        # its condition and could wrap into negative list indices.
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # no model defined at or before the requested version
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return: The CREATE query for the model valid at `version`, or
            None when the table does not exist at that version.
        """
        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        """Copy every row of `table_name` from the old DB to the new one."""
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()

        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        """Copy the single row of `table_name` from the old DB to the new one."""
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        # NOTE(review): assumes exactly one row exists in the old DB —
        # .one() returning None would make the setattr loop fail.
        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        has been created between 15 and 16!
        should be dropped before 20
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
class TableReplacer: """ This is the base class used by every Updater """ def __init__(self, old_db_file, new_db_file, start_ver): from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5 from globaleaks.db.update_6_7 import Node_version_6, Context_version_6 from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \ Receiver_version_7, InternalFile_version_7 from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8 from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \ Receiver_version_9, User_version_9 from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10 from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11 self.old_db_file = old_db_file self.new_db_file = new_db_file self.start_ver = start_ver self.std_fancy = " ł " self.debug_info = " [%d => %d] " % (start_ver, start_ver + 1) self.table_history = { 'Node' : [ Node_version_5, Node_version_6, Node_version_7, Node_version_9, None, Node_version_11, None, models.Node], 'User' : [ User_version_5, User_version_9, None, None, None, models.User, None, None], 'Context' : [ Context_version_6, None, Context_version_7, Context_version_8, Context_version_11, None, None, models.Context], 'Receiver': [ Receiver_version_7, None, None, Receiver_version_8, Receiver_version_9, models.Receiver, None, None], 'ReceiverFile' : [ models.ReceiverFile, None, None, None, None, None, None, None], 'Notification': [ Notification_version_7, None, None, Notification_version_8, models.Notification, None, None, None], 'Comment': [ Comment_version_5, models.Comment, None, None, None, None, None, None], 'InternalTip' : [ InternalTip_version_10, None, None, None, None, None, models.InternalTip, None], 'InternalFile' : [ InternalFile_version_7, None, None, InternalFile_version_10, None, None, models.InternalFile, None], 
'WhistleblowerTip' : [ models.WhistleblowerTip, None, None, None, None, None, None, None], 'ReceiverTip' : [ models.ReceiverTip, None, None, None, None, None, None , None], 'ReceiverInternalTip' : [ models.ReceiverInternalTip, None, None, None, None, None, None, None], 'ReceiverContext' : [ models.ReceiverContext, None, None, None, None, None, None, None], 'Message' : [ models.Message, None, None, None, None, None, None, None], 'Stats' : [models.Stats, None, None, None, None, None, None, None], 'ApplicationData' : [ApplicationData_version_10, None, None, None, None, None, None, models.ApplicationData], } for k, v in self.table_history.iteritems(): # +1 because count start from 0, # -5 because the relase 0,1,2,3,4 are not supported anymore assert len(v) == (DATABASE_VERSION + 1 - 5), \ "I'm expecting a table with %d statuses (%s)" % (DATABASE_VERSION, k) print "%s Opening old DB: %s" % (self.debug_info, old_db_file) old_database = create_database("sqlite:%s" % self.old_db_file) self.store_old = Store(old_database) GLSetting.db_file = new_db_file new_database = create_database("sqlite:%s" % new_db_file) self.store_new = Store(new_database) if self.start_ver + 1 == DATABASE_VERSION: print "%s Acquire SQL schema %s" % (self.debug_info, GLSetting.db_schema_file) if not os.access(GLSetting.db_schema_file, os.R_OK): print "Unable to access %s" % GLSetting.db_schema_file raise Exception("Unable to access db schema file") with open(GLSetting.db_schema_file) as f: create_queries = ''.join(f.readlines()).split(';') for create_query in create_queries: try: self.store_new.execute(create_query+';') except OperationalError: print "OperationalError in [%s]" % create_query self.store_new.commit() return # return here and manage the migrant versions here: for k, v in self.table_history.iteritems(): create_query = self.get_right_sql_version(k, self.start_ver +1) if not create_query: # table not present in the version continue try: self.store_new.execute(create_query+';') except 
OperationalError as excep: print "%s OperationalError in [%s]" % (self.debug_info, create_query) raise excep self.store_new.commit() def close(self): self.store_old.close() self.store_new.close() def initialize(self): pass def epilogue(self): pass def get_right_model(self, table_name, version): table_index = (version - 5) if not self.table_history.has_key(table_name): print "Not implemented usage of get_right_model %s (%s %d)" % ( __file__, table_name, self.start_ver) raise NotImplementedError assert version <= DATABASE_VERSION, "wrong developer brainsync" if self.table_history[table_name][table_index]: # print "Immediate return %s = %s at version %d" % \ # ( table_name, self.table_history[table_name][table_index], version ) return self.table_history[table_name][table_index] # else, it's none, and we've to take the previous valid version # # print "Requested version %d of %s need to be collected in the past" %\ # (version, table_name) while version >= 0: if self.table_history[table_name][table_index]: # print ".. 
returning %s = %s" %\ # ( table_name, self.table_history[table_name][table_index] ) return self.table_history[table_name][table_index] table_index -= 1 # This never want happen return None def get_right_sql_version(self, model_name, version): """ @param model_name: @param version: @return: The SQL right for the stuff we've """ modelobj = self.get_right_model(model_name, version) if not modelobj: return None right_query = generateCreateQuery(modelobj) return right_query def _perform_copy_list(self, table_name): print "%s default %s migration assistant: #%d" % ( self.debug_info, table_name, self.store_old.find(self.get_right_model(table_name, self.start_ver)).count()) old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver)) for old_obj in old_objects: new_obj = self.get_right_model(table_name, self.start_ver + 1)() # Storm internals simply reversed for k, v in new_obj._storm_columns.iteritems(): setattr(new_obj, v.name, getattr(old_obj, v.name) ) self.store_new.add(new_obj) self.store_new.commit() def _perform_copy_single(self, table_name): print "%s default %s migration assistant" % (self.debug_info, table_name) old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one() new_obj = self.get_right_model(table_name, self.start_ver + 1)() # Storm internals simply reversed for k, v in new_obj._storm_columns.iteritems(): setattr(new_obj, v.name, getattr(old_obj, v.name) ) self.store_new.add(new_obj) self.store_new.commit() def migrate_Context(self): self._perform_copy_list("Context") def migrate_Node(self): self._perform_copy_single("Node") def migrate_User(self): self._perform_copy_list("User") def migrate_ReceiverTip(self): self._perform_copy_list("ReceiverTip") def migrate_WhistleblowerTip(self): self._perform_copy_list("WhistleblowerTip") def migrate_Comment(self): self._perform_copy_list("Comment") def migrate_InternalTip(self): self._perform_copy_list("InternalTip") def migrate_Receiver(self): 
self._perform_copy_list("Receiver") def migrate_InternalFile(self): self._perform_copy_list("InternalFile") def migrate_ReceiverFile(self): self._perform_copy_list("ReceiverFile") def migrate_Notification(self): self._perform_copy_single("Notification") def migrate_ReceiverContext(self): self._perform_copy_list("ReceiverContext") def migrate_ReceiverInternalTip(self): self._perform_copy_list("ReceiverInternalTip") def migrate_Message(self): """ has been created between 7 and 8! """ if self.start_ver < 8: return self._perform_copy_list("Message") def migrate_Stats(self): """ has been created between 9 and 10! """ if self.start_ver < 10: return self._perform_copy_list("Stats") def migrate_ApplicationData(self): """ has been created between 9 and 10! """ if self.start_ver < 10: return self._perform_copy_list("ApplicationData")
from storm.locals import Store, Int, Unicode, create_database conn = Store(create_database("postgres://*****:*****@127.0.0.1/test")) class Person(object): __storm_table__ = 'person' id = Int(primary=True) name = Unicode() person = Person() person.name = 'admin' conn.add(person) conn.flush() conn.find(Person, Person.name == 'admin').one()
class IssuesLog():
    """
    Abstract exporter that rebuilds, change by change, the full history
    of every issue into a dedicated log table. Tracker-specific
    subclasses implement the _get_sql_* / _copy_issue_ext /
    _assign_values / _get_dbissues_object / _print_final_msg hooks.
    """

    def __init__(self):
        self._connect()
        # it is not incremental so we first drop the table
        self._drop_db()
        self._create_db()

    def _connect(self):
        """Open the MySQL store using the output-database settings."""
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def _create_db(self):
        self.store.execute(self._get_sql_create())

    def _drop_db(self):
        self.store.execute(self._get_sql_drop())

    def _get_people_id(self, email):
        """
        Gets the id of an user, inserting a new People row when the
        email is unknown.
        """
        try:
            p = self.store.find(DBPeople, DBPeople.email == email).one()
            return p.id
        except (AttributeError, NotOneError):
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def _get_sql_drop(self):
        """
        Abstract method: DROP TABLE statement for the log table.
        """
        raise NotImplementedError

    def _get_sql_create(self):
        """
        Abstract method: CREATE TABLE statement for the log table.
        """
        raise NotImplementedError

    def _get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def _copy_issue_ext(self, aux, db_ilog):
        """
        Abstract method: copy tracker-specific fields of a change.
        """
        raise NotImplementedError

    # TODO: reuse _copy_standard_values
    def _copy_issue(self, db_ilog):
        """
        This method returns a copy of the DB*Log object
        """
        aux = self._get_dbissues_object(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.change_id = db_ilog.change_id
        aux.changed_by = db_ilog.changed_by
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        aux = self._copy_issue_ext(aux, db_ilog)
        return aux

    def _assign_values(self, db_ilog, field, value):
        """
        Abstract method: apply one field change to the log object.
        """
        raise NotImplementedError

    def _build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in order to
        get the initial state of the bug.

        NOTE(review): the queries interpolate values straight into SQL;
        issue_id comes from the database, but parameterized queries
        would still be safer.
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes " +
                                    "WHERE issue_id=%s" % (db_ilog.issue_id))
        for f in fields:
            values = self.store.execute(
                "SELECT old_value FROM changes WHERE issue_id=%s AND \
                field=\"%s\" ORDER BY changed_on LIMIT 1"
                % (db_ilog.issue_id, f[0]))
            for v in values:
                db_ilog = self._assign_values(db_ilog, f[0], v[0])
        return db_ilog

    def _get_dbissues_object(self, issue_name, tracker_id):
        """
        Abstract method: build a fresh DB*Log object for an issue.
        """
        raise NotImplementedError

    def _copy_standard_values(self, issue, issue_log):
        """
        Copy the standard values from the issue object to the issue_log
        object
        """
        issue_log.issue_id = issue.id
        issue_log.type = issue.type
        issue_log.summary = issue.summary
        issue_log.description = issue.description
        issue_log.status = issue.status
        issue_log.resolution = issue.resolution
        issue_log.priority = issue.priority
        issue_log.submitted_by = issue.submitted_by
        issue_log.date = issue.submitted_on
        issue_log.assigned_to = issue.assigned_to
        return issue_log

    def _print_final_msg(self):
        """
        Abstract method: final message once the export is done.
        """
        raise NotImplementedError

    def _get_changes(self, issue_id):
        aux = self.store.execute("SELECT id, field, new_value, changed_by, \
        changed_on FROM changes where issue_id=%s" % (issue_id))
        return aux

    def _post_history(self, db_ilog, final_status):
        """
        Hook for inserting extra data using the full issue history.

        BUGFIX: this was declared as _post_history(self, issue_id) while
        run() calls self._post_history(db_ilog, final_status), which
        raised TypeError; the signature now matches the call site.
        """
        pass

    def run(self):
        ndone = 0
        issues = self.store.find(DBIssue)
        total = str(issues.count())
        print ("[IssuesLog] Total issues to analyze: " + str(issues.count()))
        for i in issues:
            if (ndone % 1000 == 0):
                print ("[IssuesLog] Analyzed " + str(ndone) + "/" + str(total))

            db_ilog = self._get_dbissues_object(i.issue, i.tracker_id)
            db_ilog = self._copy_standard_values(i, db_ilog)
            final_status = db_ilog.status

            db_ilog = self._build_initial_state(db_ilog)

            self.store.add(db_ilog)
            self.store.flush()

            # the code below gets all the changes and insert a row per change
            changes = self._get_changes(db_ilog.issue_id)

            for ch in changes:
                change_id = ch[0]
                field = ch[1]
                new_value = ch[2]
                changed_by = ch[3]
                date = ch[4]
                # we need a new object to be inserted in the database
                db_ilog = self._copy_issue(db_ilog)
                db_ilog.date = date
                db_ilog.change_id = change_id
                db_ilog.changed_by = changed_by
                db_ilog = self._assign_values(db_ilog, field, new_value)

                try:
                    self.store.add(db_ilog)
                    self.store.flush()
                except Exception:
                    # BUGFIX: was a bare `except:` which also swallowed
                    # KeyboardInterrupt/SystemExit; keep the best-effort
                    # behavior but only for ordinary exceptions.
                    # self.store.rollback() # is this useful in this context?
                    traceback.print_exc()

            self._post_history(db_ilog, final_status)
            self.store.commit()
            ndone += 1
        self._print_final_msg()
class StormManager(Singleton):
    """
    Singleton wrapper around a Storm store for the icepapcms database
    (sqlite or a server backend, chosen by the ConfigManager).
    """

    log = logging.getLogger('{}.StormManager'.format(__name__))

    def __init__(self):
        pass

    @loggingInfo
    def init(self, *args):
        self.dbOK = False
        self.openDB()

    @loggingInfo
    def reset(self):
        self.closeDB()
        self.openDB()

    @loggingInfo
    def openDB(self):
        """Open the configured database, creating the sqlite file on demand."""
        try:
            self._config = ConfigManager()
            self.db = self._config.config[self._config.database]["database"]
            create_db = False
            if self.db == self._config.Sqlite:
                folder = self._config.config[self._config.database]["folder"]
                loc = folder + '/icepapcms.db'
                print("Using Sqlite database at %s" % loc)
                create_db = not os.path.exists(loc)
                if create_db:
                    print("No database file found, creating it")
                    if not os.path.exists(folder):
                        os.mkdir(folder)
                self._database = create_database("%s:%s" % (self.db, loc))
            else:
                server = self._config.config[self._config.database]["server"]
                user = self._config.config[self._config.database]["user"]
                pwd = self._config.config[self._config.database]["password"]
                scheme = "{}://{}:{}@{}/icepapcms".format(
                    self.db, user, pwd, server)
                if self.db == 'mysql':
                    self._database = MySQL(scheme)
                else:
                    self._database = create_database(scheme)

            self._store = Store(self._database)
            if create_db:
                self.dbOK = self.createSqliteDB()
            else:
                self.dbOK = True
        except Exception as e:
            self.log.error("Unexpected error on openDB: %s", e)
            self.dbOK = False

    @loggingInfo
    def createSqliteDB(self):
        """Run the bundled SQL script to create the sqlite schema."""
        try:
            sql_file = resource_filename('icepapcms.db', 'creates_sqlite.sql')
            with open(sql_file) as f:
                sql_script = f.read()
            statements = re.compile(r";[ \t]*$", re.M)
            for statement in statements.split(sql_script):
                # Remove any comments from the file
                # NOTE(review): [\n\\Z] is a character class of
                # backslash/Z/newline, probably meant (\n|\Z) — kept
                # as-is to preserve behavior; confirm intent.
                statement = re.sub(r"--.*[\n\\Z]", "", statement)
                if statement.strip():
                    create = statement + ";"
                    self._store.execute(create)
            self._store.commit()
            return True
        except Exception as e:
            self.log.error("Unexpected error on createSqliteDB: %s", e)
            return False

    @loggingInfo
    def closeDB(self):
        try:
            if self.dbOK:
                self._store.close()
            return True
        except Exception as e:
            # BUGFIX: was log.error("Unexpected error on closeDB:", e),
            # which passes `e` as a %-format arg with no placeholder and
            # makes logging raise a formatting error, losing the message.
            self.log.error("Unexpected error on closeDB: %s", e)
            self.dbOK = False
            return False

    @loggingInfo
    def store(self, obj):
        self._store.add(obj)

    @loggingInfo
    def remove(self, obj):
        self._store.remove(obj)

    @loggingInfo
    def addIcepapSystem(self, icepap_system):
        """Persist a new icepap system; returns True on success."""
        try:
            self._store.add(icepap_system)
            self.commitTransaction()
            return True
        except Exception as e:
            self.log.error(
                "some exception trying to store the icepap system "
                "%s: %s", icepap_system, e)
            return False

    @loggingInfo
    def deleteLocation(self, location):
        # sqlite has no ON DELETE CASCADE here: cascade manually
        if self.db == self._config.Sqlite:
            for system in location.systems:
                self.deleteIcepapSystem(system)
        self._store.remove(location)
        self.commitTransaction()

    @loggingInfo
    def deleteIcepapSystem(self, icepap_system):
        # sqlite has no ON DELETE CASCADE here: cascade manually
        if self.db == self._config.Sqlite:
            for driver in icepap_system.drivers:
                self.deleteDriver(driver)
        self._store.remove(icepap_system)
        self.commitTransaction()

    @loggingInfo
    def deleteDriver(self, driver):
        # remove historic configs and their parameters first
        for cfg in driver.historic_cfgs:
            for par in cfg.parameters:
                self._store.remove(par)
            self._store.remove(cfg)
        self._store.remove(driver)
        self.commitTransaction()

    @loggingInfo
    def getAllLocations(self):
        """Return {name: Location} for every stored location."""
        try:
            locations = self._store.find(Location)
            location_dict = {}
            for location in locations:
                location_dict[location.name] = location
            return location_dict
        except Exception as e:
            self.log.error("Unexpected error on getAllLocations: %s", e)
            return {}

    @loggingInfo
    def getLocation(self, name):
        return self._store.get(Location, name)

    @loggingInfo
    def getIcepapSystem(self, icepap_name):
        return self._store.get(IcepapSystem, icepap_name)

    @loggingInfo
    def existsDriver(self, mydriver, id):
        """
        Return a driver (other than mydriver) already configured with
        the given hardware ID, or None.
        """
        drivers = self._store.find(
            IcepapDriver,
            IcepapDriver.addr == IcepapDriverCfg.driver_addr,
            IcepapDriverCfg.id == CfgParameter.cfg_id,
            CfgParameter.name == str("ID"),
            CfgParameter.value == id)
        if drivers:
            for driver in drivers:
                if driver.addr != mydriver.addr:
                    return driver
            return None
        else:
            return None

    @loggingInfo
    def getLocationIcepapSystem(self, location):
        """Return {name: IcepapSystem} for all systems at a location."""
        try:
            icepaps = self._store.find(
                IcepapSystem, IcepapSystem.location_name == location)
            icepaps.order_by(IcepapSystem.name)
            ipapdict = {}
            for ipap_sys in icepaps:
                ipapdict[ipap_sys.name] = ipap_sys
            return ipapdict
        except Exception as e:
            self.log.error(
                "Unexpected error on getLocationIcepapSystem: "
                "%s", e)
            return {}

    @loggingInfo
    def rollback(self):
        self._store.rollback()

    @loggingInfo
    def commitTransaction(self):
        try:
            self._store.commit()
            return True
        except Exception:
            return False