def setup_tables(self):
    """Set up metakit tables, migrating old table layouts first.

    We check for old, incompatible table names and fix them (via
    move_old_tables) before calling the regular setup routine.  We also
    create the metakit-specific 'incrementer' view used to emulate
    auto-increment IDs, since other DB backends presumably have
    auto-increment built in.  If move_old_tables dumped our data,
    re-import it once the new tables exist.

    NOTE(review): a second definition of setup_tables appears later in
    this file and shadows this one at class-creation time -- confirm
    which version is intended to survive.
    """
    debug('setup_tables called!', 1)
    self.move_old_tables()
    # Set up our top id.  This is unique to metakit and not part of the
    # normal setup_tables routine, since other DBs will presumably have
    # auto-increment built into them.
    self.increment_vw = self.db.getas('incrementer[view:S,field:S,n:I]')
    self.vw_to_name = {}
    self.increment_dict = {}
    rdatabase.RecData.setup_tables(self)
    # If we've dumped our data, we want to re-import it!
    if self.import_when_done:
        old_db, ifi = self.import_when_done
        from gourmet.importers.gxml2_importer import converter
        # Recipe import occupies the second half of the progress bar.
        converter(
            ifi, self, threaded=False,
            progress=lambda p, m: self.pd.set_progress(p * 0.5 + 0.5, m))
        tot = 3
        for n, (tabl, desc) in enumerate([
                ('shopcats_table', self.SHOPCATS_TABLE_DESC),
                ('shopcatsorder_table', self.SHOPCATSORDER_TABLE_DESC),
                ('pantry_table', self.PANTRY_TABLE_DESC)]):
            # Bind n and tot as lambda defaults: a plain closure would
            # see the loop's *final* value of n when the progress
            # callback actually fires, not the iteration it was made in.
            self.copy_table(
                old_db, tabl,
                table_cols=[i[0] for i in desc[1]],
                prog=lambda p, m, n=n, tot=tot: self.pd.set_progress(
                    p / tot + (n * p / tot), m),
                convert_pickles=True)
        self.pd.set_progress(1.0, 'Database successfully converted!')
        del old_db
def setup_tables (self):
    """Set up metakit tables, migrating old table layouts first.

    Creates the metakit-specific 'incrementer' view used to emulate
    auto-increment IDs (other DBs presumably have auto-increment built
    in), fixes old, incompatible table names, then runs the standard
    setup.  If move_old_tables dumped our data, re-import it afterwards.
    """
    # Set up our top id.  This is unique to metakit and not part of the
    # normal setup_tables routine, since other DBs will presumably have
    # auto-increment built into them.
    self.increment_vw = self.db.getas('incrementer[view:S,field:S,n:I]')
    self.increment_vw = self.increment_vw.ordered()  # ordered vw
    self.vw_to_name = {}
    # We check for old, incompatible table names and fix them before
    # calling our regular setup stuff.
    debug('setup_tables called!', 3)
    self.move_old_tables()
    debug('Setup tables', 3)
    rdatabase.RecData.setup_tables(self)
    # If we've dumped our data, we want to re-import it!
    if self.import_when_done:
        debug('Do import of old recipes', 3)
        old_db, ifi = self.import_when_done
        from gourmet.importers.gxml2_importer import converter
        # Recipe import occupies the second half of the progress bar.
        converter(
            ifi, self, threaded=False,
            progress=lambda p, m: self.pd.set_progress(p * 0.5 + 0.5, m)
            )
        tot = 3
        for n, (tabl, desc) in enumerate([
                ('shopcats_table', self.SHOPCATS_TABLE_DESC),
                ('shopcatsorder_table', self.SHOPCATSORDER_TABLE_DESC),
                ('pantry_table', self.PANTRY_TABLE_DESC)]):
            # Bind n and tot as lambda defaults: a plain closure would
            # see the loop's *final* value of n when the progress
            # callback actually fires, not the iteration it was made in.
            self.copy_table(
                old_db, tabl,
                table_cols=[i[0] for i in desc[1]],
                prog=lambda p, m, n=n, tot=tot: self.pd.set_progress(
                    p / tot + (n * p / tot), m),
                convert_pickles=True
                )
        self.pd.set_progress(1.0, 'Database successfully converted!')
        debug('Delete reference to old database', 3)
        del old_db
def _move_row (self, table, old, new, converter=None):
    """Move data from old (propertyname, type) to new (propertyname, type).

    This is designed for backwards compatibility (to allow for other
    database backends).  *old* and *new* are (propertyname, type) pairs;
    *converter*, if given, is applied to each old value to produce the
    new one.  If any rows actually carry data in the old property, the
    database is backed up before they are mutated in place.
    """
    debug('_move_row(table=%(table)s old=%(old)s new=%(new)s converter=%(converter)s'%locals(),1)
    # Nothing to migrate if the table or the old property doesn't exist.
    if not hasattr(self.contentview[0],table) or not hasattr(getattr(self.contentview[0],table),old[0]):
        debug('Old property %s doesn\'t exist'%old[0],9)
        return
    # Re-declare the table with both the new and the old property present
    # so we can read from one and write to the other.
    tmpantry_table = self.setup_table(table, [new,old])
    # Select only rows with a truthy value in the old property; remapwith
    # gives us a writable view over those rows of the underlying table.
    vw = tmpantry_table.filter(lambda x: getattr(x,old[0]))
    to_move_vw = tmpantry_table.remapwith(vw)
    to_move = len(to_move_vw)
    if to_move > 0:
        # We're about to mutate stored data in place -- snapshot first.
        self._backup_database()
    for r in to_move_vw:
        # Copy (optionally converting) old -> new, then blank the old slot.
        if converter:
            setattr(r,new[0],converter(getattr(r,old[0])))
        else:
            setattr(r,new[0],getattr(r,old[0]))
        setattr(r,old[0],None)