def update_user_tags(self, uid, tags=None, quantity=10):
    """Synchronize this object's tags for user *uid* to exactly *tags*.

    Existing tags absent from *tags* are deleted (decrementing their
    reference counts); tags in *tags* the user does not yet have are added.

    Args:
        uid: id of the user whose tag marks are updated.
        tags: iterable of tag content strings; ``None`` means no tags.
        quantity: max number of existing tags fetched for comparison.
    """
    # Avoid the mutable-default-argument pitfall; normalize None to empty.
    if tags is None:
        tags = []
    # Materialize as a list: a bare map() iterator (Python 3) would be
    # exhausted by the first set() below, making `adds` see no old tags.
    old_tags = [obj[1].content for obj in self.get_user_tags(uid, quantity=quantity)]
    removes = list(set(old_tags).difference(tags))
    adds = list(set(tags).difference(old_tags))

    # Remove tags the user no longer wants.
    if removes:
        remove_tags = (
            object_session(self)
            .query(TagMark, TagContent)
            .select_from(outerjoin(TagMark, TagContent, TagMark.tag_id == TagContent.id))
            .filter(
                and_(
                    TagMark.object_type_id == self.object_type_id,
                    TagMark.object_pk == self.id,
                    TagMark.user_id == uid,
                    TagContent.content.in_(removes),
                )
            )
            .all()
        )
        for tm, tc in remove_tags:
            object_session(self).delete(tm)
            tc.minus_references()

    # Add the newly requested tags.
    if adds:
        for tag in adds:
            tm = TagMark(object_type_id=self.object_type_id, object_pk=self.id, user_id=uid)
            tm.mark_tag(object_session(self), tag)
            object_session(self).add(tm)
def update_user_tags(self, uid, tags=None, quantity=10):
    '''Sync the user's tag set on this object to exactly `tags`.

    Tags present before but missing from `tags` are removed (and their
    reference counts decremented); new entries in `tags` are added.
    `tags=None` is treated as an empty set; `quantity` caps how many
    existing tags are fetched for the comparison.
    '''
    # None default instead of a shared mutable list default.
    if tags is None:
        tags = []
    # List comprehension, not map(): the map iterator would be consumed by
    # the first set() call and the second difference would see nothing.
    old_tags = [obj[1].content for obj in self.get_user_tags(uid, quantity=quantity)]
    removes = list(set(old_tags).difference(tags))
    adds = list(set(tags).difference(old_tags))
    # remove tags
    if removes:
        remove_tags = object_session(self).query(TagMark, TagContent)\
            .select_from(
                outerjoin(TagMark, TagContent, TagMark.tag_id == TagContent.id),
            ).filter(and_(
                TagMark.object_type_id == self.object_type_id,
                TagMark.object_pk == self.id,
                TagMark.user_id == uid,
                TagContent.content.in_(removes),
            )).all()
        for tm, tc in remove_tags:
            object_session(self).delete(tm)
            tc.minus_references()
    # add tags
    if adds:
        for tag in adds:
            tm = TagMark(
                object_type_id=self.object_type_id,
                object_pk=self.id,
                user_id=uid,
            )
            tm.mark_tag(object_session(self), tag)
            object_session(self).add(tm)
def gridData(self):
    """Build the tree-grid payload: open requests, recently closed, search stub.

    Returns a list of group dicts, each with an ``id``, a ``data`` label row
    and (for the first two groups) ``children`` rows formatted per the
    configured UI columns.
    """
    columns = self.rptCfg.uicolumns
    idName = self.rptCfg.getId()

    def formatRow(row):
        # Render one request using the configured columns; datetimes are
        # formatted as MM/DD/YYYY, missing attributes become None.
        rowData = []
        for col in columns:
            val = getattr(row, col.id, None)
            if isinstance(val, datetime):
                val = val.strftime('%m/%d/%Y')
            rowData.append(val)
        return {'id': row.__dict__[idName], 'data': rowData}

    reqs = DBSession().query(Request).select_from(outerjoin(Request, RequestItem))
    # Open requests: items not yet dispensed. `== None` is intentional —
    # SQLAlchemy renders it as IS NULL.
    dispense = reqs.filter(RequestItem.request_item_dispense_date == None).order_by(Request.request_id).all()
    childrenDisp = [formatRow(row) for row in dispense]
    # Requests dispensed within the last day, kept only once fully closed.
    complete = reqs.filter(RequestItem.request_item_dispense_date > (datetime.now() - timedelta(days=1))).order_by(Request.request_id).all()
    childrenComplete = [formatRow(row) for row in complete if not row.hasOpenItems()]
    results = []
    results.append(dict(id='open', data=['Open Requests'], children=childrenDisp))
    results.append(dict(id='closed', data=['Recently Closed'], children=childrenComplete))
    results.append(dict(id='search', data=['Search']))
    return results
def get(self, database, query):
    """Render per-query detail stats for one (database, queryid) pair.

    Builds an aggregate over the powa detailed per-db statistics, joined
    back to ``powa_statements``, and renders either the detail template or
    a "No data" stub when the query returns no rows.
    """
    bs = block_size.c.block_size
    stmt = powa_getstatdata_detailed_db()
    # Restrict the inner statistics to the requested database and query.
    stmt = stmt.where(
        (column("datname") == bindparam("database")) &
        (column("queryid") == bindparam("query")))
    stmt = stmt.alias()
    # Outer-join so the powa_statements row survives even without stats rows.
    from_clause = outerjoin(powa_statements, stmt,
                            and_(powa_statements.c.queryid == stmt.c.queryid,
                                 powa_statements.c.dbid == stmt.c.dbid))
    c = stmt.c
    # Block counters scaled to bytes via mulblock (block-size multiplier).
    rblk = mulblock(sum(c.shared_blks_read).label("shared_blks_read"))
    wblk = mulblock(sum(c.shared_blks_hit).label("shared_blks_hit"))
    stmt = (select([
        column("query"),
        sum(c.calls).label("calls"),
        sum(c.runtime).label("runtime"),
        rblk,
        wblk,
        (rblk + wblk).label("total_blks")])
        .select_from(from_clause)
        .where(powa_statements.c.queryid == bindparam("query"))
        .group_by(column("query"), bs))
    # "from"/"to" request arguments bound for the time-window parameters
    # expected by the underlying powa statistics function.
    value = self.execute(stmt, params={
        "query": query,
        "database": database,
        "from": self.get_argument("from"),
        "to": self.get_argument("to")
    })
    if value.rowcount < 1:
        self.render("xhr.html", content="No data")
        return
    self.render("database/query/detail.html", stats=value.first())
def get_user_tags(self, uid, quantity=10):
    """Return up to `quantity` (TagMark, TagContent) pairs this user put on this object."""
    session = object_session(self)
    tag_join = outerjoin(TagMark, TagContent, TagMark.tag_id == TagContent.id)
    criteria = and_(
        TagMark.object_type_id == self.object_type_id,
        TagMark.object_pk == self.id,
        TagMark.user_id == uid,
    )
    marks = session.query(TagMark, TagContent).select_from(tag_join).filter(criteria)
    return marks[0:quantity]
def get_user_tags(self, uid, quantity=10):
    # Fetch (TagMark, TagContent) rows for this object/user, capped at `quantity`.
    joined = outerjoin(TagMark, TagContent, TagMark.tag_id == TagContent.id)
    q = object_session(self).query(TagMark, TagContent)\
        .select_from(joined)\
        .filter(and_(TagMark.object_type_id == self.object_type_id,
                     TagMark.object_pk == self.id,
                     TagMark.user_id == uid))
    return q[0:quantity]
def test_filter_ref_select(self, Teacher, Student, teacher_to_student):
    """Filtering by an EXISTS over the student outer-join finds exactly teacher1."""
    link = outerjoin(Student, teacher_to_student)
    criterion = (
        exists([1])
        .select_from(link)
        .where(
            and_(
                Student.name == self.student1.name,
                teacher_to_student.c.teacher_id == Teacher.id,
            )
        )
        .correlate_except(Student)
    )
    matches = Teacher.filter(criterion).all()
    assert len(matches) == 1
    assert self.get(matches, 'id', self.teacher1.id) is not None
class RoomStats(Base):
    """Read-only mapping over an aggregate SELECT of per-room review stats.

    Exposes room_id, resident_count, review_count, rating_count and a
    Bayesian-smoothed adjusted_rating (prior weight C, prior mean M).
    """
    # http://fulmicoton.com/posts/bayesian_rating/
    C = 0.5  # closeness function
    M = 5.5  # mean rating
    # Rooms are LEFT-joined through listings/occupancies to reviews, so a room
    # with no reviews still appears via the `Review.id == None` branch below.
    __table__ = select([
        Room.id.label('room_id'),
        func.count(func.distinct(Occupancy.resident_id)).label('resident_count'),
        func.count(Review.id).label('review_count'),
        func.count(Review.rating).label('rating_count'),
        # Bayesian rating: (M*C + sum(ratings)) / (C + count(ratings)).
        ((M * C + func.sum(Review.rating)) /
         (C + func.count(Review.rating))).label('adjusted_rating')
    ]).select_from(
        outerjoin(Room, RoomListing).outerjoin(Occupancy).outerjoin(Review)
    ).where(
        # `== None` is intentional: SQLAlchemy renders it as IS NULL.
        (Review.id == None) | (Review.is_newest & ~Review.hidden)
    ).group_by(Room.id).alias(name='room_stats')
def get_monthly_totals(self, start, end, paid=None):
    """ Return a list of categories and totals for the given month.

    Each record is a (Category.name, total) tuple with total as Decimal.
    `paid` of 0/1 filters on Bill.paid; anything else disables the filter.
    Returns an empty list if the query fails.
    """
    records = []
    paid = bool(paid) if paid in (0,1) else None
    try:
        session = self.dal.Session()
        # records is a tuple of Category.name and total as type Decimal
        q = session.query(Category.name, func.sum(Bill.amount)).select_from(outerjoin(Bill, Category)).filter(Bill.dueDate >= start).filter(Bill.dueDate <= end).group_by(Category.name)
        if paid is not None:
            q = q.filter(Bill.paid == paid)
        records = q.all()
    except Exception as e:
        # Best-effort: report the error and fall through to the default.
        print(str(e))
    # BUG FIX: the original built `records` but never returned it.
    return records
def get_monthly_totals(self, start, end, paid=None):
    """ Return a list of categories and totals for the given month.

    Records are (Category.name, Decimal total) tuples; `paid` of 0/1
    adds a Bill.paid filter. An empty list is returned on query failure.
    """
    records = []
    paid = bool(paid) if paid in (0, 1) else None
    try:
        session = self.dal.Session()
        # records is a tuple of Category.name and total as type Decimal
        q = session.query(
            Category.name, func.sum(Bill.amount)).select_from(
                outerjoin(Bill, Category)).filter(Bill.dueDate >= start).filter(
                    Bill.dueDate <= end).group_by(Category.name)
        if paid is not None:
            q = q.filter(Bill.paid == paid)
        records = q.all()
    except Exception as e:
        # Best-effort: report the error and return the (empty) default.
        print(str(e))
    # BUG FIX: the original never returned the computed records.
    return records
def get_all_study_summaries(self) -> List[StudySummary]:
    """Build a StudySummary (including the frozen best trial) for every study."""
    session = self.scoped_session()
    # Per-study aggregates: earliest trial start and trial count.
    summarized_trial = (session.query(
        models.TrialModel.study_id,
        functions.min(
            models.TrialModel.datetime_start).label("datetime_start"),
        functions.count(models.TrialModel.trial_id).label("n_trial"),
    ).group_by(models.TrialModel.study_id).with_labels().subquery())
    # Outer join keeps studies that have no trials (n_trial coalesced to 0).
    study_summary_stmt = session.query(
        models.StudyModel.study_id,
        models.StudyModel.study_name,
        models.StudyModel.direction,
        summarized_trial.c.datetime_start,
        functions.coalesce(summarized_trial.c.n_trial, 0).label("n_trial"),
    ).select_from(orm.outerjoin(models.StudyModel, summarized_trial))
    study_summary = study_summary_stmt.all()
    study_summaries = []
    for study in study_summary:
        best_trial: Optional[models.TrialModel] = None
        try:
            if study.direction == StudyDirection.MAXIMIZE:
                best_trial = models.TrialModel.find_max_value_trial(
                    study.study_id, session)
            else:
                best_trial = models.TrialModel.find_min_value_trial(
                    study.study_id, session)
        except ValueError:
            # No completed trial with a value exists for this study.
            # NOTE(review): best_trial_frozen is only initialized here; if the
            # try succeeds but best_trial were falsy, the StudySummary call
            # below would hit an unbound name — presumably the find_* helpers
            # always return a trial or raise ValueError. TODO confirm.
            best_trial_frozen: Optional[FrozenTrial] = None
        if best_trial:
            # Reconstruct the best trial's params with their distributions.
            params = (session.query(
                models.TrialParamModel.param_name,
                models.TrialParamModel.param_value,
                models.TrialParamModel.distribution_json,
            ).filter(models.TrialParamModel.trial_id == best_trial.trial_id).all())
            param_dict = {}
            param_distributions = {}
            for param in params:
                distribution = distributions.json_to_distribution(
                    param.distribution_json)
                param_dict[
                    param.param_name] = distribution.to_external_repr(
                        param.param_value)
                param_distributions[param.param_name] = distribution
            user_attrs = session.query(
                models.TrialUserAttributeModel).filter(
                    models.TrialUserAttributeModel.trial_id == best_trial.trial_id)
            system_attrs = session.query(
                models.TrialSystemAttributeModel).filter(
                    models.TrialSystemAttributeModel.trial_id == best_trial.trial_id)
            intermediate = session.query(models.TrialValueModel).filter(
                models.TrialValueModel.trial_id == best_trial.trial_id)
            best_trial_frozen = FrozenTrial(
                best_trial.number,
                TrialState.COMPLETE,
                best_trial.value,
                best_trial.datetime_start,
                best_trial.datetime_complete,
                param_dict,
                param_distributions,
                {i.key: json.loads(i.value_json) for i in user_attrs},
                {i.key: json.loads(i.value_json) for i in system_attrs},
                {value.step: value.value for value in intermediate},
                best_trial.trial_id,
            )
        # Study-level (not trial-level) attributes for the summary itself.
        user_attrs = session.query(models.StudyUserAttributeModel).filter(
            models.StudyUserAttributeModel.study_id == study.study_id)
        system_attrs = session.query(
            models.StudySystemAttributeModel).filter(
                models.StudySystemAttributeModel.study_id == study.study_id)
        study_summaries.append(
            StudySummary(
                study_name=study.study_name,
                direction=study.direction,
                best_trial=best_trial_frozen,
                user_attrs={
                    i.key: json.loads(i.value_json)
                    for i in user_attrs
                },
                system_attrs={
                    i.key: json.loads(i.value_json)
                    for i in system_attrs
                },
                n_trials=study.n_trial,
                datetime_start=study.datetime_start,
                study_id=study.study_id,
            ))
    # Terminate transaction explicitly to avoid connection timeout during transaction.
    self._commit(session)
    return study_summaries
def get_all_study_summaries(self) -> List[StudySummary]:
    """Build a StudySummary for every study, including its frozen best trial.

    Directions and study attributes are pre-fetched in bulk and grouped by
    study id; the best trial is resolved only for single-objective studies.
    """
    with _create_scoped_session(self.scoped_session) as session:
        # Per-study aggregates: earliest trial start and trial count.
        summarized_trial = (
            session.query(
                models.TrialModel.study_id,
                functions.min(models.TrialModel.datetime_start).label("datetime_start"),
                functions.count(models.TrialModel.trial_id).label("n_trial"),
            )
            .group_by(models.TrialModel.study_id)
            .with_labels()
            .subquery()
        )
        # Outer join keeps studies with no trials (n_trial coalesced to 0).
        study_summary_stmt = session.query(
            models.StudyModel.study_id,
            models.StudyModel.study_name,
            summarized_trial.c.datetime_start,
            functions.coalesce(summarized_trial.c.n_trial, 0).label("n_trial"),
        ).select_from(orm.outerjoin(models.StudyModel, summarized_trial))
        study_summary = study_summary_stmt.all()
        _directions = defaultdict(list)
        for d in session.query(models.StudyDirectionModel).all():
            _directions[d.study_id].append(d.direction)
        _user_attrs = defaultdict(list)
        for a in session.query(models.StudyUserAttributeModel).all():
            # BUG FIX: originally keyed on `d.study_id` — the stale loop
            # variable from the directions loop above — which piled every
            # user attribute under the last direction row's study.
            _user_attrs[a.study_id].append(a)
        _system_attrs = defaultdict(list)
        for a in session.query(models.StudySystemAttributeModel).all():
            # BUG FIX: same stale `d.study_id` key as above.
            _system_attrs[a.study_id].append(a)
        study_summaries = []
        for study in study_summary:
            directions = _directions[study.study_id]
            best_trial: Optional[models.TrialModel] = None
            try:
                # Multi-objective studies have no single best trial.
                if len(directions) > 1:
                    raise ValueError
                elif directions[0] == StudyDirection.MAXIMIZE:
                    best_trial = models.TrialModel.find_max_value_trial(
                        study.study_id, 0, session
                    )
                else:
                    best_trial = models.TrialModel.find_min_value_trial(
                        study.study_id, 0, session
                    )
            except ValueError:
                # No eligible completed trial for this study.
                best_trial_frozen: Optional[FrozenTrial] = None
            if best_trial:
                value = models.TrialValueModel.find_by_trial_and_objective(
                    best_trial, 0, session
                )
                assert value
                # Reconstruct the best trial's params with their distributions.
                params = (
                    session.query(
                        models.TrialParamModel.param_name,
                        models.TrialParamModel.param_value,
                        models.TrialParamModel.distribution_json,
                    )
                    .filter(models.TrialParamModel.trial_id == best_trial.trial_id)
                    .all()
                )
                param_dict = {}
                param_distributions = {}
                for param in params:
                    distribution = distributions.json_to_distribution(
                        param.distribution_json
                    )
                    param_dict[param.param_name] = distribution.to_external_repr(
                        param.param_value
                    )
                    param_distributions[param.param_name] = distribution
                user_attrs = models.TrialUserAttributeModel.where_trial_id(
                    best_trial.trial_id, session
                )
                system_attrs = models.TrialSystemAttributeModel.where_trial_id(
                    best_trial.trial_id, session
                )
                intermediate = models.TrialIntermediateValueModel.where_trial_id(
                    best_trial.trial_id, session
                )
                best_trial_frozen = FrozenTrial(
                    best_trial.number,
                    TrialState.COMPLETE,
                    value.value,
                    best_trial.datetime_start,
                    best_trial.datetime_complete,
                    param_dict,
                    param_distributions,
                    {i.key: json.loads(i.value_json) for i in user_attrs},
                    {i.key: json.loads(i.value_json) for i in system_attrs},
                    {value.step: value.intermediate_value for value in intermediate},
                    best_trial.trial_id,
                )
            # Study-level attributes (not the trial-level ones above).
            user_attrs = _user_attrs.get(study.study_id, [])
            system_attrs = _system_attrs.get(study.study_id, [])
            study_summaries.append(
                StudySummary(
                    study_name=study.study_name,
                    direction=None,
                    directions=directions,
                    best_trial=best_trial_frozen,
                    user_attrs={i.key: json.loads(i.value_json) for i in user_attrs},
                    system_attrs={i.key: json.loads(i.value_json) for i in system_attrs},
                    n_trials=study.n_trial,
                    datetime_start=study.datetime_start,
                    study_id=study.study_id,
                )
            )
        return study_summaries
def query_data(session, agency_code, period, year):
    """ Request A file data

        Args:
            session: DB session
            agency_code: FREC or CGAC code for generation
            period: The period for which to get GTAS data
            year: The year for which to get GTAS data

        Returns:
            The rows using the provided dates for the given agency.
    """
    # set a boolean to determine if the original agency code is frec or cgac
    # (FREC codes are 4 characters, CGAC codes are 3)
    frec_provided = len(agency_code) == 4
    tas_gtas = tas_gtas_combo(session, period, year)
    # Make a list of FRECs to compare to for 011 AID entries
    frec_list = []
    if not frec_provided:
        # NOTE(review): .all() yields Row tuples, not bare codes — presumably
        # in_() is fed one-element rows here; verify this matches the schema.
        frec_list = session.query(FREC.frec_code).select_from(outerjoin(CGAC, FREC, CGAC.cgac_id == FREC.cgac_id)).\
            filter(CGAC.cgac_code == agency_code).all()
    # Group agencies together that need to be grouped
    agency_array = []
    if agency_code == '097':
        agency_array = ['017', '021', '057', '097']
    elif agency_code == '1601':
        agency_array = ['1601', '016']
    elif agency_code == '1125':
        agency_array = ['1125', '011']
    # Save the ATA filter
    agency_filters = []
    if not agency_array:
        agency_filters.append(tas_gtas.c.allocation_transfer_agency == agency_code)
    else:
        agency_filters.append(tas_gtas.c.allocation_transfer_agency.in_(agency_array))
    # Save the AID filter (applies only when no ATA is present on the row)
    if agency_code == '097' and not frec_provided:
        agency_filters.append(and_(tas_gtas.c.allocation_transfer_agency.is_(None),
                                   tas_gtas.c.agency_identifier.in_(agency_array)))
    elif not frec_provided:
        agency_filters.append(and_(tas_gtas.c.allocation_transfer_agency.is_(None),
                                   tas_gtas.c.agency_identifier == agency_code))
    else:
        agency_filters.append(and_(tas_gtas.c.allocation_transfer_agency.is_(None),
                                   tas_gtas.c.fr_entity_type == agency_code))
    # If we're checking a CGAC, we want to filter on all of the related FRECs for AID 011, otherwise just filter on
    # that FREC
    if frec_list:
        agency_filters.append(and_(tas_gtas.c.allocation_transfer_agency.is_(None),
                                   tas_gtas.c.agency_identifier == '011',
                                   tas_gtas.c.fr_entity_type.in_(frec_list)))
    elif not frec_provided:
        agency_filters.append(and_(tas_gtas.c.allocation_transfer_agency.is_(None),
                                   tas_gtas.c.agency_identifier == '011',
                                   tas_gtas.c.fr_entity_type == agency_code))
    # Exclude financing accounts (financial_indicator2 == 'F'); any one of the
    # accumulated agency filters may match (OR), grouped by the TAS components.
    rows = initial_query(session, tas_gtas.c).\
        filter(func.coalesce(tas_gtas.c.financial_indicator2, '') != 'F').\
        filter(or_(*agency_filters)).\
        group_by(tas_gtas.c.allocation_transfer_agency,
                 tas_gtas.c.agency_identifier,
                 tas_gtas.c.beginning_period_of_availa,
                 tas_gtas.c.ending_period_of_availabil,
                 tas_gtas.c.availability_type_code,
                 tas_gtas.c.main_account_code,
                 tas_gtas.c.sub_account_code)
    return rows
return "<Occupancy(resident_id={!r}, listing={!r})>".format( self.resident_id, self.listing ) # finds the oldest occupancy for each (person, year) pair Occupancy2 = aliased(Occupancy) RoomListing2 = aliased(RoomListing) BallotSeason2 = aliased(BallotSeason) resident_year_to_first_occ_s = select([ Occupancy.id.label('occ1_id') ]).select_from( outerjoin( join(Occupancy, RoomListing).join(BallotSeason), join(Occupancy2, RoomListing2).join(BallotSeason2), (Occupancy.resident_id == Occupancy2.resident_id) & (BallotSeason.year == BallotSeason2.year) & (Occupancy.chosen_at > Occupancy2.chosen_at) ) ).where(Occupancy2.id == None).correlate(None).alias() # this occupancy was the first one Occupancy.is_first = column_property( exists().where(resident_year_to_first_occ_s.c.occ1_id==Occupancy.id) ) # a mapping of Occupancy to BallotSlot in which it was booked occ_to_slot_s = select([ Occupancy.id.label('occupancy_id'), BallotSlot.id.label('ballotslot_id') ]).select_from(
def __repr__(self): return "<Occupancy(resident_id={!r}, listing={!r})>".format( self.resident_id, self.listing) # finds the oldest occupancy for each (person, year) pair Occupancy2 = aliased(Occupancy) RoomListing2 = aliased(RoomListing) BallotSeason2 = aliased(BallotSeason) resident_year_to_first_occ_s = select( [Occupancy.id.label('occ1_id')]).select_from( outerjoin( join(Occupancy, RoomListing).join(BallotSeason), join(Occupancy2, RoomListing2).join(BallotSeason2), (Occupancy.resident_id == Occupancy2.resident_id) & (BallotSeason.year == BallotSeason2.year) & (Occupancy.chosen_at > Occupancy2.chosen_at))).where( Occupancy2.id == None).correlate(None).alias() # this occupancy was the first one Occupancy.is_first = column_property( exists().where(resident_year_to_first_occ_s.c.occ1_id == Occupancy.id)) # a mapping of Occupancy to BallotSlot in which it was booked occ_to_slot_s = select([ Occupancy.id.label('occupancy_id'), BallotSlot.id.label('ballotslot_id') ]).select_from( join(Occupancy, RoomListing).join(BallotSeason).join(BallotEvent).join( BallotSlot)).where((Occupancy.resident_id == BallotSlot.person_id)
def update(inputfile, outputfile):
    """Migrate an old sqlite ledger database into the new Subject/Bill schema.

    Reads `ledger`/`sub_ledger`/`moin` tables from *inputfile*, creating
    Subjects (two-digit codes per level, max 99 per level), then Bills and
    Notebook entries with Jalali dates converted to Gregorian.
    """
    engine = create_engine('sqlite:///%s' % inputfile, echo=True)
    metadata = MetaData(bind=engine)
    outdb = Database(outputfile)
    outsession = outdb.session
    # Reflect-by-declaration of the three legacy source tables.
    ledger = Table('ledger', metadata,
        Column('id', Integer, primary_key = True),
        Column('name', String),
        Column('type', Integer),
    )
    subledger = Table('sub_ledger', metadata,
        Column('ledger', Integer),
        Column('name', String),
        Column('id', Integer, primary_key = True),
        Column('bed', Integer),
        Column('bes', Integer),
    )
    moin = Table('moin', metadata,
        Column('sub_name', String),
        Column('ledger', Integer),
        Column('name', String),
        Column('sub', Integer),
        Column('number', Integer),
        Column('date', String),
        Column('des', String),
        Column('bed', Integer),
        Column('bes', Integer),
        Column('mondeh', Integer),
        Column('tashkhis', String),
    )
    metadata.create_all()
    # Continue numbering from the highest existing top-level Subject code.
    query = outsession.query(Subject.code).select_from(Subject).order_by(Subject.id.desc())
    code = query.filter(Subject.parent_id == 0).first()
    # NOTE(review): `== None` on a plain result row — `is None` is the idiom.
    if code == None :
        lastcode = 0
    else :
        lastcode = int(code[0][-2:])
    # Ledgers joined to their sub-ledgers, ordered so each ledger's rows group.
    s = outerjoin(ledger, subledger, ledger.c.id == subledger.c.ledger).select().order_by(ledger.c.id)
    result = s.execute()
    parent_id = 0
    pid = 0
    sid = 0
    pcode = ""
    mainids = {} #stores tuples like (oldid:newid) pairs to have old subject ids for later use
    subids = {} #stores tuples like (oldid:newid) pairs to have old subject ids for later use
    for row in result:
        # New ledger encountered: create its level-one Subject.
        if row[0] != parent_id:
            lastcode += 1
            if lastcode > 99:
                logging.error("Ledgers with numbers greater than %d are not imported to the new database" \
                    "Because you can have just 99 Ledgers (level one subject)" % row[0])
                break
            pcode = "%02d" % lastcode
            parentsub = Subject(pcode, row[1], 0, 2)
            childcode = 0
            outsession.add(parentsub)
            # First insert is committed to learn the DB-assigned id; later
            # ids are predicted by incrementing (relies on sequential ids).
            if pid == 0:
                outsession.commit()
                sid = parentsub.id
            else:
                sid += 1
            pid = sid
            parent_id = row[0]
            mainids[row[0]] = pid
        # Row carries a sub-ledger (outer join gives NULLs otherwise).
        if row[3] != None:
            childcode += 1
            if childcode > 99:
                logging.error("SubLedgers with number %d is not imported to the new database" \
                    "Because you can have just 99 subjects per level" % row[5])
                continue
            childsub = Subject(pcode + "%02d" % childcode, row[4], pid, 2)
            outsession.add(childsub)
            sid += 1
            subids[row[5]] = sid
    outsession.commit()
    # Second pass: journal rows become Bills (one per document number) and
    # Notebook lines, converting Jalali dates to Gregorian.
    s = moin.select().order_by(moin.c.number)
    result = s.execute()
    bnumber = 0
    bid = 0
    value = 0
    cal = calverter.calverter()
    for row in result:
        if row.number != bnumber:
            fields = re.split(r"[:-]+",row.date)
            jd = cal.jalali_to_jd(int(fields[0]), int(fields[1]), int(fields[2]))
            (gyear, gmonth, gday) = cal.jd_to_gregorian(jd)
            ndate = date(gyear, gmonth, gday)
            bill = Bill(row.number, ndate, ndate, ndate)
            outsession.add(bill)
            if bid == 0:
                outsession.commit()
                bid = bill.id
            else:
                bid += 1
            bnumber = row.number
        if row.sub == 0:
            subid = mainids[row.ledger]
        else:
            subid = subids[row.sub]
        # Debit (bed) stored negative, credit (bes) positive.
        if row.bed == 0:
            value = row.bes
        else:
            value = -(row.bed)
        n = Notebook(subid, bid, value, row.des)
        outsession.add(n)
    outsession.commit()
def test_select(self):
    """Exercise a broad range of query constructs against the fixture data.

    Covers filters, aggregates, limit/offset, ordering, group-by, joins,
    server functions, CASE expressions, and finally an insert/update/delete
    round-trip. Assertions depend on the seeded fixture rows.
    """
    # --- simple filters -------------------------------------------------
    person = self.session.query(Person.name).filter(Person.id == 1).first()
    self.assertEqual(person.name, "保登 心愛")
    persons = self.session.query(Person).filter(Person.height < 150).all()
    self.assertEqual(len(persons), 3)
    persons = self.session.query(Person).filter(
        Person.name.like("%保登%")).all()
    self.assertEqual(len(persons), 2)
    persons = self.session.query(Person).filter(Person.id.in_([1, 2])).all()
    self.assertEqual(persons[1].name, "香風 智乃")
    persons = self.session.query(Person).filter(
        ~Person.id.in_([1, 2])).all()
    self.assertEqual(persons[0].name, "天々座 理世")
    persons = self.session.query(Person).filter(
        and_(Person.age == 13, Person.blood_type == "A")).all()
    self.assertEqual(persons[0].nickname, "メグ")
    # --- aggregates -----------------------------------------------------
    persons = self.session.query(
        func.avg(Person.height).label("avg_height")).first()
    self.assertTrue(isinstance(persons.avg_height, float))
    print(persons)
    persons = self.session.query(func.sum(
        Person.age).label("sum_age")).first()
    # self.assertTrue(isinstance(persons.sum_age, int))
    print(persons.sum_age)
    persons = self.session.query(
        func.count(Person.id).label("count_person")).first()
    print(persons.count_person)
    persons2 = self.session.query(Person.id).count()
    print(persons2)
    # --- paging and ordering -------------------------------------------
    persons = self.session.query(Person).limit(5).offset(5).all()
    print(persons)
    self.assertEqual(persons[0].id, 6)
    persons = self.session.query(Person).order_by(Person.id.desc()).all()
    print(persons)
    self.assertEqual(persons[0].id, 12)
    persons = self.session.query(Person.age, func.count(
        Person.age)).group_by(Person.age).all()
    print(persons)
    # --- joins (implicit, explicit condition, outer) --------------------
    persons = self.session.query(Person).select_from(join(Person, Shop)).all()
    print(persons)
    persons = self.session.query(Person).select_from(
        join(Person, Shop, Person.shop_id == Shop.id)).all()
    print(persons)
    persons = self.session.query(Person).select_from(
        outerjoin(Person, Shop, Person.shop_id == Shop.id)).all()
    print(persons)
    # --- server-side functions and CASE ---------------------------------
    persons = self.session.query(Person).filter(
        Person.created_at < current_timestamp()).all()
    print(persons)
    print(self.session.execute(select([current_user()])).first())
    persons = self.session.query(
        Person.id, Person.name,
        case([
            (Person.height >= 165, '165以上'),
        ], else_='165未満')).all()
    print(persons)
    # --- insert / update / delete round-trip ----------------------------
    person = Person(name="秋山優花里", nickname="オッドボール三等軍曹",
                    age=16, birthday="06-06", blood_type="O")
    self.session.add(person)
    self.session.commit()
    self.assertEqual(person.age, 16)
    person.age = 17
    self.session.commit()
    self.assertEqual(person.age, 17)
    self.session.delete(person)
    self.session.commit()
def update(inputfile, outputfile):
    """Migrate an old sqlite ledger database into the new Subject/Bill schema.

    Reads `ledger`/`sub_ledger`/`moin` tables from *inputfile*, creating
    Subjects (two-digit codes per level, max 99 per level), then Bills and
    Notebook entries with Jalali dates converted to Gregorian.
    """
    engine = create_engine('sqlite:///%s' % inputfile, echo=True)
    metadata = MetaData(bind=engine)
    outdb = Database(outputfile)
    outsession = outdb.session
    # Declarations of the three legacy source tables.
    ledger = Table(
        'ledger',
        metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String),
        Column('type', Integer),
    )
    subledger = Table(
        'sub_ledger',
        metadata,
        Column('ledger', Integer),
        Column('name', String),
        Column('id', Integer, primary_key=True),
        Column('bed', Integer),
        Column('bes', Integer),
    )
    moin = Table(
        'moin',
        metadata,
        Column('sub_name', String),
        Column('ledger', Integer),
        Column('name', String),
        Column('sub', Integer),
        Column('number', Integer),
        Column('date', String),
        Column('des', String),
        Column('bed', Integer),
        Column('bes', Integer),
        Column('mondeh', Integer),
        Column('tashkhis', String),
    )
    metadata.create_all()
    # Continue numbering from the highest existing top-level Subject code.
    query = outsession.query(Subject.code).select_from(Subject).order_by(
        Subject.id.desc())
    code = query.filter(Subject.parent_id == 0).first()
    # FIX: identity comparison (`is None`) instead of `== None` — `code` is a
    # plain result row here, not a SQL column expression.
    if code is None:
        lastcode = 0
    else:
        lastcode = int(code[0][-2:])
    # Ledgers joined to their sub-ledgers, ordered so each ledger's rows group.
    s = outerjoin(ledger, subledger,
                  ledger.c.id == subledger.c.ledger).select().order_by(
                      ledger.c.id)
    result = s.execute()
    parent_id = 0
    pid = 0
    sid = 0
    pcode = ""
    mainids = {
    }  #stores tuples like (oldid:newid) pairs to have old subject ids for later use
    subids = {
    }  #stores tuples like (oldid:newid) pairs to have old subject ids for later use
    for row in result:
        # New ledger encountered: create its level-one Subject.
        if row[0] != parent_id:
            lastcode += 1
            if lastcode > 99:
                logging.error("Ledgers with numbers greater than %d are not imported to the new database" \
                    "Because you can have just 99 Ledgers (level one subject)" % row[0])
                break
            pcode = "%02d" % lastcode
            parentsub = Subject(pcode, row[1], 0, 2)
            childcode = 0
            outsession.add(parentsub)
            # First insert is committed to learn the DB-assigned id; later
            # ids are predicted by incrementing (relies on sequential ids).
            if pid == 0:
                outsession.commit()
                sid = parentsub.id
            else:
                sid += 1
            pid = sid
            parent_id = row[0]
            mainids[row[0]] = pid
        # Row carries a sub-ledger (the outer join yields NULLs otherwise).
        # FIX: `is not None` instead of `!= None` — row[3] is a plain value.
        if row[3] is not None:
            childcode += 1
            if childcode > 99:
                logging.error("SubLedgers with number %d is not imported to the new database" \
                    "Because you can have just 99 subjects per level" % row[5])
                continue
            childsub = Subject(pcode + "%02d" % childcode, row[4], pid, 2)
            outsession.add(childsub)
            sid += 1
            subids[row[5]] = sid
    outsession.commit()
    # Second pass: journal rows become Bills (one per document number) and
    # Notebook lines, converting Jalali dates to Gregorian.
    s = moin.select().order_by(moin.c.number)
    result = s.execute()
    bnumber = 0
    bid = 0
    value = 0
    cal = calverter.calverter()
    for row in result:
        if row.number != bnumber:
            fields = re.split(r"[:-]+", row.date)
            jd = cal.jalali_to_jd(int(fields[0]), int(fields[1]),
                                  int(fields[2]))
            (gyear, gmonth, gday) = cal.jd_to_gregorian(jd)
            ndate = date(gyear, gmonth, gday)
            bill = Bill(row.number, ndate, ndate, ndate)
            outsession.add(bill)
            if bid == 0:
                outsession.commit()
                bid = bill.id
            else:
                bid += 1
            bnumber = row.number
        if row.sub == 0:
            subid = mainids[row.ledger]
        else:
            subid = subids[row.sub]
        # Debit (bed) stored negative, credit (bes) positive.
        if row.bed == 0:
            value = row.bes
        else:
            value = -(row.bed)
        n = Notebook(subid, bid, value, row.des)
        outsession.add(n)
    outsession.commit()