def join_uio_printerquotas(old_id, new_id):
    # Delayed import in case module is not installed
    from Cerebrum.modules.no.uio.printer_quota import PaidPrinterQuotas
    from Cerebrum.modules.no.uio.printer_quota import PPQUtil

    pq_util = PPQUtil.PPQUtil(db)
    if not pq_util.join_persons(old_id, new_id):
        return              # No changes done, so no more work needed
    ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)

    # Assert that user hasn't received a free quota more than once
    term_init_prefix = PPQUtil.get_term_init_prefix(*time.localtime()[0:3])
    free_this_term = {}

    # Now figure out if the user has been granted the same free-quota
    # twice, and if so, undo the duplicate(s)
    for row in ppq.get_history_payments(
            transaction_type=co.pqtt_quota_fill_free,
            desc_mask=term_init_prefix + '%%',
            person_id=new_id,
            order_by_job_id=True):
        free_this_term[int(row['job_id'])] = row['description']
    logger.debug("Free this_term: %s" % free_this_term)

    for row in ppq.get_history_payments(transaction_type=co.pqtt_undo,
                                        person_id=new_id,
                                        order_by_job_id=True):
        if int(row['target_job_id']) in free_this_term:
            del free_this_term[int(row['target_job_id'])]
    logger.debug("... removed already undone: %s" % free_this_term)

    tmp = {}
    for job_id, desc in free_this_term.items():
        tmp.setdefault(desc, []).append(job_id)
    logger.debug("Potential duplicates: %s" % tmp)

    for desc, job_ids in tmp.items():
        if len(job_ids) > 1:
            for job_id in job_ids[1:]:
                logger.debug("Undoing pq_job_id %i" % job_id)
                pq_util.undo_transaction(
                    new_id, job_id, '',
                    'Join-persons resulted in duplicate free quota',
                    update_program='join_persons',
                    ignore_transaction_type=True)
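# The core of the duplicate handling above is grouping this term's
# free-quota fills by description and undoing every job beyond the first
# in each group.  A minimal standalone sketch of that step; the tuples
# below are hypothetical stand-ins for rows returned by
# ppq.get_history_payments(), and the description strings are made up:
def _find_duplicate_free_fills(fills):
    """fills: (job_id, description) pairs ordered by job_id.

    Returns the job_ids to undo, i.e. all but the first job for each
    description."""
    by_desc = {}
    for job_id, desc in fills:
        by_desc.setdefault(desc, []).append(job_id)
    to_undo = []
    for desc, job_ids in by_desc.items():
        if len(job_ids) > 1:
            to_undo.extend(job_ids[1:])
    return to_undo

# Jobs 12 and 17 share a description, so only the later one is undone.
assert _find_duplicate_free_fills(
    [(12, '2004-v:init'), (17, '2004-v:init'), (20, '2004-v:other')]) == [17]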
def truncate_log(to_date, logfilename, person_id=None):
    pq_util = PPQUtil.PPQUtil(db)
    ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)
    to_date = DateTime.Date(
        *([int(x) for x in (to_date + '-0-0-0').split('-')]))
    from_date = DateTime.Date(1980, 1, 1, 1, 1, 1)
    persons = {}
    if person_id:
        persons[person_id] = True
    else:
        # find potential victims
        for row in ppq.get_pagecount_stats(from_date, to_date,
                                           group_by=('person_id', )):
            if row['person_id'] is not None:
                persons[long(row['person_id'])] = True
        for row in ppq.get_payment_stats(from_date, to_date,
                                         group_by=('person_id', )):
            if row['person_id'] is not None:
                persons[long(row['person_id'])] = True
    out = open(logfilename, 'a')
    out.write("Truncate job started at %s\n" % time.asctime())
    for person_id in persons.keys() + [None]:
        removed, new_status = pq_util.truncate_log(
            person_id, to_date, 'quota_tools',
            reset_balance=(person_id is None))
        if not removed:
            continue
        logger.debug(
            "removed %i entries for %s" % (
                len(removed), db.pythonify_data(removed[0]['person_id'])))
        out.write("new balance: %s\n" % repr(new_status))
        for row in removed:
            row = dict([(k, db.pythonify_data(v)) for k, v in row.items()])
            row['tstamp'] = row['tstamp'].strftime('%Y-%m-%d %H:%M.%S')
            out.write("removed: %s\n" % repr(row))
        try:
            db.commit()
        except:
            out.write("WARNING: Commit threw exception for this person\n")
            raise
    out.close()
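# The to_date parsing above appends '-0-0-0' to a 'YYYY-MM-DD' string so
# that splitting on '-' yields the six integers the Date() constructor
# expects (year, month, day, hour, minute, second).  A standalone
# illustration using the standard-library datetime module instead of the
# DateTime module (presumably mx.DateTime) used above:
import datetime

def _parse_to_date(to_date):
    # '2004-07-01' + '-0-0-0' -> [2004, 7, 1, 0, 0, 0]
    return datetime.datetime(
        *[int(x) for x in (to_date + '-0-0-0').split('-')])

assert _parse_to_date('2004-07-01') == datetime.datetime(2004, 7, 1)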
import time

from Cerebrum import Errors
from Cerebrum import Group
from Cerebrum import Person
from Cerebrum.Utils import Factory
from Cerebrum.modules.no import fodselsnr
from Cerebrum.modules.no.uio import AutoStud
from Cerebrum.modules.no.uio.printer_quota import bofhd_pq_utils
from Cerebrum.modules.no.uio.printer_quota import PaidPrinterQuotas
from Cerebrum.modules.no.uio.printer_quota import PPQUtil
from Cerebrum.modules.no.uio.AutoStud.StudentInfo import GeneralDataParser
from Cerebrum.modules.xmlutils.system2parser import system2parser

db = Factory.get('Database')()
update_program = 'quota_update'
ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)
pu = PPQUtil.PPQUtil(db)
const = Factory.get('Constants')(db)
processed_person = {}
person = Person.Person(db)
logger = Factory.get_logger("cronjob")
pq_logger = bofhd_pq_utils.SimpleLogger('pq_bofhd.log')
processed_pids = {}
utv_quota = 250
utv_person = {}

# term_init_mask is used to identify the quota grants that have happened
# this term.  It also defines the point in time when the previous term's
# free quota is zeroed and a new initial quota is granted.
# All PQ_DATES has the format month, day. Date is inclusive
require_kopipenger = True
term_init_prefix = PPQUtil.get_term_init_prefix(*time.localtime()[0:3])
require_kopipenger = not PPQUtil.is_free_period(*time.localtime()[0:3])


def set_quota(person_id, has_quota=False, has_blocked_quota=False,
              quota=None):
    logger.debug("set_quota(%i, has=%i, blocked=%i)" % (
        person_id, has_quota, has_blocked_quota))
    processed_pids[int(person_id)] = True
    if quota is None:
        quota = []
    # Create the pquota_status entry with the correct has_*quota settings
    try:
        ppq_info = ppq.find(person_id)
        new_has = has_quota and 'T' or 'F'
        new_blocked = has_blocked_quota and 'T' or 'F'
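# The "flag and 'T' or 'F'" construction in set_quota() above is the
# pre-Python-2.5 spelling of a conditional expression; it maps a boolean
# onto the one-character 'T'/'F' values that the quota status table
# apparently stores.  A tiny standalone illustration of that idiom:
def _bool_to_flag(value):
    # Same truth table as the and/or idiom: 'T' if value is true, else 'F'.
    return value and 'T' or 'F'

assert _bool_to_flag(True) == 'T'
assert _bool_to_flag(False) == 'F'
assert _bool_to_flag(1) == 'T'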
import cerebrum_path
from Cerebrum import Errors
from Cerebrum import Person
from Cerebrum.Utils import Factory
from Cerebrum.modules.no import fodselsnr
from Cerebrum.modules.no.uio.printer_quota import bofhd_pq_utils
from Cerebrum.modules.no.uio.printer_quota import PaidPrinterQuotas
from Cerebrum.modules.no.uio.printer_quota import PPQUtil
from Cerebrum.modules.no.uio.AutoStud.StudentInfo import GeneralDataParser

db = Factory.get('Database')()
co = Factory.get('Constants')(db)
person = Person.Person(db)
ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)
pq_util = PPQUtil.PPQUtil(db)
logger = Factory.get_logger("cronjob")
# we don't want the log of payment statements to be cluttered with
# debug statements etc.
payment_logger = bofhd_pq_utils.SimpleLogger('pq_bofhd.log')


def import_data(fname):
    # Fetch the payment ids registered so far this year
    processed_payment_ids = {}
    for row in ppq.get_history_payments(
            transaction_type=co.pqtt_quota_fill_pay,
            bank_id_mask='FS:%%'):
        processed_payment_ids[row['bank_id']] = True

    for attrs in GeneralDataParser(fname, "betaling"):
        fnr = fodselsnr.personnr_ok("%06d%05d" % (int(attrs['fodselsdato']),
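# The "%06d%05d" format in the truncated call above zero-pads the
# six-digit birth date (ddmmyy) and the five-digit person number from the
# FS payment record into the eleven-digit Norwegian national id that
# fodselsnr.personnr_ok() validates.  The numbers below are made up for
# illustration and do not form a valid national id:
assert "%06d%05d" % (10180, 12345) == '01018012345'
assert len("%06d%05d" % (10180, 12345)) == 11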