class ExpireDateMixin(object):
    """ A mixin for providing expire dates.

    Registers an 'expire_date' attribute on construction.  Items that
    carry an explicit expire date (present in ``self.items``) return it;
    all other idents are assigned a value cyclically from
    ``expire_dates`` (no expiry / expired 10 days ago / expires in 10
    days).
    """

    expire_attr = 'expire_date'
    expire_dates = [None,
                    now() - DateTimeDelta(10),
                    now() + DateTimeDelta(10)]

    def __init__(self):
        # Cooperative init: only chain up when a parent defines __init__.
        parent = super(ExpireDateMixin, self)
        if hasattr(parent, '__init__'):
            getattr(parent, '__init__')()
        self.register_attr(self.expire_attr, self.get_expire_date)

    def get_expire_date(self, ident):
        """ Get the expire_date for a given ident.

        @type ident: str
        @param ident: The 'ident' or 'id of an item

        @rtype: mx.DateTime.DateTime or NoneType
        @return: An expire date or None
        """
        known = getattr(self, 'items')
        if ident in known:
            return known[ident][self.expire_attr]
        return self._get_cyclic_value(ident, self.expire_dates)
def storicizza(self, tipologia="Documento", id_pdc=None, id_doc=None):
    """Archive this e-mail in the ``docsemail`` table.

    Records queue/send timestamps, sender, recipients, subject, body
    and — best-effort — the content of the first attachment only.

    :param tipologia: document category label stored with the record
    :param id_pdc: optional accounting-entity id to link
    :param id_doc: optional document id to link
    :raises Exception: if the mail was never initialised, or if the
        row cannot be saved.
    """
    if self.msg is None:
        # py3-compatible raise form (the old ``raise E, msg`` is py2-only).
        raise Exception("Impossibile storicizzare una mail non inizializzata")
    import Env
    bt = Env.Azienda.BaseTab
    adb = Env.adb
    from mx.DateTime import now
    s = adb.DbTable(bt.TABNAME_DOCSEMAIL, 'docsemail')
    s.CreateNewRow()
    s.datcoda = now()
    s.datsend = now()
    s.id_pdc = id_pdc
    s.id_doc = id_doc
    s.tipologia = tipologia
    s.mittente = self.SendFrom
    s.destinat = ', '.join(self.SendTo)
    s.oggetto = self.Subject
    s.testo = self.Message
    stream = None
    if self.Attachments:
        # Best-effort: a missing/unreadable file is still skipped silently
        # (as before), but we no longer swallow unrelated errors with a
        # bare ``except``, and the file handle is closed on all paths.
        try:
            with open(self.Attachments[0], 'rb') as f:
                stream = f.read()
        except (IOError, OSError):
            pass
    if stream:
        s.documento = stream
    if not s.Save():
        raise Exception(repr(s.GetError()))
def write_forkurs_info(self, pre_course_file):
    """Write pre-course (forkurs) attendants to an XML file.

    For every attendant returned by FS, a <regkort> element (with both
    change/create dates set to "now") and a mostly-hardcoded <emnestud>
    element (course FORGLU, spring 2016) are emitted.

    @param pre_course_file: path of the XML file to (re)write.
    """
    from mx.DateTime import now
    logger.info("Writing pre-course file to '%s'", pre_course_file)
    # SimilarSizeWriter refuses to replace the file if its size changes
    # by more than max_pct_change percent — a guard against empty dumps.
    f = SimilarSizeWriter(pre_course_file, mode='w', encoding=XML_ENCODING)
    f.max_pct_change = 50
    cols, course_attendants = self._ext_cols(self.fs.forkurs.list())
    f.write(xml.xml_hdr + "<data>\n")
    for a in course_attendants:
        f.write(
            '<regkort fodselsdato="{}" personnr="{}" dato_endring="{}" '
            'dato_opprettet="{}"/>\n'.format(a['fodselsdato'],
                                            a['personnr'],
                                            str(now()),
                                            str(now())))
        f.write('<emnestud fodselsdato="{}" personnr="{}" etternavn="{}" '
                'fornavn="{}" adrlin2_semadr="" postnr_semadr="" '
                'adrlin3_semadr="" adrlin2_hjemsted="" postnr_hjemsted="" '
                'adrlin3_hjemsted="" sprakkode_malform="NYNORSK" '
                'kjonn="X" studentnr_tildelt="{}" emnekode="FORGLU" '
                'versjonskode="1" terminkode="VÅR" arstall="2016" '
                'telefonlandnr_mobil="{}" telefonnr_mobil="{}"/>\n'.format(
                    a['fodselsdato'], a['personnr'], a['etternavn'],
                    a['fornavn'], a['studentnr_tildelt'],
                    a['telefonlandnr'], a['telefonnr']))
    f.write("</data>\n")
    # close() triggers the size check and the atomic replace.
    f.close()
def enforce_user_constraints(db):
    """ Check a number of business rules for our users.

    For every FA/VA account: an unset expire date is forced to the
    default lifetime, and an expire date too far in the future is
    clipped back to the default lifetime.

    :param db: open Cerebrum database connection (changes are written
        but not committed here).
    """
    account = Factory.get("Account")(db)
    const = Factory.get("Constants")()
    for row in account.list(filter_expired=False):
        # We check FA/VA only
        if row["np_type"] not in (const.fedaccount_type,
                                  const.virtaccount_type):
            continue
        account.clear()
        account.find(row["entity_id"])
        # Expiration is not set -> force it to default
        if row["expire_date"] is None:
            logger.warn("Account %s (id=%s) is missing expiration date.",
                        account.account_name,
                        account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()
            # BUGFIX: without this continue the next check evaluates
            # ``None - now()`` and raises TypeError.
            continue
        # Expiration is too far in the future -> force it to default
        if row["expire_date"] - now() > account.DEFAULT_ACCOUNT_LIFETIME:
            logger.warn(
                "Account %s (id=%s) has expire date too far in the"
                " future.", account.account_name, account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()
def actionScheduleAlarm(self, cmsg, dbcom):
    """Register a new SMS alarm and, if it is still in the future,
    start a thread that will fire it.

    :param cmsg: incoming requisition message, parsed by the alarm helper
    :param dbcom: database facade used to persist the alarm
    :return: one of "OK", "NOTFOUND", "INVALID", "ERROR"
    """
    sms_dict = self.alarm.retrieveData(cmsg)
    activity = ACTIVE
    if sms_dict == NOTFOUND:
        self.log.LOG(LOG_ERROR, "sms.actionScheduleAlarm()",
                     "TAGs are missing in the requisition to schedule an alarm. Aborting schedule.")
        return "NOTFOUND"
    blow = self.shared.mountTime(sms_dict[DATA_BLOW])
    if blow == INVALID:
        return "INVALID"
    elif blow <= now():
        # BUGFIX: was ``blow < now() or blow == now()`` — two separate
        # now() calls, so the equality test could never reliably match.
        # An alarm time in the past is stored as FAILED.
        activity = FAILED
    ret = dbcom.registerSMS(sms_dict[DATA_ORG], sms_dict[DATA_EXT + "0"],
                            sms_dict[DATA_BLOW], sms_dict[DATA_OPER],
                            sms_dict[DATA_MSG], activity)
    if ret == OK and activity == ACTIVE:
        # Fire-and-forget worker that sleeps until ``blow``.
        alarm_thread = Thread(target=self.alarm.launch, args=(blow,))
        alarm_thread.start()
        self.log.LOG(LOG_INFO, "sms", "New alarm thread has been started.")
        return "OK"
    elif ret == NOTFOUND:
        return "NOTFOUND"
    elif activity == FAILED:
        return "INVALID"
    else:
        return "ERROR"
def default_get(self, cr, uid, fields, context=None):
    '''
    Get the default values for the replenishment rule.

    Fills in, when absent from the parent defaults: the company, the
    first warehouse of that company, that warehouse's stock location,
    and a consumption period covering the current month.
    '''
    res = super(stock_warehouse_order_cycle, self).default_get(
        cr, uid, fields, context=context)
    company_id = res.get('company_id')
    warehouse_id = res.get('warehouse_id')
    if 'company_id' not in res:
        company_id = self.pool.get('res.company')._company_default_get(
            cr, uid, 'stock.warehouse.automatic.supply', context=context)
        res.update({'company_id': company_id})
    if 'warehouse_id' not in res:
        # NOTE(review): assumes at least one warehouse exists for the
        # company — an empty search() would raise IndexError here.
        warehouse_id = self.pool.get('stock.warehouse').search(
            cr, uid, [('company_id', '=', company_id)],
            context=context)[0]
        res.update({'warehouse_id': warehouse_id})
    if 'location_id' not in res:
        location_id = self.pool.get('stock.warehouse').browse(
            cr, uid, warehouse_id, context=context).lot_stock_id.id
        res.update({'location_id': location_id})
    if 'consumption_period_from' not in res:
        # First day of the current month.
        res.update({'consumption_period_from':
                    (DateFrom(now()) + RelativeDate(day=1)).strftime('%Y-%m-%d')})
    if 'consumption_period_to' not in res:
        # Last day of the current month (1st of next month minus one day).
        res.update({'consumption_period_to':
                    (DateFrom(now()) + RelativeDate(months=1, day=1, days=-1)).strftime('%Y-%m-%d')})
    return res
def test_classCreate(self):
    """Create a class reflected from an existing table (fromDatabase)
    and verify that column values round-trip through instances."""
    # Skip on backends that cannot introspect table definitions.
    if not supports('fromDatabase'):
        return

    class OldAutoTest(SQLObject):
        _connection = getConnection()

        # SQLObject idiom: inner ``sqlmeta`` subclasses the imported
        # ``sqlmeta`` to configure this model only.
        class sqlmeta(sqlmeta):
            idName = 'auto_id'
            fromDatabase = True

    john = OldAutoTest(firstName='john', lastName='doe', age=10,
                       created=now(), wannahavefun=False,
                       longField='x' * 1000)
    jane = OldAutoTest(firstName='jane', lastName='doe', happy='N',
                       created=now(), wannahavefun=True,
                       longField='x' * 1000)
    assert not john.wannahavefun
    assert jane.wannahavefun
    assert john.longField == 'x' * 1000
    assert jane.longField == 'x' * 1000
    # Unregister the ad-hoc class so later tests can redefine it.
    del classregistry.registry(
        OldAutoTest.sqlmeta.registry).classes['OldAutoTest']
def _empty_inventory(self, cr, uid, data, context):
    """empty a location

    Wizard step: creates a stock.inventory that zeroes every product
    lot currently present in the location ``data['id']``, then
    validates it.  Raises a wizard error when the location holds no
    products.
    """
    pool = pooler.get_pool(cr.dbname)
    inventory_line_obj = pooler.get_pool(cr.dbname).get('stock.inventory.line')
    location_obj = pooler.get_pool(cr.dbname).get('stock.location')
    res = {}
    res_location = {}
    if data['id']:
        # pylint: disable-msg=W0212
        # _product_get returns {product_id: qty, ...} for the location.
        res = location_obj._product_get(cr, uid, data['id'], context=context)
        res_location[data['id']] = res
    product_ids = []
    # One inventory document named after the current timestamp.
    inventory_id = pool.get('stock.inventory').create(
        cr, uid,
        {'name': "INV:" + now().strftime('%Y-%m-%d %H:%M:%S'),
         'date': now().strftime('%Y-%m-%d %H:%M:%S'), })
    for location in res_location.keys():
        res = res_location[location]
        for product_id in res.keys():
            # Lots with a positive quantity in this location.
            cr.execute('select prodlot_id, name as id from stock_report_prodlots where location_id = %s and product_id = %s and name > 0',
                       (location, product_id))
            prodlots = cr.fetchall()
            for prodlot in prodlots:
                prod = pool.get('product.template').browse(cr, uid, product_id)
                amount = prodlot[1]
                if (amount):
                    # product_qty=0 -> the validated inventory empties the lot.
                    inventory_line = {'inventory_id': inventory_id,
                                      'location_id': location,
                                      'product_id': product_id,
                                      'product_uom': prod.uom_id.id,
                                      'product_qty': 0,
                                      'prodlot_id': prodlot[0]}
                    inventory_line_obj.create(cr, uid, inventory_line)
                    product_ids.append(product_id)
    # Validate the inventory, generating the compensating stock moves.
    pool.get('stock.inventory').action_done(cr, uid, [inventory_id],
                                            context=context)
    if len(product_ids) == 0:
        raise wizard.except_wizard(_('Message !'),
                                   _('No product in this location.'))
    return {}
def check_too_many_attempts(self, account):
    """ Checks if a user has tried to use the service too many times.

    The attempt counter is kept in the password_failed_attempts trait,
    which is created on first use and incremented on every call.

    Raises an exception when too many attempts occur in the block
    period.
    """
    block_start = now() - RelativeDateTime(
        seconds=cereconf.INDIVIDUATION_ATTEMPTS_BLOCK_PERIOD)
    trait = account.get_trait(self.co.trait_password_failed_attempts)
    # Only count attempts recorded inside the current block window.
    attempts = int(trait['numval']) if trait and trait['date'] > block_start else 0
    logger.debug('User %r has tried %r times', account.account_name,
                 attempts)
    if attempts > cereconf.INDIVIDUATION_ATTEMPTS:
        logger.info("User %r too many attempts, temporarily blocked",
                    account.account_name)
        raise Errors.CerebrumRPCException('toomanyattempts')
    # Record this attempt (creates the trait when absent).
    account.populate_trait(code=self.co.trait_password_failed_attempts,
                           target_id=account.entity_id,
                           date=now(),
                           numval=attempts + 1)
    account.write_db()
    account._db.commit()
def enforce_user_constraints(db):
    """ Check a number of business rules for our users.

    For every FA/VA account: an unset expire date is forced to the
    default lifetime, and an expire date too far in the future is
    clipped back to the default lifetime.

    :param db: open Cerebrum database connection (changes are written
        but not committed here).
    """
    account = Factory.get("Account")(db)
    const = Factory.get("Constants")()
    for row in account.list(filter_expired=False):
        # We check FA/VA only
        if row["np_type"] not in (const.fedaccount_type,
                                  const.virtaccount_type):
            continue
        account.clear()
        account.find(row["entity_id"])
        # Expiration is not set -> force it to default
        if row["expire_date"] is None:
            logger.warn("Account %s (id=%s) is missing expiration date.",
                        account.account_name, account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()
            # BUGFIX: without this continue the next check evaluates
            # ``None - now()`` and raises TypeError.
            continue
        # Expiration is too far in the future -> force it to default
        if row["expire_date"] - now() > account.DEFAULT_ACCOUNT_LIFETIME:
            logger.warn("Account %s (id=%s) has expire date too far in the"
                        " future.", account.account_name, account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()
def remove_expired_groups(db, days, pretend):
    """
    Removes groups that have reached number of `days' past
    expiration-date.

    :param Cerebrum.Database db: The database connection
    :param int days: Amount of days after past expiration-date
    :param bool pretend: If True, do not actually remove from DB
    """
    try:
        amount_to_be_removed_groups = 0
        amount_removed_groups = 0
        if pretend:
            logger.info('DRYRUN: Rolling back all changes')
        gr = Factory.get('Group')(db)
        expired_groups = gr.search(filter_expired=False, expired_only=True)
        for group in expired_groups:
            # Adding an int to an mx DateTime advances it by whole days.
            removal_deadline = group['expire_date'] + days
            if now() > removal_deadline:
                # deadline passed. remove!
                amount_to_be_removed_groups += 1
                try:
                    gr.clear()
                    gr.find(group['group_id'])
                    # Groups with extensions (e.g. moderated/virtual
                    # add-ons) are never auto-deleted.
                    exts = gr.get_extensions()
                    if exts:
                        logger.debug("Skipping group %r, has extensions %r",
                                     gr.group_name, exts)
                        continue
                    gr.delete()
                    if not pretend:
                        db.commit()
                    else:
                        # do not actually remove when running with -d
                        db.rollback()
                    amount_removed_groups += 1
                    logger.info(
                        'Expired group (%s - %s) removed' % (
                            group['name'],
                            group['description']))
                except DatabaseError, e:
                    # Per-group failure: roll back and carry on.
                    logger.error(
                        'Database error: Could not delete expired group '
                        '(%s - %s): %s. Skipping' % (
                            group['name'],
                            group['description'],
                            str(e)),
                        exc_info=True)
                    db.rollback()
                    continue
            else:
                # Deadline not reached yet — just report the countdown.
                time_until_removal = removal_deadline - now()
                logger.debug(
                    'Expired group (%s - %s), will be removed in %d days' % (
                        group['name'],
                        group['description'],
                        int(time_until_removal.days)))
    except Exception, e:
        logger.critical('Unexpected exception: %s' % (str(e)),
                        exc_info=True)
        db.rollback()
        raise
def remove_expired_groups(db, days, pretend):
    """
    Removes groups that have reached number of `days' past
    expiration-date.

    :param Cerebrum.Database db: The database connection
    :param int days: Amount of days after past expiration-date
    :param bool pretend: If True, do not actually remove from DB
    """
    try:
        amount_to_be_removed_groups = 0
        amount_removed_groups = 0
        if pretend:
            logger.info('DRYRUN: Rolling back all changes')
        gr = Factory.get('Group')(db)
        expired_groups = gr.search(filter_expired=False, expired_only=True)
        for group in expired_groups:
            # Adding an int to an mx DateTime advances it by whole days.
            removal_deadline = group['expire_date'] + days
            if now() > removal_deadline:
                # deadline passed. remove!
                amount_to_be_removed_groups += 1
                try:
                    gr.clear()
                    gr.find(group['group_id'])
                    # Groups with extensions are never auto-deleted.
                    exts = gr.get_extensions()
                    if exts:
                        logger.debug("Skipping group %r, has extensions %r",
                                     gr.group_name, exts)
                        continue
                    gr.delete()
                    if not pretend:
                        db.commit()
                    else:
                        # do not actually remove when running with -d
                        db.rollback()
                    amount_removed_groups += 1
                    logger.info('Expired group (%s - %s) removed' %
                                (group['name'], group['description']))
                except DatabaseError, e:
                    # Per-group failure: roll back and carry on.
                    logger.error(
                        'Database error: Could not delete expired group '
                        '(%s - %s): %s. Skipping' % (group['name'],
                                                     group['description'],
                                                     str(e)),
                        exc_info=True)
                    db.rollback()
                    continue
            else:
                # Deadline not reached yet — just report the countdown.
                time_until_removal = removal_deadline - now()
                logger.debug(
                    'Expired group (%s - %s), will be removed in %d days' %
                    (group['name'], group['description'],
                     int(time_until_removal.days)))
    except Exception, e:
        logger.critical('Unexpected exception: %s' % (str(e)),
                        exc_info=True)
        db.rollback()
        raise
def action_no_picking(self, cr, uid, ids):
    """creates the final move for the production and a move by product
    in the bom from product's procurement location or a child her to
    production location"""
    for production in self.browse(cr, uid, ids):
        original_product_lines = map(lambda x: x.id, production.product_lines)
        # Temporarily detach the product lines so the parent
        # action_confirm does not generate pickings for them.
        self.pool.get('mrp.production.product.line').write(
            cr, uid, map(lambda x: x.id, production.product_lines),
            {'production_id': None})
        production = self.browse(cr, uid, production.id)
        #create empty picking, delete
        super(mrp_production, self).action_confirm(cr, uid, ids)
        # Re-attach the product lines after confirmation.
        self.pool.get('mrp.production.product.line').write(
            cr, uid, original_product_lines,
            {'production_id': production.id})
        production = self.browse(cr, uid, production.id)
        #final move
        final_moves_ids = self.pool.get('stock.move').search(
            cr, uid, [('production_id', '=', production.id)])
        res_final_id = final_moves_ids[0]
        if production.product_lines:
            # Lots already consumed in this run, so the same lot is not
            # proposed twice; processed largest quantities first.
            notvalidprodlots = []
            order_product_lines = self.pool.get('mrp.production.product.line').search(
                cr, uid, [('production_id', '=', production.id)],
                order="product_qty DESC")
            for line in order_product_lines:
                obj_line = self.pool.get('mrp.production.product.line').browse(cr, uid, line)
                #search the default prodlot and the location of this prodlot
                default_prodlot, prodlot_location, default_qty = \
                    self.pool.get('stock.production.lot').get_default_production_lot(
                        cr, uid,
                        obj_line.product_id.product_tmpl_id.property_stock_procurement.id,
                        obj_line.product_id.id,
                        obj_line.product_qty,
                        True,
                        notvalidprodlots)
                notvalidprodlots.append((default_prodlot, prodlot_location, default_qty))
                if not prodlot_location:
                    # Fall back to the product's procurement location.
                    prodlot_location = obj_line.product_id.product_tmpl_id.property_stock_procurement.id
                #creates the move in stock_move for the product not procure
                #from deafult location of prodlot to production location
                production_move = self.pool.get('stock.move').create(cr, uid, vals={
                    'product_uom': obj_line.product_uom.id,
                    'product_uos_qty': obj_line.product_uos_qty,
                    'date': now().strftime('%Y-%m-%d %H:%M:%S'),
                    'product_qty': obj_line.product_qty,
                    'product_uos': obj_line.product_uos,
                    'location_id': prodlot_location,
                    'product_id': obj_line.product_id.id,
                    'prodlot_id': default_prodlot,
                    'name': 'NoProc:' + str(obj_line.product_id.product_tmpl_id.property_stock_procurement.id) + str(production.product_lines.index(obj_line)) + 'TO' + now().strftime('%Y-%m-%d %H:%M:%S'),
                    'date_planned': now().strftime('%Y-%m-%d %H:%M:%S'),
                    'state': 'draft',
                    'move_dest_id': res_final_id,
                    'location_dest_id': obj_line.product_id.product_tmpl_id.property_stock_production.id,
                })
                #searches the final move in production and insert a registry
                #in mrp_production_move_ids
                if res_final_id:
                    cr.execute('insert into mrp_production_move_ids (production_id, move_id) values (%s,%s)',
                               (production.id, production_move))
        self.write(cr, uid, [production.id], {'state': 'confirmed'})
    return True
def on_change_method(self, cr, uid, ids, method):
    '''
    Unfill the consumption period if the method is FMC.

    For AMC the period is preset to run from the 1st of the month two
    months back through the last day of the current month.

    :param method: selected method key ('fmc', 'amc', or falsy)
    :return: on_change dict with the field values to set
    '''
    res = {}
    # ``method == 'fmc'`` already implies method is truthy — the old
    # ``method and method == 'fmc'`` guard was redundant.
    if method == 'fmc':
        res.update({'consumption_period_from': False,
                    'consumption_period_to': False})
    elif method == 'amc':
        res.update({'consumption_period_from':
                    (now() + RelativeDate(day=1, months=-2)).strftime('%Y-%m-%d'),
                    'consumption_period_to':
                    (now() + RelativeDate(day=1, months=1, days=-1)).strftime('%Y-%m-%d')})
    return {'value': res}
class BasicPersonSource(BaseDataSource):
    """Data source that serves birth dates and genders for test persons.

    Items with explicit values (present in ``self.items``) return them;
    all other idents get a value assigned cyclically from the candidate
    lists below.
    """

    # Candidate birth dates: today, ~100 years ago, ~20 years ago.
    # NOTE(review): ``356 * 20`` looks like a typo for 365 * 20, but the
    # exact span is not behavior-critical for cyclic test data — kept.
    birth_dates = [
        now(),
        now() - DateTimeDelta(365 * 100),
        now() - DateTimeDelta(356 * 20)
    ]
    genders = ['M', 'F', None]
    birth_date_attr = 'birth_date'
    gender_attr = 'gender'

    def __init__(self):
        parent = super(BasicPersonSource, self)
        if hasattr(parent, '__init__'):
            getattr(parent, '__init__')()
        self.register_attr(self.birth_date_attr, self.get_birth_date)
        self.register_attr(self.gender_attr, self.get_gender)

    def get_gender(self, ident):
        """ Get the gender for a given ident.

        @type ident: str
        @param ident: The 'ident' or 'id of an item

        @rtype: str or NoneType
        @return: One of the values in self.genders
        """
        if ident in getattr(self, 'items'):
            # BUGFIX: indexed with self.expire_attr (copy-paste from
            # ExpireDateMixin); the gender attribute is wanted here.
            return getattr(self, 'items')[ident][self.gender_attr]
        return self._get_cyclic_value(ident, self.genders)

    def get_birth_date(self, ident):
        """ Get the birth_date for a given ident.

        @type ident: str
        @param ident: The 'ident' or 'id of an item

        @rtype: mx.DateTime.DateTime or NoneType
        @return: One of the values in self.birth_dates
        """
        if ident in getattr(self, 'items'):
            # BUGFIX: indexed with self.expire_attr (copy-paste from
            # ExpireDateMixin); the birth date attribute is wanted here.
            return getattr(self, 'items')[ident][self.birth_date_attr]
        return self._get_cyclic_value(ident, self.birth_dates)
def delete_stale_events(cl_events, db):
    """Remove all events of type cl_events older than GRACE_PERIOD.

    cl_events is an iterable listing change_log event types that we
    want expunged. These events cannot require any state change in
    Cerebrum (other than their own deletion). It is the caller's
    responsibility to check that this is so.
    """
    if not isinstance(cl_events, (list, tuple, set)):
        cl_events = [cl_events, ]
    const = Factory.get("Constants")()
    typeset_request = ", ".join(str(const.ChangeType(x))
                                for x in cl_events)
    logger.debug("Deleting stale requests: %s", typeset_request)
    for event in db.get_log_events(types=cl_events):
        tstamp = event["tstamp"]
        # Default timeout unless the event carries its own.
        timeout = cereconf.GRACE_PERIOD
        try:
            # NOTE(review): pickle.loads on stored change_params — this
            # assumes the change log is trusted data; do not point this
            # at externally supplied blobs.
            params = pickle.loads(event["change_params"])
            if params['timeout'] is not None:
                timeout = DateTimeDelta(params['timeout'])
                logger.debug('Timeout set to %s for %s',
                             (now() + timeout).strftime('%Y-%m-%d'),
                             event['change_id'])
                # Clamp per-event timeouts to the configured maximum.
                if timeout > cereconf.MAX_INVITE_PERIOD:
                    logger.warning('Too long timeout (%s) for for %s',
                                   timeout.strftime('%Y-%m-%d'),
                                   event['change_id'])
                    timeout = cereconf.MAX_INVITE_PERIOD
        except KeyError:
            # No 'timeout' key in params — keep the default.
            pass
        if now() - tstamp <= timeout:
            continue
        logger.debug("Deleting stale event %s (@%s) for entity %s (id=%s)",
                     str(const.ChangeType(event["change_type_id"])),
                     event["tstamp"].strftime("%Y-%m-%d"),
                     fetch_name(event["subject_entity"], db),
                     event["subject_entity"])
        db.remove_log_event(event["change_id"])
    db.commit()
    logger.debug("Deleted all stale requests: %s", typeset_request)
def onchange_partner_id(self, cr, uid, ids, move_id, partner_id,
                        account_id=None, debit=0, credit=0, date=False,
                        journal=False):
    """On-change for the partner of a journal item.

    Computes the maturity date from the partner's payment term and,
    when no account is given, proposes the partner's receivable
    (sale journal) or payable (purchase journal) account mapped
    through the partner's fiscal position.

    :return: on_change dict with 'date_maturity' and possibly
        'account_id' plus whatever onchange_account_id adds.
    """
    val = {}
    val['date_maturity'] = False
    if not partner_id:
        return {'value': val}
    if not date:
        # Default the reference date to today.
        date = now().strftime('%Y-%m-%d')
    part = self.pool.get('res.partner').browse(cr, uid, partner_id)
    if part.property_payment_term:
        # compute() returns [(due_date, amount), ...]; the first due
        # date becomes the maturity date.
        res = self.pool.get('account.payment.term').compute(
            cr, uid, part.property_payment_term.id, 100, date)
        if res:
            val['date_maturity'] = res[0][0]
    if not account_id:
        id1 = part.property_account_payable.id
        id2 = part.property_account_receivable.id
        if journal:
            jt = self.pool.get('account.journal').browse(cr, uid, journal).type
            if jt == 'sale':
                val['account_id'] = self.pool.get('account.fiscal.position').map_account(
                    cr, uid, part and part.property_account_position or False, id2)
            elif jt == 'purchase':
                val['account_id'] = self.pool.get('account.fiscal.position').map_account(
                    cr, uid, part and part.property_account_position or False, id1)
            # Cascade the account on-change so dependent fields update.
            if val.get('account_id', False):
                d = self.onchange_account_id(cr, uid, ids, val['account_id'])
                val.update(d['value'])
    return {'value': val}
def populate_fagmiljo(person_id, fagmiljo):
    """Add a given fagmiljo string to the given person.

    Stores the value as the trait_fagmiljo trait with the current
    timestamp.  Relies on the module-level ``pe`` (Person), ``co``
    (Constants) and ``logger`` objects.
    """
    logger.debug("Populating fagmiljo for person_id=%s", person_id)
    pe.clear()
    pe.find(person_id)
    pe.populate_trait(code=co.trait_fagmiljo, date=now(), strval=fagmiljo)
    pe.write_db()
def test_mxDateTime():
    """Round-trip mx.DateTime values through the DateTime2 columns:
    col1 a full datetime, col2 a date (time zeroed), col3 a time."""
    setupClass(DateTime2)
    _now = now()
    dt2 = DateTime2(col1=_now, col2=_now.pydate(),
                    col3=Time(_now.hour, _now.minute, _now.second))
    # col1 keeps the full date and time.
    assert isinstance(dt2.col1, col.DateTimeType)
    assert dt2.col1.year == _now.year
    assert dt2.col1.month == _now.month
    assert dt2.col1.day == _now.day
    assert dt2.col1.hour == _now.hour
    assert dt2.col1.minute == _now.minute
    # Fractional seconds are not preserved.
    assert dt2.col1.second == int(_now.second)
    # col2 is a pure date: time components come back as zero.
    assert isinstance(dt2.col2, col.DateTimeType)
    assert dt2.col2.year == _now.year
    assert dt2.col2.month == _now.month
    assert dt2.col2.day == _now.day
    assert dt2.col2.hour == 0
    assert dt2.col2.minute == 0
    assert dt2.col2.second == 0
    # col3 is a pure time-of-day.
    assert isinstance(dt2.col3, (col.DateTimeType, col.TimeType))
    assert dt2.col3.hour == _now.hour
    assert dt2.col3.minute == _now.minute
    assert dt2.col3.second == int(_now.second)
def update_trait(self):
    """ Update the 'trait_user_notified' trait by:
        1. Creating it, if it doesn't exist
        2. Resetting it if the date attribute is more than days_reset
           days old.
        3. Incrementing the numval attribute.
    """
    # Initial values for new trait
    last_reset = now()
    num_sent = 0
    trait = self.ac.get_trait(self.co.trait_user_notified)
    # Trait date exists, and is not older than days_reset old.
    if trait and (last_reset - self.days_reset) < trait.get("date"):
        last_reset = trait.get("date")
        num_sent = trait.get("numval") or 0
    # Else, reset trait
    # Increment and write the updated trait values
    num_sent += 1
    self.ac.populate_trait(self.co.trait_user_notified,
                           numval=num_sent,
                           date=last_reset)
    self.ac.write_db()
    if self.dryrun:
        # BUGFIX: the message had an unterminated quote ("user '%s").
        self.logger.warn(
            "Dryrun, not writing trait '%s' for user '%s'" % (
                str(self.co.trait_user_notified), self.ac.account_name)
        )
        self.ac._db.rollback()
    else:
        self.ac._db.commit()
def loadPAClass(self):
    """Load the pupil/subject pairs of this tutor's prearranged classes
    that are in progress right now, then hand the rows to
    _loadPupilAndSubject via the Twisted deferred."""
    # NOTE(review): the placeholders are quoted ('%s') and one is %d,
    # yet a params tuple is also passed to runQuery — this looks like it
    # mixes string interpolation with driver parameterization; verify
    # against the dbapi paramstyle in use.
    pa_query = """select fk_pupil, fk_subject from prearranged_classes where (time_start <= '%s' and time_end >= '%s') and fk_tutor = %d """
    date = now()
    d = self.db.db.runQuery(pa_query, (date, date, int(self.avId)))
    d.addCallback(self._loadPupilAndSubject)
def test_mxDateTime():
    """Round-trip mx.DateTime values through the DateTime2 columns.

    col2 is declared as a date column: most backends zero the time
    part, but SQLite stores the value verbatim, hence the branch."""
    setupClass(DateTime2)
    _now = now()
    dt2 = DateTime2(col1=_now, col2=_now,
                    col3=Time(_now.hour, _now.minute, int(_now.second)))
    # col1 keeps the full date and time (fractional seconds dropped).
    assert isinstance(dt2.col1, col.DateTimeType)
    assert dt2.col1.year == _now.year
    assert dt2.col1.month == _now.month
    assert dt2.col1.day == _now.day
    assert dt2.col1.hour == _now.hour
    assert dt2.col1.minute == _now.minute
    assert dt2.col1.second == int(_now.second)
    assert isinstance(dt2.col2, col.DateTimeType)
    assert dt2.col2.year == _now.year
    assert dt2.col2.month == _now.month
    assert dt2.col2.day == _now.day
    if getConnection().dbName == "sqlite":
        # SQLite keeps the time part of a date column.
        assert dt2.col2.hour == _now.hour
        assert dt2.col2.minute == _now.minute
        assert dt2.col2.second == int(_now.second)
    else:
        assert dt2.col2.hour == 0
        assert dt2.col2.minute == 0
        assert dt2.col2.second == 0
    # col3 is a pure time-of-day.
    assert isinstance(dt2.col3, (col.DateTimeType, col.TimeType))
    assert dt2.col3.hour == _now.hour
    assert dt2.col3.minute == _now.minute
    assert dt2.col3.second == int(_now.second)
def entity_is_fresh(self, person, account):
    """Check if a person or account is 'fresh', i.e. if the account or
    person is newly created, or if the account has been restored
    lately. This is to be able to avoid blocking new phone numbers from
    systems where the account is just activated.

    :return: True when a fresh-marker trait is recent or the person was
        created within the FRESH_DAYS window, else False.
    """
    # Subtracting an int from an mx DateTime steps back whole days.
    delay = now() - getattr(cisconf, 'FRESH_DAYS', 10)
    # Check for traits only set for 'fresh' accounts:
    for tr in (self.co.trait_student_new, self.co.trait_sms_welcome):
        trait = account.get_trait(tr)
        if trait and trait['date'] > delay:
            logger.debug('Fresh trait %r for account %r, '
                         'so considered fresh', tr, account.account_name)
            return True
    # Check if person has recently been created:
    # BUGFIX: ``(x)`` is not a tuple — the trailing comma makes
    # ``types`` the intended one-element sequence.
    for row in self.db.get_log_events(types=(self.clconst.person_create,),
                                      any_entity=person.entity_id,
                                      sdate=delay):
        logger.debug("Person %r is fresh", person.entity_id)
        return True
    logger.debug("Person %r (account %r) is not fresh",
                 person.entity_id, account.entity_id)
    return False
def _pending_amount(self, cr, uid, ids, name, arg, context=None):
    """Functional field: amount still pending per partner.

    Sums -amount_to_pay over the partner's unreconciled receivable and
    payable move lines whose maturity date is today or later (or not
    set).
    """
    res = {}
    today = now().strftime('%Y-%m-%d')
    move_line_obj = self.pool.get('account.move.line')
    for partner in self.browse(cr, uid, ids, context):
        accounts = []
        if partner.property_account_receivable:
            accounts.append(partner.property_account_receivable.id)
        if partner.property_account_payable:
            accounts.append(partner.property_account_payable.id)
        domain = [('partner_id', '=', partner.id),
                  ('account_id', 'in', accounts),
                  ('reconcile_id', '=', False),
                  '|',
                  ('date_maturity', '>=', today),
                  ('date_maturity', '=', False)]
        line_ids = move_line_obj.search(cr, uid, domain, context=context)
        # Lines with amount_to_pay == 0 are "circulating": the payment
        # request went to the bank but has not been reconciled yet (or
        # the maturity date has not been reached).
        total = 0.0
        for line in move_line_obj.browse(cr, uid, line_ids, context):
            total += -line.amount_to_pay
        res[partner.id] = total
    return res
def current_time(db, layer_code='', time_zone=None):
    """ Return the current time, taking the time zone into account.

    With an empty *layer_code* the plain local now() is returned.
    Otherwise: a truthy *time_zone* shifts now() by that many hours;
    without one, the database is asked for its date/time-with-zone.
    """
    if layer_code == '':
        return now()
    if time_zone:
        return now() + timedelta(hours=time_zone)
    # No explicit zone given — let the DB report its zoned datetime.
    res = db.dbExec('select * from MY_GETDATETIME(?,?)',
                    params=[None, 'DT'],
                    fetch='one')
    return res['DATETIMEZONE']
def update_trait(self):
    """ Update the 'trait_user_notified' trait by:
        1. Creating it, if it doesn't exist
        2. Resetting it if the date attribute is more than days_reset
           days old.
        3. Incrementing the numval attribute.
    """
    # Initial values for new trait
    last_reset = now()
    num_sent = 0
    trait = self.ac.get_trait(self.co.trait_user_notified)
    # Trait date exists, and is not older than days_reset old.
    if trait and (last_reset - self.days_reset) < trait.get('date'):
        last_reset = trait.get('date')
        num_sent = trait.get('numval') or 0
    # Else, reset trait
    # Increment and write the updated trait values
    num_sent += 1
    self.ac.populate_trait(self.co.trait_user_notified,
                           numval=num_sent,
                           date=last_reset)
    self.ac.write_db()
    if self.dryrun:
        # BUGFIX: the message had an unterminated quote ("user '%s").
        self.logger.warn("Dryrun, not writing trait '%s' for user '%s'",
                         str(self.co.trait_user_notified),
                         self.ac.account_name)
        self.ac._db.rollback()
    else:
        self.ac._db.commit()
def entity_is_fresh(self, person, account):
    """Check if a person or account is 'fresh', i.e. if the account or
    person is newly created, or if the account has been restored
    lately. This is to be able to avoid blocking new phone numbers from
    systems where the account is just activated.

    :return: True when a fresh-marker trait is recent or the person was
        created within the FRESH_DAYS window, else False.
    """
    # Subtracting an int from an mx DateTime steps back whole days.
    delay = now() - getattr(cisconf, 'FRESH_DAYS', 10)
    # Check for traits only set for 'fresh' accounts:
    for tr in (self.co.trait_student_new, self.co.trait_sms_welcome):
        trait = account.get_trait(tr)
        if trait and trait['date'] > delay:
            log.debug(
                'Fresh trait %s for account %s, so considered fresh'
                % (tr, account.account_name))
            return True
    # Check if person has recently been created:
    # BUGFIX: ``(x)`` is not a tuple — the trailing comma makes
    # ``types`` the intended one-element sequence.
    for row in self.db.get_log_events(types=(self.co.person_create,),
                                      any_entity=person.entity_id,
                                      sdate=delay):
        log.debug("Person %s is fresh" % person.entity_id)
        return True
    log.debug("Person %s (account %s) is not fresh"
              % (person.entity_id, account.entity_id))
    return False
def test_mxDateTime():
    """Round-trip mx.DateTime values through the DateTime2 columns.

    col2 is declared as a date column: most backends zero the time
    part, but SQLite stores the value verbatim, hence the branch."""
    setupClass(DateTime2)
    _now = now()
    dt2 = DateTime2(col1=_now, col2=_now,
                    col3=Time(_now.hour, _now.minute, _now.second))
    # col1 keeps the full date and time (fractional seconds dropped).
    assert isinstance(dt2.col1, col.DateTimeType)
    assert dt2.col1.year == _now.year
    assert dt2.col1.month == _now.month
    assert dt2.col1.day == _now.day
    assert dt2.col1.hour == _now.hour
    assert dt2.col1.minute == _now.minute
    assert dt2.col1.second == int(_now.second)
    assert isinstance(dt2.col2, col.DateTimeType)
    assert dt2.col2.year == _now.year
    assert dt2.col2.month == _now.month
    assert dt2.col2.day == _now.day
    if getConnection().dbName == "sqlite":
        # SQLite keeps the time part of a date column.
        assert dt2.col2.hour == _now.hour
        assert dt2.col2.minute == _now.minute
        assert dt2.col2.second == int(_now.second)
    else:
        assert dt2.col2.hour == 0
        assert dt2.col2.minute == 0
        assert dt2.col2.second == 0
    # col3 is a pure time-of-day.
    assert isinstance(dt2.col3, (col.DateTimeType, col.TimeType))
    assert dt2.col3.hour == _now.hour
    assert dt2.col3.minute == _now.minute
    assert dt2.col3.second == int(_now.second)
def removeClient(self, perspective): """Quita al cliente de id clientId de la lista de clientes actuales. """ #si era tutor elimino su room if perspective.perspective_whoami() == TUTOR: self.removeRoom(perspective.avId) else: if perspective.viewing != None: self.wbRooms[perspective.viewing].roomViewerExit(perspective.avId) #si esperaba lo elimino de la cola if perspective.waitingInQueue != None: self.wbQueues.leaveQueue(perspective.waitingInQueue, perspective.avId) #si estaba en "observacion" if perspective.waitingInRoom != None: self.wbRooms[perspective.waitingInRoom].roomPupilStopWaiting() avTutor = self.wbClients[perspective.waitingInRoom] avTutor.cleanWhiteBoard() self.exitViewers(perspective.waitingInRoom) self.manageNextPupil(perspective.waitingInRoom, self.sessions.pupilEnd) if perspective.roomId != None: self.wbRooms[perspective.roomId].roomPupilExit(perspective.avId) discount = (now().minute - perspective.lastMinute) % 60 perspective.discountIA(discount) avTutor = self.wbClients[perspective.roomId] try: self.notifyClient(perspective.roomId, "El alumno abandono") avTutor.saveClassStatus() avTutor.cleanWhiteBoard() except (pb.DeadReferenceError): pass self.exitViewers(perspective.roomId) self.manageNextPupil(perspective.roomId, self.sessions.pupilEnd) del(self.wbClients[perspective.avId]) del(self.wbClientStatus[perspective.avId])
def delete_stale_events(cl_events, db):
    """Remove all events of type cl_events older than GRACE_PERIOD.

    cl_events is an iterable listing change_log event types that we
    want expunged. These events cannot require any state change in
    Cerebrum (other than their own deletion). It is the caller's
    responsibility to check that this is so.
    """
    if not isinstance(cl_events, (list, tuple, set)):
        cl_events = [cl_events, ]
    clconst = Factory.get("CLConstants")()
    typeset_request = ", ".join(str(clconst.ChangeType(x))
                                for x in cl_events)
    logger.debug("Deleting stale requests: %s", typeset_request)
    for event in db.get_log_events(types=cl_events):
        tstamp = event["tstamp"]
        # Default timeout unless the event carries its own.
        timeout = cereconf.GRACE_PERIOD
        try:
            params = json.loads(event["change_params"])
            if params['timeout'] is not None:
                timeout = DateTimeDelta(params['timeout'])
                logger.debug('Timeout set to %s for %s',
                             (now() + timeout).strftime('%Y-%m-%d'),
                             event['change_id'])
                # Clamp per-event timeouts to the configured maximum.
                if timeout > cereconf.MAX_INVITE_PERIOD:
                    logger.warning('Too long timeout (%s) for for %s',
                                   timeout.strftime('%Y-%m-%d'),
                                   event['change_id'])
                    timeout = cereconf.MAX_INVITE_PERIOD
        except KeyError:
            # No 'timeout' key in params — keep the default.
            pass
        if now() - tstamp <= timeout:
            continue
        logger.debug("Deleting stale event %s (@%s) for entity %s (id=%s)",
                     str(clconst.ChangeType(event["change_type_id"])),
                     event["tstamp"].strftime("%Y-%m-%d"),
                     fetch_name(event["subject_entity"], db),
                     event["subject_entity"])
        db.remove_log_event(event["change_id"])
    db.commit()
    logger.debug("Deleted all stale requests: %s", typeset_request)
def compute(self, cr, uid, id, value, paydays, date_ref=False, context={}):
    """Compute the due dates and amounts for a payment term.

    Returns a list of ('YYYY-MM-DD', amount) tuples, one per payment term
    line, starting from date_ref (defaults to today) and honouring the
    partner's "payment days" when the line requires it.
    """
    if not date_ref:
        date_ref = now().strftime('%Y-%m-%d')
    pt = self.browse(cr, uid, id, context)
    amount = value
    result = []
    aux_date = mx.DateTime.strptime(date_ref, '%Y-%m-%d')
    for line in pt.line_ids:
        # NOTE(review): 'amt' stays unbound if line.value is none of the
        # three known kinds -- presumably the selection field guarantees
        # one of them; confirm against the model definition.
        if line.value == 'fixed':
            amt = line.value_amount
        elif line.value == 'procent':
            amt = round(amount * line.value_amount, 2)
        elif line.value == 'balance':
            amt = amount
        if amt:
            next_date = aux_date + RelativeDateTime(days=line.days)
            if line.condition == 'end of month':
                # RelativeDateTime(day=-1) snaps to the last day of the month.
                next_date += RelativeDateTime(day=-1)
            # This condition builds a sorted list from the 'Payment Days'
            # string. After applying the line's delay, the list is walked to
            # find the due date. An extra day "in the following month" is
            # appended for due dates later than the last payment day: that
            # day is the first of the list, in the next month. Example:
            # day 35 of November = 5th of December.
            if line.condition == 'payment days' and paydays:
                payment_days_list = map(int, paydays.split('-'))
                payment_days_list.sort()
                payment_days_list.append(next_date.days_in_month +
                                         payment_days_list[0])
                for pay_day in payment_days_list:
                    if next_date <= next_date + RelativeDateTime(
                            day=pay_day):
                        next_date = next_date + RelativeDateTime(
                            day=pay_day)
                        # A policy is needed for months that lack the pay
                        # day (e.g. pay day 30 in February). The lines
                        # below take the last day of the month in that
                        # case; commenting them out would instead yield
                        # day 1 or 2 of the following month.
                        previous_month = next_date - RelativeDateTime(
                            months=1)
                        while next_date.day not in payment_days_list + [
                                previous_month.days_in_month]:
                            next_date = next_date - RelativeDateTime(
                                days=1)
                        break
            result.append((next_date.strftime('%Y-%m-%d'), amt))
            amount -= amt
    return result
def __absolute_time(self, time_str):
    """Turn *time_str* into an absolute timestamp.

    A leading '+' denotes an offset from the current time (rounded);
    anything else is parsed as an absolute date/time string.
    """
    if time_str[0] == "+":
        delta = Parser.TimeDeltaFromString(time_str[1:])
        return round_datetime(now() + delta)
    return Parser.DateTimeFromString(time_str)
def __absolute_time(self, time_str):
    """Turn *time_str* into an absolute timestamp.

    A leading '+' denotes an offset from the current time (rounded);
    anything else is parsed as an absolute date/time string.
    """
    if time_str[0] == "+":
        offset = ISO.ParseTime(time_str[1:])
        return round_datetime(now() + offset)
    return Parser.ParseDateTime(time_str)
def set_time():
    """Read H/M/S from the entry fields, arm the alarm for that time today
    (or tomorrow if it has already passed), and start the alarm loop.
    """
    h = int(time_field_hour.get())
    m = int(time_field_min.get())
    s = int(time_field_sec.get())
    self.go_time = DateTime(now().year, now().month, now().day, h, m, s)
    if self.go_time < now():
        # BUG FIX: previously built DateTime(..., now().day + 1, ...), which
        # raises on the last day of a month (e.g. day 32). mx.DateTime
        # arithmetic adds whole days with correct month/year rollover.
        self.go_time = self.go_time + 1
    self.dt = self.go_time - now()
    self.armed = True
    run_alarm()
def sync_group(affil, gname, descr, mtype, memb, recurse=True):
    """Create or update group *gname* so its member set equals *memb*.

    :param affil: parent affiliation key (or None); the group is registered
        under AffiliatedGroups[affil] when given.
    :param gname: name of the group to synchronise.
    :param descr: group description (updated in place if changed).
    :param mtype: member entity type; when it is entity_group, *memb* is
        keyed by group name, otherwise by account_id.
    :param memb: dict whose keys are the desired members (values unused).
    :param recurse: only appears in the debug log here -- TODO confirm
        whether callers rely on it elsewhere.
    """
    logger.debug(
        ("sync_group(parent:'%s'; groupname:'%s'; description:'%s'; " +
         "membertype:'%s'; members:'%s'; recurse:'%s')") %
        (affil, gname, descr, mtype, memb.keys(), recurse))
    if mtype == constants.entity_group:
        # memb has group_name as keys; resolve them to entity ids.
        members = {}
        for tmp_gname in memb.keys():
            grp = get_group(tmp_gname)
            members[int(grp.entity_id)] = 1
    else:
        # memb has account_id as keys
        members = memb.copy()
    if affil is not None:
        AffiliatedGroups.setdefault(affil, {})[gname] = 1
    try:
        group = get_group(gname)
    except Errors.NotFoundError:
        # Group does not exist yet -- create it.
        group = Factory.get('Group')(db)
        group.clear()
        group.populate(
            creator_id=group_creator,
            visibility=constants.group_visibility_all,
            name=gname,
            description=descr,
            group_type=constants.group_type_unknown,
        )
        group.write_db()
    else:
        # Update description if it has changed
        if group.description != descr:
            group.description = descr
            group.write_db()
        if group.is_expired():
            # Extend the group's life by 6 months
            from mx.DateTime import now, DateTimeDelta
            group.expire_date = now() + DateTimeDelta(6 * 30)
            group.write_db()
    # Make sure the group is listed for export to LMS
    if not group.has_spread(constants.spread_lms_group):
        group.add_spread(constants.spread_lms_group)
    # Reconcile membership: drop members not in *members*, keep the rest.
    for member in group.search_members(group_id=group.entity_id,
                                       member_type=mtype,
                                       member_filter_expired=False):
        member = int(member["member_id"])
        if members.has_key(member):
            del members[member]
        else:
            logger.debug("sync_group(): Deleting member %d" % member)
            group.remove_member(member)
    # Whatever remains in *members* is missing from the group -- add it.
    for member in members.keys():
        group.add_member(member)
def _makeOrder(self, cr, uid, data, context):
    """Create one sale order (with lines) mirroring each selected purchase
    order, selling from the current user's company to itself.
    """
    pool = pooler.get_pool(cr.dbname)
    purchase_obj = pool.get('purchase.order')
    sale_obj = pool.get('sale.order')
    shop_obj = pool.get('sale.shop')
    # NOTE(review): assumes at least one shop exists; IndexError otherwise.
    shop_id = shop_obj.search(cr, uid, [])[0]
    partner_obj = pool.get('res.partner')
    sale_line_obj = pool.get('sale.order.line')
    new_ids = []
    user = pool.get('res.users').browse(cr, uid, uid)
    # The company's own partner is both seller and buyer here.
    partner_id = user.company_id.partner_id.id
    partner_addr = partner_obj.address_get(
        cr, uid, [partner_id], ['invoice', 'delivery', 'contact'])
    default_pricelist = partner_obj.browse(
        cr, uid, partner_id, context).property_product_pricelist.id
    fpos = partner_obj.browse(cr, uid, partner_id,
                              context).property_account_position
    fpos_id = fpos and fpos.id or False
    for purchase in purchase_obj.browse(cr, uid, data['ids']):
        vals = {
            'origin': 'PO:%s' % str(purchase.name),
            'picking_policy': 'direct',
            'shop_id': shop_id,
            'partner_id': partner_id,
            'pricelist_id': default_pricelist,
            'partner_invoice_id': partner_addr['invoice'],
            'partner_order_id': partner_addr['contact'],
            'partner_shipping_id': partner_addr['delivery'],
            'order_policy': 'manual',
            'date_order': now(),
            'fiscal_position': fpos_id
        }
        new_id = sale_obj.create(cr, uid, vals)
        fpos = user.company_id.partner_id.property_account_position and user.company_id.partner_id.property_account_position.id or False
        for line in purchase.order_line:
            # Let the onchange compute defaults, then force price/qty from
            # the purchase line.
            value = sale_line_obj.product_id_change(
                cr, uid, [], default_pricelist,
                line.product_id.id,
                qty=line.product_qty,
                partner_id=partner_id,
                fiscal_position=fpos)['value']
            value['price_unit'] = line.price_unit
            value['product_id'] = line.product_id.id
            value['product_uos'] = value.get('product_uos') and value.get(
                'product_uos')[0] or False
            value['product_uom_qty'] = line.product_qty
            value['order_id'] = new_id
            sale_line_obj.create(cr, uid, value)
    return {}
def testRegisterDocument(self):
    """Registering an invoice document and saving the transaction must not fail."""
    document = dict(
        documentClass=Invoice,
        number='some number',
        type='X',
        detail='blah blah blah',
        amount=100,
        actualDate=now(),
        otherParty=self.client,
    )
    self.pos.registerDocument(**document)
    self.trans.save()
def __init__(self, number=0, recordTime=None, pos=None, customerAccount=None):
    """Initialise the record; recordTime defaults to the current time
    when omitted (or falsy).
    """
    self.number = number
    # Any falsy recordTime (None, 0, ...) falls back to now().
    self.recordTime = now() if not recordTime else recordTime
    self.pointOfSale = pos
    self.customerAccount = customerAccount
def _opening(self, query, roomList, kind):
    """Run *query* for rooms opening around now and chain the room
    update/open callbacks.
    """
    day = now()
    if kind == IACLASS:
        # IA classes use a fixed reference week (May 2005); map today's
        # weekday onto it, keeping the current hour/minute.
        day = DateTime(2005, 5, (1 + day.day_of_week) % 7 + 1,
                       day.hour, day.minute)
    # NOTE(review): RelativeDateTime(minute=...) sets the minute absolutely,
    # effectively shifting 2 minutes ahead; the one-line original is
    # ambiguous about whether this applied only in the IACLASS branch --
    # confirm against version history.
    day = day + RelativeDateTime(minute=day.minute+2)
    d = self.db.db.runQuery(query, (day,))
    d.addCallback(self._updateRooms, roomList)
    d.addCallback(self._openRooms, roomList, kind)
def get_phone_numbers(self, person, only_first_affiliation=True):
    """Return a list of the registered phone numbers for a given person.

    Only the defined source systems and contact types are searched for, and
    the person must have an active affiliation from a system before a number
    could be retrieved from that same system.

    Note that only the person affiliation with the highest priority is
    checked for phone numbers, as long as L{only_first_affiliation} is True.
    This is to separate the user types and avoid e.g. a student's phone
    getting changed and thus be able to get hold of the employee account for
    the same person.
    """
    # Affiliations deleted within the grace period still count as active.
    old_limit = now() - RelativeDateTime(
        days=cereconf.INDIVIDUATION_AFF_GRACE_PERIOD)
    pe_systems = [
        int(af['source_system'])
        for af in person.list_affiliations(person_id=person.entity_id,
                                           include_deleted=True)
        if (af['deleted_date'] is None or af['deleted_date'] > old_limit)
    ]
    log.debug("Person has affiliations in the systems: %s" % pe_systems)
    phones = []
    # Priorities are ordered; only the first priority level with a matching
    # affiliation contributes when only_first_affiliation is True.
    for systems in self._get_priorities():
        sys_codes = [getattr(self.co, s) for s in systems]
        if not any(s in sys_codes for s in pe_systems):
            # person has no affiliation at this priority go to next priority
            continue
        for system, values in systems.iteritems():
            types = [getattr(self.co, t) for t in values['types']]
            sys = getattr(self.co, system)
            if not types:
                # support empty lists, to be able to block e.g. employees
                # from the service
                continue
            for row in person.list_contact_info(entity_id=person.entity_id,
                                                contact_type=types,
                                                source_system=sys):
                phones.append({
                    'number': row['contact_value'],
                    'system': sys,
                    'system_name': system,
                    'type': self.co.ContactInfo(row['contact_type']),
                })
        log.debug(
            "Phones for person_id:%s from (%s): %s" %
            (person.entity_id, ','.join(s for s in systems), ','.join(
                '%s:%s:%s' % (p['system_name'], p['type'], p['number'])
                for p in phones)))
        if only_first_affiliation:
            return phones
    return phones
def testRegisterDocument(self):
    """Registering an invoice document and saving the transaction must not fail."""
    kwargs = {
        'documentClass': Invoice,
        'number': 'some number',
        'type': 'X',
        'detail': 'blah blah blah',
        'amount': 100,
        'actualDate': now(),
        'otherParty': self.client,
    }
    self.pos.registerDocument(**kwargs)
    self.trans.save()
def testMxDateTime(self):
    """Both mx.DateTime columns must come back as DateTimeType; col1 keeps
    only the date (time zeroed), col2 keeps date and time.
    """
    _now = now()
    dt2 = DateTime2(col1=_now, col2=_now)
    # failUnless is a deprecated unittest alias; assertTrue is the
    # supported spelling with identical semantics.
    self.assertTrue(isinstance(dt2.col1, col.DateTimeType))
    self.assertTrue(isinstance(dt2.col2, col.DateTimeType))
    today_str = _now.strftime("%Y-%m-%d 00:00:00.00")
    now_str = _now.strftime("%Y-%m-%d %T.00")
    self.assertEqual(str(dt2.col1), today_str)
    self.assertEqual(str(dt2.col2), now_str)
def _restoringPA(self, query, roomList, kind):
    """Query rooms against the fixed reference week (May 2005), for both
    this week's and next week's mapped slot, then chain the room
    update/open callbacks.
    """
    current = now()
    slot = (1 + current.day_of_week) % 7 + 1
    day1 = DateTime(2005, 5, slot, current.hour, current.minute)
    day2 = DateTime(2005, 5, slot + 7, current.hour, current.minute)
    # RelativeDateTime(minute=...) sets the minute absolutely, i.e. two
    # minutes ahead of the current minute.
    shift = RelativeDateTime(minute=current.minute + 2)
    day1_start = day1 + shift
    day2_start = day2 + shift
    deferred = self.db.db.runQuery(query,
                                   (day1_start, day1, day2_start, day2))
    deferred.addCallback(self._updateRooms, roomList)
    deferred.addCallback(self._openRooms, roomList, kind)
def testDateTime(self):
    """col1 (DATE) must keep only the date part; col2 (DATETIME) must keep
    both date and time.
    """
    _now = now()
    dt1 = DateTime1(col1=_now, col2=_now)
    # failUnless is a deprecated unittest alias; assertTrue is the
    # supported spelling with identical semantics.
    self.assertTrue(isinstance(dt1.col1, date))
    self.assertTrue(isinstance(dt1.col2, datetime))
    today_str = _now.strftime("%Y-%m-%d")
    now_str = _now.strftime("%Y-%m-%d %T")
    self.assertEqual(str(dt1.col1), today_str)
    self.assertEqual(str(dt1.col2), now_str)
def _makeOrder(self, cr, uid, data, context):
    """Create one sale order (with lines) mirroring each selected purchase
    order, selling from the current user's company to itself.

    Raises an osv.except_osv error when the company has no shop defined.
    """
    pool = pooler.get_pool(cr.dbname)
    purchase_obj = pool.get('purchase.order')
    sale_obj = pool.get('sale.order')
    shop_obj = pool.get('sale.shop')
    # A shop is mandatory on sale orders; fail with a user-facing error
    # instead of an IndexError when none exists.
    shop_check = shop_obj.search(cr, uid, [])
    if shop_check:
        shop_id = shop_check[0]
    else:
        raise osv.except_osv(_('Error !'), _('No Shop defined for this Company !\nPlease create Shop for this Company.'))
    partner_obj = pool.get('res.partner')
    sale_line_obj = pool.get('sale.order.line')
    new_ids = []
    user = pool.get('res.users').browse(cr, uid, uid)
    # The company's own partner is both seller and buyer here.
    partner_id = user.company_id.partner_id.id
    partner_addr = partner_obj.address_get(cr, uid, [partner_id],
                                           ['invoice', 'delivery', 'contact'])
    default_pricelist = partner_obj.browse(cr, uid, partner_id,
                                           context).property_product_pricelist.id
    fpos = partner_obj.browse(cr, uid, partner_id,
                              context).property_account_position
    fpos_id = fpos and fpos.id or False
    for purchase in purchase_obj.browse(cr, uid, data['ids']):
        vals = {
            'origin': 'PO:%s' % str(purchase.name),
            'picking_policy': 'direct',
            'shop_id': shop_id,
            'partner_id': partner_id,
            'pricelist_id': default_pricelist,
            'partner_invoice_id': partner_addr['invoice'],
            'partner_order_id': partner_addr['contact'],
            'partner_shipping_id': partner_addr['delivery'],
            'order_policy': 'manual',
            'date_order': now(),
            'fiscal_position': fpos_id
        }
        new_id = sale_obj.create(cr, uid, vals)
        fpos = user.company_id.partner_id.property_account_position and user.company_id.partner_id.property_account_position.id or False
        for line in purchase.order_line:
            # Let the onchange compute defaults, then force price/qty from
            # the purchase line.
            value = sale_line_obj.product_id_change(
                cr, uid, [], default_pricelist,
                line.product_id.id,
                qty=line.product_qty,
                partner_id=partner_id,
                fiscal_position=fpos)['value']
            value['price_unit'] = line.price_unit
            value['product_id'] = line.product_id.id
            value['product_uos'] = value.get('product_uos') and value.get('product_uos')[0] or False
            value['product_uom_qty'] = line.product_qty
            value['order_id'] = new_id
            sale_line_obj.create(cr, uid, value)
    return {}
def testClassCreate(self):
    """A class built from the database schema (_fromDatabase) must round-trip
    boolean column values correctly.
    """
    if not self.supportAuto:
        return

    class AutoTest(SQLObject):
        _fromDatabase = True
        _idName = 'auto_id'
        _connection = connection()

    john = AutoTest(firstName='john', lastName='doe', age=10,
                    created=now(), wannahavefun=False)
    jane = AutoTest(firstName='jane', lastName='doe', happy='N',
                    created=now(), wannahavefun=True)
    # failIf/failUnless are deprecated unittest aliases; use the supported
    # assertFalse/assertTrue spellings (identical semantics).
    self.assertFalse(john.wannahavefun)
    self.assertTrue(jane.wannahavefun)
    # Unregister the ad-hoc class so later tests can redefine it.
    del classregistry.registry(AutoTest._registry).classes['AutoTest']
def _saveStatus(self):
    """Persist the current pupil's whiteboard status as an offline question,
    keyed by the selected subject. Returns the Twisted deferred chain.
    """
    date = now()
    # Serialised (pickled) whiteboard state for this avatar.
    status = self.server.wbClientStatus[self.avId].pickle()
    queryIdSubject = """select id from subject where name=%s"""
    # NOTE(review): time_submit is wrapped in literal quotes around the %s
    # placeholder -- with a parameterised driver this double-quotes the
    # value; confirm against the driver's paramstyle.
    query = """insert into offline_questions (fk_pupil, fk_subject, time_submit, status) values (%s, %s, '%s', %s) """
    status = QuotedString(status)
    d = self.db.db.runQuery(queryIdSubject, (self.selectedSubject,))
    # First resolve the subject name to its id, then insert the row.
    d.addCallback(lambda idRes: self.db.db.runOperation(
        query, (self.avId, idRes[0][0], date, status)))
    return d
def ous_with_quarantines(ou_object, ou_quarantine, initial_account):
    """Quarantine every populated OU and return the list of OU entries.

    Each OU produced by _populator gets ou_quarantine set (owned by
    initial_account, timestamped now) before being collected.
    """
    from mx.DateTime import now
    collected = []
    for entry in _populator(ou_object):
        ou_object.find(entry.get('entity_id'))
        ou_object.add_entity_quarantine(ou_quarantine,
                                        initial_account.entity_id,
                                        "Description",
                                        now())
        ou_object.clear()
        collected.append(entry)
    return collected
def test_dateTime():
    """col1 (DATE) keeps only the date part; col2 (DATETIME) keeps date+time."""
    setupClass(DateTime1)
    current = now()
    row = DateTime1(col1=current, col2=current)
    assert isinstance(row.col1, date)
    assert isinstance(row.col2, datetime)
    expected_date = current.strftime("%Y-%m-%d")
    expected_datetime = current.strftime("%Y-%m-%d %T")
    assert str(row.col1) == expected_date
    assert str(row.col2) == expected_datetime
def test_mxDateTime():
    """Both mx.DateTime columns come back as DateTimeType; col1 zeroes the
    time component, col2 keeps it."""
    setupClass(DateTime2)
    current = now()
    row = DateTime2(col1=current, col2=current)
    assert isinstance(row.col1, col.DateTimeType)
    assert isinstance(row.col2, col.DateTimeType)
    expected_date = current.strftime("%Y-%m-%d 00:00:00.00")
    expected_datetime = current.strftime("%Y-%m-%d %T.00")
    assert str(row.col1) == expected_date
    assert str(row.col2) == expected_datetime
def set_start(self, cr, uid, ids, context=None):
    """Mark the record(s) ready and arm the linked cron job to fire once,
    roughly 30 seconds from now.

    BUG FIX: the default for *context* was a mutable {} (shared across
    calls); it is now None. The parameter is unused in the body, so this
    is fully backward compatible.
    """
    self.write(cr, uid, ids, {'state': 'ready'})
    cron_id = self.browse(cr, uid, ids[0], {}).cron_id.id
    nextcall = (now() + DateTime.RelativeDateTime(seconds=30)
                ).strftime('%Y-%m-%d %H:%M:%S')
    self.pool.get('ir.cron').write(cr, uid, cron_id, {
        'numbercall': 1,
        'active': True,
        'nextcall': nextcall
    })
    return True
def actionScheduleAlarm(self, cmsg, dbcom):
    """Parse an SMS alarm request, persist it, and spawn the alarm thread.

    Returns one of the status strings "OK", "NOTFOUND", "INVALID", "ERROR".
    An alarm whose blow time is not in the future is stored as FAILED.
    """
    sms_dict = self.alarm.retrieveData(cmsg)
    activity = ACTIVE
    if sms_dict == NOTFOUND:
        self.log.LOG(
            LOG_ERROR, "sms.actionScheduleAlarm()",
            "TAGs are missing in the requisition to schedule an alarm. Aborting schedule."
        )
        return "NOTFOUND"
    blow = self.shared.mountTime(sms_dict[DATA_BLOW])
    if blow == INVALID:
        return "INVALID"
    elif blow <= now():
        # BUG FIX: was `blow < now() or blow == now()`, which read the clock
        # twice and could miss the exact-boundary case between the calls.
        activity = FAILED
    ret = dbcom.registerSMS(sms_dict[DATA_ORG], sms_dict[DATA_EXT + "0"],
                            sms_dict[DATA_BLOW], sms_dict[DATA_OPER],
                            sms_dict[DATA_MSG], activity)
    if ret == OK and activity == ACTIVE:
        alarm_thread = Thread(target=self.alarm.launch, args=(blow, ))
        alarm_thread.start()
        self.log.LOG(LOG_INFO, "sms", "New alarm thread has been started.")
        return "OK"
    elif ret == NOTFOUND:
        return "NOTFOUND"
    elif activity == FAILED:
        return "INVALID"
    else:
        return "ERROR"
def launch(self, blow):
    """Sleep until *blow* and then log that the alarm fired.

    Intended to run inside its own thread; any failure is logged and the
    thread ends (the broad except is deliberate for that reason).
    """
    try:
        # NOTE(review): assumes int() of an mx DateTimeDelta yields the
        # remaining seconds -- confirm against the mx.DateTime docs.
        sleep_time = int(blow - now())
        self.log.LOG(LOG_INFO, "alarm", "New alarm has been scheduled. Blow date/time in %s. It's take %d seconds from now." % (blow, sleep_time))
        time.sleep(sleep_time)
        self.log.LOG(LOG_INFO, "alarm", "THE ALARM HAS EXPLODED!!!.")
    except:
        self.log.LOG(LOG_CRITICAL, "alarm.launch()", "The alarm thread has a problem and will be aborted.")
def main(inargs=None):
    """Entry point: parse arguments, collect quarantine data since the
    requested start date, and write the HTML report.
    """
    parser = argparse.ArgumentParser(
        description="Generate an html formatted report of accounts with"
                    " active quarantines")
    parser.add_argument(
        '-o', '--output',
        metavar='FILE',
        type=argparse.FileType('w'),
        default='-',
        help='Output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        default=DEFAULT_ENCODING,
        type=codec_type,
        help="Output file encoding, defaults to %(default)s")
    # --start_date and --age both populate args.start_date; exactly one of
    # them is required.
    age_arg = parser.add_mutually_exclusive_group(required=True)
    age_arg.add_argument(
        '-s', '--start_date',
        metavar='DATE',
        dest='start_date',
        type=ISO.ParseDate,
        help='Report quarantines set by date (YYYY-MM-DD)')
    age_arg.add_argument(
        '-a', '--age',
        metavar='DAYS',
        dest='start_date',
        type=lambda x: now() + RelativeDateTime(days=-abs(int(x))),
        help='Report quarantines set by age (in days)')
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)
    db = Factory.get('Database')()
    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    quarantines = list(get_quarantine_data(db, args.start_date))
    # NOTE(review): called with 4 arguments; a sibling write_html_report in
    # this file takes only 3 -- presumably these belong to different
    # scripts; verify which definition this call resolves to.
    write_html_report(args.output, args.codec, quarantines, args.start_date)
    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()
        logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
def write_html_report(stream, codec, matches):
    """Render the module-level Jinja template with *matches* and write the
    result to *stream* using the given codec.
    """
    writer = codec.streamwriter(stream)
    env = Environment(trim_blocks=True, lstrip_blocks=True)
    total_accounts = sum(len(accounts) for accounts in matches.values())
    context = {
        'encoding': codec.name,
        'num_accounts': total_accounts,
        'matches': matches,
        'when': now().strftime('%Y-%m-%d %H:%M:%S'),
    }
    writer.write(env.from_string(template).render(context))
    writer.write('\n')
def purgeOldResults(self):
    """Delete result rows older than one hour, committing on success and
    rolling back (with a printed traceback) on any failure.
    """
    cutoff = now() - ONE_HOUR
    statement = 'DELETE FROM results WHERE record_ts < %(treshold)s'
    cursor = self._cnx.cursor()
    try:
        cursor.execute(statement, {'treshold': cutoff})
    except:
        # Best-effort cleanup: report the failure and undo the delete.
        traceback.print_exc()
        cursor.close()
        self._cnx.rollback()
    else:
        cursor.close()
        self._cnx.commit()