def is_str_or_unicode_test():
    """ Utils.is_str_or_unicode accepts unicode, str, rejects others. """
    for accepted in ('Hello world!', u'Hello world!', str(None),
                     unicode(None)):
        assert Utils.is_str_or_unicode(accepted)
    # Non-string values are rejected.
    assert not Utils.is_str_or_unicode(None)
def is_str_test():
    """ Utils.is_str accepts str, rejects unicode and others. """
    for accepted in ('Hello world!', str(None)):
        assert Utils.is_str(accepted)
    for rejected in (u'Hello world!', None, unicode(None)):
        assert not Utils.is_str(rejected)
def _send_mail(mail_to, mail_from, subject, body, logger, mail_cc=None,
               debug_enabled=False):
    """Send an e-mail, logging delivery failures.

    Returns True on success (and in debug mode, where nothing is actually
    sent), False if the mail could not be delivered.
    """
    if debug_enabled:
        # Dry-run: log what would have been sent and skip delivery.
        logger.debug("Sending mail to %s. Subject: %s", mail_to, subject)
        return True
    try:
        Utils.sendmail(
            toaddr=mail_to,
            fromaddr=mail_from,
            subject=subject,
            body=body,
            cc=mail_cc,
            debug=debug_enabled)
    except smtplib.SMTPRecipientsRefused as e:
        for refused_addr, condition in e.recipients.iteritems():
            logger.exception("Failed when notifying %s (%s): %s",
                             mail_to, refused_addr, condition)
        return False
    except Exception as e:
        logger.error("Error when notifying %s: %s" % (mail_to, e))
        return False
    return True
def _send_mail(mail_to, mail_from, subject, body, logger, mail_cc=None,
               debug_enabled=False):
    """Send an e-mail and report success as a boolean.

    In debug mode nothing is sent; the call only logs and reports True.
    """
    if debug_enabled:
        logger.debug("Sending mail to %s. Subject: %s", mail_to, subject)
        return True
    success = True
    try:
        Utils.sendmail(toaddr=mail_to, fromaddr=mail_from, subject=subject,
                       body=body, cc=mail_cc, debug=debug_enabled)
    except smtplib.SMTPRecipientsRefused as e:
        # One log entry per refused recipient.
        for mail, condition in e.recipients.iteritems():
            logger.exception("Failed when notifying %s (%s): %s",
                             mail_to, mail, condition)
        success = False
    except Exception as e:
        logger.error("Error when notifying %s: %s" % (mail_to, e))
        success = False
    return success
def serve(config, num_workers, enable_listener, enable_collectors):
    """Build the event handler/listener/collector process tree and serve."""
    channels = [TARGET_SYSTEM, ]
    proc_handler = utils.ProcessHandler(manager=Manager)
    event_queue = proc_handler.mgr.queue()
    fan_out = []
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    # One worker process per requested worker, all fed from event_queue.
    for _ in range(num_workers):
        proc_handler.add_process(
            handler_cls(daemon=True,
                        queue=event_queue,
                        log_queue=proc_handler.log_queue,
                        running=proc_handler.run_trigger,
                        config=config,
                        mock=config.client.mock))
    # Optional deferred handler with its own queue, fanned out to below.
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = proc_handler.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        proc_handler.add_process(
            deferred_cls(daemon=True,
                         queue=deferred_queue,
                         log_queue=proc_handler.log_queue,
                         running=proc_handler.run_trigger,
                         config=config,
                         mock=config.client.mock))
        fan_out.append(deferred_queue)
    if enable_listener:
        proc_handler.add_process(
            evhandlers.EventLogListener(daemon=True,
                                        queue=event_queue,
                                        fan_out_queues=fan_out,
                                        log_queue=proc_handler.log_queue,
                                        running=proc_handler.run_trigger,
                                        channels=channels))
    if enable_collectors:
        for channel in channels:
            proc_handler.add_process(
                evhandlers.EventLogCollector(
                    daemon=True,
                    queue=event_queue,
                    fan_out_queues=fan_out,
                    log_queue=proc_handler.log_queue,
                    running=proc_handler.run_trigger,
                    channel=channel,
                    config=config.eventcollector))
    proc_handler.serve()
def dyn_import_test():
    """ Utils.dyn_import puts modules in sys.modules. """
    names = ("Cerebrum.Utils", "Cerebrum.modules", "Cerebrum.modules.no")
    for name in names:
        Utils.dyn_import(name)
        assert name in sys.modules
    # A repeated import returns the very object cached in sys.modules.
    last = "Cerebrum.modules.no"
    assert Utils.dyn_import(last) is sys.modules[last]
def serve(logger, config, num_workers, enable_listener, enable_collectors):
    """Start event worker/listener/collector processes and serve forever."""
    logger.info('Starting {!r} event utils'.format(TARGET_SYSTEM))
    channels = [TARGET_SYSTEM, ]
    proc_handler = utils.ProcessHandler(logger=logger, manager=Manager)
    event_queue = proc_handler.mgr.queue()
    fan_out = []
    # Keyword arguments shared by the worker and deferred-handler processes.
    worker_opts = dict(queue=event_queue,
                       log_queue=proc_handler.log_queue,
                       running=proc_handler.run_trigger,
                       config=config,
                       mock=config.client.mock)
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        proc_handler.add_process(handler_cls, **worker_opts)
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = proc_handler.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        # Same options as the workers, but reading the deferred queue.
        proc_handler.add_process(deferred_cls,
                                 **dict(worker_opts, queue=deferred_queue))
        fan_out.append(deferred_queue)
    if enable_listener:
        proc_handler.add_process(evhandlers.DBEventListener,
                                 queue=event_queue,
                                 fan_out_queues=fan_out,
                                 log_queue=proc_handler.log_queue,
                                 running=proc_handler.run_trigger,
                                 channels=channels)
    if enable_collectors:
        for channel in channels:
            proc_handler.add_process(evhandlers.DBEventCollector,
                                     queue=event_queue,
                                     fan_out_queues=fan_out,
                                     log_queue=proc_handler.log_queue,
                                     running=proc_handler.run_trigger,
                                     channel=channel,
                                     config=config.eventcollector)
    proc_handler.serve()
def serve(config, num_workers, enable_listener, enable_collectors):
    """Build the event process tree (bounded-queue variant) and serve."""
    channels = [TARGET_SYSTEM, ]
    proc_handler = utils.ProcessHandler(manager=Manager)
    # The main event queue is bounded to 1000 entries.
    event_queue = proc_handler.mgr.queue(maxsize=1000)
    fan_out = []
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        proc_handler.add_process(
            handler_cls(daemon=True,
                        queue=event_queue,
                        log_channel=proc_handler.log_channel,
                        running=proc_handler.run_trigger,
                        config=config,
                        mock=config.client.mock))
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = proc_handler.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        proc_handler.add_process(
            deferred_cls(daemon=True,
                         queue=deferred_queue,
                         log_channel=proc_handler.log_channel,
                         running=proc_handler.run_trigger,
                         config=config,
                         mock=config.client.mock))
        fan_out.append(deferred_queue)
    if enable_listener:
        proc_handler.add_process(
            evhandlers.EventLogListener(daemon=True,
                                        queue=event_queue,
                                        fan_out_queues=fan_out,
                                        log_channel=proc_handler.log_channel,
                                        running=proc_handler.run_trigger,
                                        channels=channels))
    if enable_collectors:
        for channel in channels:
            proc_handler.add_process(
                evhandlers.EventLogCollector(
                    daemon=True,
                    queue=event_queue,
                    fan_out_queues=fan_out,
                    log_channel=proc_handler.log_channel,
                    running=proc_handler.run_trigger,
                    channel=channel,
                    config=config.eventcollector))
    proc_handler.serve()
def update_mappings(progname, config):
    """Register the event types handled by this daemon.

    Collects the events from the configured handler class (and, when
    configured, the deferred handler class) and updates the system
    mappings for TARGET_SYSTEM.
    """
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    # Copy the list: event_map.events is a class-level attribute, and
    # extend()-ing it in place below would permanently leak the deferred
    # handler's events into the handler class itself.
    events = list(handler_cls.event_map.events)
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        events.extend(deferred_cls.event_map.events)
    utils.update_system_mappings(progname, TARGET_SYSTEM, events)
def test_messages_fetch_exists():
    """ Utils.Messages fetch exising word/fallback word. """
    cases = [
        # (text dict, expected lookup result for 'foo')
        ({'foo': {'no': 'bar_no', 'en': 'bar_en'}}, 'bar_no'),  # both langs
        ({'foo': {'no': 'bar_no', }}, 'bar_no'),  # primary language only
        ({'foo': {'en': 'bar_en', }}, 'bar_en'),  # fallback language only
    ]
    for text, expected in cases:
        messages = Utils.Messages(text=text, lang='no', fallback='en')
        assert messages['foo'] == expected
def serve(logger, config, num_workers, enable_listener, enable_collectors):
    """Start DB event worker/listener/collector processes and serve."""
    logger.info('Starting {!r} event utils'.format(TARGET_SYSTEM))
    channels = [TARGET_SYSTEM, ]
    ph = utils.ProcessHandler(logger=logger, manager=Manager)
    event_queue = ph.mgr.queue()
    fan_out_queues = []
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        ph.add_process(handler_cls,
                       queue=event_queue,
                       log_queue=ph.log_queue,
                       running=ph.run_trigger,
                       config=config,
                       mock=config.client.mock)
    deferred_mod = config.deferred_handler.handler_mod
    deferred_name = config.deferred_handler.handler_class
    if deferred_mod and deferred_name:
        deferred_queue = ph.mgr.queue()
        deferred_cls = getattr(Utils.dyn_import(deferred_mod), deferred_name)
        ph.add_process(deferred_cls,
                       queue=deferred_queue,
                       log_queue=ph.log_queue,
                       running=ph.run_trigger,
                       config=config,
                       mock=config.client.mock)
        fan_out_queues.append(deferred_queue)
    if enable_listener:
        ph.add_process(evhandlers.DBEventListener,
                       queue=event_queue,
                       fan_out_queues=fan_out_queues,
                       log_queue=ph.log_queue,
                       running=ph.run_trigger,
                       channels=channels)
    if enable_collectors:
        for channel in channels:
            ph.add_process(evhandlers.DBEventCollector,
                           queue=event_queue,
                           fan_out_queues=fan_out_queues,
                           log_queue=ph.log_queue,
                           running=ph.run_trigger,
                           channel=channel,
                           config=config.eventcollector)
    ph.serve()
def email_report(to_address, from_address, report):
    """Send the report by email.

    Refused recipients are logged (via the module-level logger) rather
    than raised.
    """
    import smtplib
    try:
        Utils.sendmail(to_address, from_address, 'ePhorte role report',
                       report, cc=None)
    # 'except X as e' replaces the Python-2-only 'except X, e' spelling,
    # consistent with the rest of the file.
    except smtplib.SMTPRecipientsRefused as e:
        failed_recipients = e.recipients
        logger.info("Failed to notify <%d> users", len(failed_recipients))
        for email, condition in failed_recipients.iteritems():
            logger.info("Failed to notify: %s", condition)
def main():
    """Parse options and report persons with too many active accounts."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hsdf:',
                                   ['help', 'summary', 'detail', 'min=',
                                    'max=', 'mail-to=', 'mail-from=',
                                    'file='])
    except getopt.GetoptError:
        usage("Argument error!", 1)
    detail = False
    minimum = 1
    maximum = None  # renamed from 'maxmum' (typo)
    report_type = 'screen'
    outputstream = sys.stdout
    mail_to = None
    mail_from = None
    persons = 'person_id'
    for opt, val in opts:
        if opt in ('-h', '--help'):
            usage()
        elif opt in ('-s', '--summary'):
            pass
        # BUG FIX: the single-element cases were written as ('--min') etc.,
        # which is a plain string, so 'in' did substring matching instead
        # of tuple membership.  One-element tuples need a trailing comma.
        elif opt in ('--min',):
            minimum = int(val)
            # NOTE(review): message says "at least 1" but the check only
            # rejects negatives -- confirm the intended lower bound.
            if minimum < 0:
                usage("Error: the value of parameter --min should be "
                      "at least 1", 2)
        elif opt in ('--max',):
            maximum = int(val)
            if maximum < 0:
                usage("Error: the value of parameter --max should be "
                      "at least 1", 3)
        elif opt in ('-f', '--file'):
            report_type = 'file'
            outputstream = open(val, 'w')
            outputstream.write("==== Results of checking active accounts "
                               "in Cerebrum ====\n")
            outputstream.write("person_id\tNr_accounts\taccount_names\n")
        elif opt in ('--mail-to',):
            report_type = 'mail'
            mail_to = val
        elif opt in ('--mail-from',):
            mail_from = val
        else:
            usage("Error: Unknown parameter '%s'" % opt, 4)
    persons += checkACaccount(minimum, maximum, detail, report_type,
                              outputstream)
    if mail_to:
        count = persons.count("\n")
        subject = ("Warning: these following %s persons have more than %s "
                   "active accounts." % (count, minimum))
        Utils.sendmail(mail_to, mail_from, subject, persons)
def update_mappings(progname, config):
    """Register events handled by the configured handler classes.

    Gathers the event list from the handler class (plus the deferred
    handler class, when configured) and updates the system mappings for
    TARGET_SYSTEM.
    """
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    # Take a copy: event_map.events lives on the handler class, and
    # extending it in place would mutate that shared class attribute.
    events = list(handler_cls.event_map.events)
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        events.extend(deferred_cls.event_map.events)
    utils.update_system_mappings(progname, TARGET_SYSTEM, events)
def wrapped_separate_entries_test():
    """ Utils.{keep,reject}_entries() filters as expected. """
    rows = [MetaRow(('a', 'b', 'c'))(row)
            for row in ((1, 2, 3), (1, 2, 4), (1, 3, 4))]
    # This predicate matches only the first row.
    predicate = (('a', 1), ('b', 2), ('c', 3))
    kept = Utils.keep_entries(rows, *predicate)
    rejected = Utils.reject_entries(rows, *predicate)
    assert rows[0] in kept
    assert rows[1] not in kept
    assert rows[0] not in rejected
    assert rows[1] in rejected
def wrapper(cls):
    """Decorate a connection class with driver module/API attributes."""
    # Make the API exceptions available
    for name in errors.API_EXCEPTION_NAMES:
        setattr(cls, name, getattr(Utils.this_module(), name))
    # The driver module's type constructors become static methods of the
    # connection class -- unless the class already defines its own,
    # probably for a good reason (e.g. the driver lacks that ctor).
    for ctor_name in API_TYPE_CTOR_NAMES:
        if hasattr(cls, ctor_name):
            continue
        setattr(cls, ctor_name, staticmethod(getattr(module, ctor_name)))
    # Likewise, copy the driver-specific type objects onto the class.
    for type_name in API_TYPE_NAMES:
        if not hasattr(cls, type_name):
            setattr(cls, type_name, getattr(module, type_name))
    # make the real db module available as db-mod
    cls._db_mod = module
    return cls
def delete_user(uname, old_host, old_home, operator, mail_server):
    """Run the remote rmuser script; bump the generation trait on success."""
    account = get_account(name=uname)
    trait = account.get_trait(const.trait_account_generation)
    generation = (trait['numval'] + 1) if trait else 1
    args = [SUDO_CMD, cereconf.RMUSER_SCRIPT,
            '--username', account.account_name,
            '--deleted-by', operator,
            '--homedir', old_home,
            '--generation', str(generation)]
    if DEBUG:
        args.append('--debug')
    cmd = SSH_CEREBELLUM + [" ".join(args), ]
    if Utils.spawn_and_log_output(cmd, connect_to=[old_host]) != EXIT_SUCCESS:
        return False
    # Record the new generation only after a successful remote run.
    account.populate_trait(const.trait_account_generation, numval=generation)
    account.write_db()
    return True
def wrapper(self, *rest, **kw_args):
    """Call functor with the configured exceptions suppressed."""
    # Wrap the method so the suitable exceptions are ignored, then
    # invoke the wrapped version with the original arguments.
    wrapped = Utils.exception_wrapper(functor, exc_list, return_on_exc,
                                      self.logger)
    return wrapped(self, *rest, **kw_args)
def proc_sympa_remove(request):
    """Execute the request for removing a sympa mailing list.

    @type request: ??
    @param request: A dict-like object containing all the parameters for
        sympa list removal.
    """
    try:
        # NOTE(review): state_data is unpickled here; pickle.loads executes
        # arbitrary code, so confirm it can only be written by trusted code.
        state = pickle.loads(str(request["state_data"]))
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any unpickling failure is still treated
        # as a corrupt request.
        logger.exception("Corrupt request state for sympa request %s: %s",
                         request["request_id"], request["state_data"])
        return True
    try:
        listname = state["listname"]
        host = state["run_host"]
    except KeyError:
        logger.error("No listname/runhost specified for request %s",
                     request["request_id"])
        return True
    cmd = [cereconf.SYMPA_SCRIPT, host, 'rmlist', listname]
    return Utils.spawn_and_log_output(cmd) == EXIT_SUCCESS
def connect_cyrus(host=None, username=None, as_admin=True):
    """Connect to user's Cyrus and return IMAP object.

    Authentication is always as CYRUS_ADMIN, but if as_admin is True
    (default), authorise as admin user, not username.  It is assumed the
    Cyrus server accepts SASL PLAIN and SSL.
    """
    def auth_plain_cb(response):
        # authcid\0authzid\0password for SASL PLAIN.
        admin_pw = Utils.read_password(cereconf.CYRUS_ADMIN,
                                       cereconf.CYRUS_HOST)
        return "%s\0%s\0%s" % (username or cereconf.CYRUS_ADMIN,
                               cereconf.CYRUS_ADMIN, admin_pw)

    def conn_error(exc):
        # Uniform error text for connection/authentication failures.
        return CyrusConnectError("%s@%s: %s" % (username, host.name, exc))

    if as_admin:
        username = cereconf.CYRUS_ADMIN
    try:
        imapconn = Utils.CerebrumIMAP4_SSL(host=host.name,
                                           ssl_version=ssl.PROTOCOL_TLSv1)
    except socket.gaierror as e:
        raise conn_error(e)
    try:
        imapconn.authenticate('PLAIN', auth_plain_cb)
    except (imapconn.error, socket.error) as e:
        raise conn_error(e)
    return imapconn
def send_mail(mailto, mail_template, substitute, debug=False):
    """Send a templated mail (or just render it in debug) and log it."""
    ret = Utils.mail_template(mailto, mail_template, substitute=substitute,
                              debug=debug)
    # mail_template returns the rendered message when nothing was sent.
    if not ret:
        logger.debug("Sending mail to: %s" % mailto)
    else:
        logger.debug("Not sending mail:\n%s" % ret)
def generate_mail_notification(quar_info, event_info, debug=False):
    """Build and send a mail describing a quarantine change event."""
    # Map change types to the verb used in the notification text.
    action_names = {
        'quarantine:mod': 'altered',
        'quarantine:add': 'added',
        'quarantine:del': 'deleted',
    }
    change_type = event_info['change_type']
    if change_type not in action_names:
        raise Errors.CerebrumError('Unknown quarantine action: %s'
                                   % change_type)
    event_type = action_names[change_type]
    subject = 'Quarantine %s %s on account %s' % (
        quar_info['name'], event_type, event_info['subject'])
    body = ('Quarantine %s %s on %s.\n\n'
            'When: %s\n'
            'By: %s\n'
            'Change-ID: %s') % (quar_info['name'],
                                event_type,
                                event_info['subject'],
                                event_info['time_stamp'],
                                event_info['change_by'],
                                event_info['change_id'])
    return Utils.sendmail(quar_info['mail_to'], quar_info['mail_from'],
                          subject, body, debug=debug)
def test_argument_to_sql_droptables():
    """ Utils.argument_to_sql with Bobby Tables. """
    binds = {}
    name = "Robert'; DROP TABLE Students;--"
    sql = Utils.argument_to_sql(name, 'name', binds)
    # argument_to_sql must parameterize, not sanitize: the raw value goes
    # into the binds dict unchanged and the SQL uses a placeholder.
    assert sql == '(name = :name)'
    assert binds == {'name': name}
def fullsync(user_class_ref, url, user_spread=None, group_spread=None,
             dryrun=False, delete_objects=False, ad_ldap=None):
    """Run a full AD sync using the given sync class and spread.

    :param user_class_ref: "module/ClassName" reference to the sync class.
    :param url: AD service URL (may contain a password; kept out of logs).
    """
    # Get module and class name, and use getattr to get a class object.
    # (fixed comment typo: was "glass name")
    modname, classname = user_class_ref.split("/")
    sync_class = getattr(Utils.dyn_import(modname), classname)
    # Group or user sync?
    sync_type = 'user'
    spread = user_spread
    if group_spread:
        sync_type = 'group'
        spread = group_spread
    # Different logger for different adsyncs
    logger_name = "ad_" + sync_type + "sync_" + str(spread).split('@ad_')[1]
    logger = Utils.Factory.get_logger(logger_name)
    # Catch ProtocolError to avoid that a url containing a password is
    # written to the log.
    try:
        # instantiate sync_class and call full_sync
        sync_class(db, co, logger, url=url, ad_ldap=ad_ldap).full_sync(
            sync_type, delete_objects, spread, dryrun, user_spread)
    # 'as xpe' replaces the Python-2-only 'except X, xpe' spelling.
    except xmlrpclib.ProtocolError as xpe:
        logger.critical("Error connecting to AD service. Giving up!: %s %s" %
                        (xpe.errcode, xpe.errmsg))
def send_mail(mail_to, mail_from, subject, body, mail_cc=None):
    """Function for sending mail to users.

    Will respect dryrun, as that is given and handled by Utils.sendmail,
    which is then not sending the e-mail.

    @type mail_to: string
    @param mail_to: The recipient of the Email.

    @type mail_from: string
    @param mail_from: The senders address.

    @type subject: string
    @param subject: The messages subject.

    @type body: string
    @param body: The message body.

    @type mail_cc: string
    @param mail_cc: An optional address that the mail will be CCed to.

    @rtype: bool
    @return: A boolean that tells if the email was sent successfully or not.
    """
    # NOTE(review): the docstring promises a bool, but no value is
    # returned on either path -- confirm what the callers expect.
    try:
        ret = Utils.sendmail(mail_to, mail_from, subject, body, cc=mail_cc,
                             debug=dryrun)
        if debug_verbose:
            print("---- Mail: ---- \n" + ret)
    # 'as e' replaces the Python-2-only 'except X, e' spelling.
    except smtplib.SMTPRecipientsRefused as e:
        failed_recipients = e.recipients
        logger.info("Failed to notify <%d> users", len(failed_recipients))
        for _, condition in failed_recipients.iteritems():
            logger.info("Failed to notify: %s", condition)
def test_argument_to_sql_sequence():
    """ Utils.argument_to_sql with sequence. """
    values = [1, 2, 3]
    expected_sql = '(foo IN (:foo0, :foo1, :foo2))'
    expected_binds = {'foo0': 1, 'foo1': 2, 'foo2': 3}
    # tuple, set and list arguments must all expand the same way.
    for seq_type in (tuple, set, list):
        binds = {}
        sql = Utils.argument_to_sql(seq_type(values), 'foo', binds)
        assert sql == expected_sql
        assert binds == expected_binds
def test_format_exception_context1():
    """ Utils.format_exception_context with valid arguments. """
    try:
        raise ValueError("ioshivfq")
    except ValueError:
        message = Utils.format_exception_context(*sys.exc_info())
    # Raw string literal: '\(' in a plain string is an invalid escape
    # sequence (SyntaxWarning in modern Python).  The pattern bytes are
    # unchanged, since '\(' had no escape meaning anyway.
    assert re.search(r"Exception <type 'exceptions.ValueError'> occured "
                     r"\(in context.*: ioshivfq", message)
def test_notset_single():
    """ Utils.NotSet comparison behaviour. """
    ns1 = Utils.NotSet
    ns2 = Utils.NotSet
    ns3 = Utils._NotSet()
    # NotSet is a singleton: constructing _NotSet yields the same object.
    for other in (ns2, ns3):
        assert ns1 is other
    assert ns1 == ns2 == ns3
    # ...and it is always falsy.
    assert not bool(ns1)
def main():
    """Parse command-line options and report persons added since a date."""
    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "hds:c:t:f:",
            ["help", "dryrun", "start-date=", "change-program=",
             "mail-to=", "mail-from="]
        )
    except getopt.GetoptError:
        usage(1)
    change_program = None
    mail_to = None
    mail_from = None
    dryrun = False
    # Default start date: one day ago.
    sdate = DateTime.now() - 1
    for opt, val in opts:
        if opt in ("-h", "--help"):
            usage()
        if opt in ("-d", "--dryrun"):
            dryrun = True
        elif opt in ("-s", "--start-date"):
            try:
                sdate = DateTime.ISO.ParseDate(val)
            except ValueError:
                logger.error("Incorrect date format")
                usage(exitcode=2)
        elif opt in ("-c", "--change-program"):
            change_program = val
        elif opt in ("-t", "--mail-to"):
            mail_to = val
        elif opt in ("-f", "--mail-from"):
            mail_from = val
    new_persons = get_new_persons(sdate, change_program=change_program)
    if not new_persons:
        return
    msg = report_new_persons(new_persons)
    if change_program:
        subject = "New persons from %s since %s" % (change_program,
                                                    sdate.date)
    else:
        subject = "New persons since %s" % sdate.date
    if mail_to and not dryrun:
        Utils.sendmail(mail_to, mail_from, subject, msg)
    else:
        print(msg)
def spool_job(self, filename, type, printer, skip_lpr=False,
              logfile=None, lpr_user='******', def_lpr_cmd=None):
    """Spools the job. The spool command is executed in the directory
    where filename resides.

    Converts the .dvi produced from `filename` to PostScript or PDF (via
    the cereconf PRINT_DVIPS_CMD/PRINT_DVIPDF_CMD commands) and, unless
    skip_lpr is set, hands the result to the configured lpr command.

    NOTE(review): the `type` parameter shadows the builtin; only the
    value 'tex' triggers the LaTeX run here.
    """
    if logfile is None:
        logfile = Utils.make_temp_file(only_name=True)
    self.logfile = logfile
    old_dir = os.getcwd()
    # Run from the file's directory so relative outputs land next to it.
    if os.path.dirname(filename):
        os.chdir(os.path.dirname(filename))
    # Strip the extension; raises ValueError if filename has no '.'.
    base_filename = filename[:filename.rindex('.')]
    try:
        # Pick the dvi -> ps/pdf conversion command; at least one of the
        # two cereconf settings must be set, otherwise we bail out below.
        if cereconf.PRINT_DVIPS_CMD:
            format_sys_cmd = "%s -f < %s.dvi > %s.ps 2>> %s" % (
                cereconf.PRINT_DVIPS_CMD, base_filename, base_filename,
                logfile)
            base_filename += ".ps"
        elif cereconf.PRINT_DVIPDF_CMD:
            format_sys_cmd = "%s %s.dvi %s.pdf 2>> %s" % (
                cereconf.PRINT_DVIPDF_CMD, base_filename, base_filename,
                logfile)
            base_filename += ".pdf"
        else:
            raise IOError("Error spooling job, see %s for details" % logfile)
        if type == 'tex':
            # Run LaTeX, then the conversion.  os.system returns non-zero
            # on failure (truthy), so 'or' stops after the first error.
            status = (os.system("%s --interaction nonstopmode %s >> %s 2>&1"
                                % (cereconf.PRINT_LATEX_CMD, filename,
                                   logfile))
                      or os.system("%s" % (format_sys_cmd)))
            if status:
                raise IOError("Error spooling job, see %s for details"
                              % logfile)
        if not skip_lpr:
            # Reject printer names containing shell-unsafe characters.
            if printer is not None and re.search(r'[^a-z0-9\-_]', printer):
                raise IOError("Bad printer name")
            if def_lpr_cmd:
                lpr_cmd = string.Template(def_lpr_cmd)
            else:
                lpr_cmd = string.Template(cereconf.PRINT_LPR_CMD)
            # Assemble parameters that might be of use for further
            # handling of the job. Contents of def_lpr_cmd/
            # cereconf.PRINT_LPR_CMD determine what is actually used and for
            # what purpose
            lpr_params = {'filename': base_filename,
                          'uname': lpr_user,
                          'printer': printer,
                          'hostname': os.uname()[1]}
            status = os.system("%s >> %s 2>&1"
                               % (lpr_cmd.substitute(lpr_params), logfile))
            if status:
                raise IOError("Error spooling job, see %s for details (tail: %s)" % (logfile, self._tail(logfile, num=1)))
    finally:
        # Always restore the original working directory.
        os.chdir(old_dir)
def archive_cyrus_data(uname, mail_server, generation):
    """Run the remote mail-archiving script; True when it succeeded."""
    args = [SUDO_CMD, cereconf.ARCHIVE_MAIL_SCRIPT,
            '--server', mail_server,
            '--user', uname,
            '--gen', str(generation)]
    if DEBUG:
        args.append('--debug')
    remote_cmd = SSH_CEREBELLUM + [" ".join(args), ]
    status = Utils.spawn_and_log_output(remote_cmd,
                                        connect_to=[mail_server])
    return status == EXIT_SUCCESS
def next(self):
    """Return next object constructed from a suitable XML element

    Reads the 'next' element and returns an object constructed out of it,
    if at all possible. The object construction is dispatched to
    subclasses (via next_object). If the object construction fails,
    next_object should return None.

    This method consumes subsequent XML elements/subtrees until a
    suitable object can be constructed or we run out of XML elements. In
    the latter case StopIteration is thrown (as per iterator protocol).

    IVR 2007-12-25 TBD: releasing the memory occupied by subtrees is
    quite helpful, but very ugly in this code. This should be implemented
    more elegantly.
    """
    import sys
    element = None
    while 1:
        try:
            # Fetch the next XML subtree...
            element = self._xmliter.next()
            # ... and dispatch to subclass to create an object
            obj = self.next_object(element)
            # free the memory in the ElementTree framework.
            element.clear()
            # No generic 'no object created' message here: next_object()
            # is expected to have logged a far more specific error when
            # it returns None.
            if obj is not None:
                return obj
        except StopIteration:
            raise
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit and
            # KeyboardInterrupt are no longer swallowed.  Anything else
            # is logged and parsing continues: one defective entry must
            # not break the entire data import run.
            if self.logger:
                self.logger.warn(
                    "%s occurred while processing XML element %s. "
                    "Skipping it.",
                    Utils.format_exception_context(*sys.exc_info()),
                    # BUG FIX: 'element' was unbound (or stale from the
                    # previous iteration) if the iterator itself raised.
                    element.tag if element is not None else "<unknown>")
            if element is not None:
                element.clear()
def encrypt(self, plaintext, salt=None, binary=False):
    """Return a crypt(3) hash of plaintext using an MD5-style salt.

    :param plaintext: the password; must be text unless binary is True
    :param salt: optional pre-built salt; a fresh one is generated when None
    :param binary: whether a byte-string plaintext is acceptable
    :raises ValueError: if plaintext is a byte string and binary is False
    """
    if not isinstance(plaintext, six.text_type) and not binary:
        raise ValueError("plaintext cannot be bytestring and not binary")
    if isinstance(plaintext, six.text_type):
        # crypt works on byte strings; encode text input first.
        plaintext = plaintext.encode('utf-8')
    if salt is None:
        saltchars = string.ascii_letters + string.digits + "./"
        # The "$1$" prefix selects the MD5-based crypt scheme.
        salt = bytes("$1$" + Utils.random_string(8, saltchars))
        # NOTE(review): bytes(str) only behaves like this on Python 2;
        # on Python 3 it raises -- confirm the supported runtime.
    elif isinstance(salt, six.text_type):
        salt = bytes(salt)
    # NOTE(review): on Python 3, crypt.crypt returns str and .decode()
    # would fail -- another sign this path assumes Python 2.
    return crypt.crypt(plaintext, salt).decode()
def separate_entries_test():
    """ Utils.separate_entries() filters as expected. """
    rows = [MetaRow(('a', 'b', 'c'))(row)
            for row in ((1, 2, 3), (1, 2, 4), (1, 3, 4))]
    keep, reject = Utils.separate_entries(rows, ('a', 1), ('b', 2), ('c', 3))
    # Should keep rows[0], reject rows[1], rows[2]
    assert rows[0] in keep and rows[1] in reject
    test_cases = [
        # Matches only the 1st row
        {'predicate': (('a', 1), ('b', 2), ('c', 3)), 'keep': 1, 'reject': 2},
        # Matches all rows (every row has a == 1)
        {'predicate': (('a', 1),), 'keep': 3, 'reject': 0},
        # Matches no rows (no row has a == 2)
        {'predicate': (('a', 2), ), 'keep': 0, 'reject': 3},
        # Matches 2nd and 3rd rows (the ones with c == 4)
        {'predicate': (('a', 1), ('c', 4)), 'keep': 2, 'reject': 1},
    ]
    for case in test_cases:
        keep, reject = Utils.separate_entries(rows, *case['predicate'])
        assert len(keep) == case['keep']
        assert len(reject) == case['reject']
def move_user(uname, uid, gid, old_host, old_disk, new_host, new_disk,
              operator):
    """Run the remote mvuser script; True when the move succeeded."""
    args = [SUDO_CMD, cereconf.MVUSER_SCRIPT,
            '--user', uname,
            '--uid', str(uid),
            '--gid', str(gid),
            '--old-disk', old_disk,
            '--new-disk', new_disk,
            '--operator', operator]
    if DEBUG:
        args.append('--debug')
    remote_cmd = SSH_CEREBELLUM + [" ".join(args), ]
    status = Utils.spawn_and_log_output(remote_cmd,
                                        connect_to=[old_host, new_host])
    return status == EXIT_SUCCESS
def proc_sympa_create(request):
    """Execute the request for creating a sympa mailing list.

    :type request: db_row
    :param request: A dict-like object describing the sympa list creation
        request.
    """
    try:
        listname = get_address(request["entity_id"])
    except Errors.NotFoundError:
        logger.info("Sympa list address id:%s is deleted! No need to create",
                    request["entity_id"])
        return True
    state = None
    try:
        state = json.loads(request["state_data"])
    except ValueError:
        pass
    # Remove this when there's no chance of pickled data
    if state is None:
        try:
            state = pickle.loads(request["state_data"])
        except Exception:
            pass
    if state is None:
        logger.error("Cannot parse request state for sympa list=%s: %s",
                     listname, request["state_data"])
        return True
    try:
        host = state["runhost"]
        profile = state["profile"]
        description = state["description"]
        admins = ",".join(state["admins"])
    except KeyError:
        logger.error("No host/profile/description specified for sympa list %s",
                     listname)
        return True
    # 2008-08-01 IVR FIXME: Safe quote everything fished out from state.
    cmd = [cereconf.SYMPA_SCRIPT, host, 'newlist',
           listname, admins, profile, description]
    return Utils.spawn_and_log_output(cmd) == EXIT_SUCCESS
def clear_state(self, state_types=None):
    """ Remove session state data.

    Session state data mainly constists of cached passwords for the
    misc_list_passwords command.

    :param state_types: optional restriction on which state types to clear.
    """
    binds = {'session_id': self.get_session_id()}
    sql = ("DELETE FROM [:table schema=cerebrum name=bofhd_session_state] "
           "WHERE session_id=:session_id")
    if state_types:
        # Restrict the delete to the given state types.
        sql += " AND " + Utils.argument_to_sql(state_types, 'state_type',
                                               binds, str)
    self._db.execute(sql, binds)
    self._remove_old_sessions()
def clear_state(self, state_types=None):
    """ Remove session state data.

    Session state data mainly constists of cached passwords for the
    misc_list_passwords command.
    """
    binds = {"session_id": self.get_session_id()}
    conditions = ["session_id=:session_id"]
    if state_types:
        # Optionally narrow the delete to the given state types.
        conditions.append(Utils.argument_to_sql(state_types, "state_type",
                                                binds, str))
    sql = ("DELETE FROM [:table schema=cerebrum name=bofhd_session_state] "
           "WHERE " + " AND ".join(conditions))
    self._db.execute(sql, binds)
    self._remove_old_sessions()
def proc_sympa_create(request):
    """Execute the request for creating a sympa mailing list.

    :type request: db_row
    :param request: A dict-like object describing the sympa list creation
        request.
    """
    try:
        listname = get_address(request["entity_id"])
    except Errors.NotFoundError:
        logger.info("Sympa list address id:%s is deleted! No need to create",
                    request["entity_id"])
        return True

    def _load_state(raw):
        # Prefer JSON; fall back to pickle for not-yet-migrated requests.
        try:
            return json.loads(raw)
        except ValueError:
            pass
        # Remove this when there's no chance of pickled data
        try:
            return pickle.loads(raw)
        except Exception:
            return None

    state = _load_state(request["state_data"])
    if state is None:
        logger.error("Cannot parse request state for sympa list=%s: %s",
                     listname, request["state_data"])
        return True
    try:
        host = state["runhost"]
        profile = state["profile"]
        description = state["description"]
        admins = ",".join(state["admins"])
    except KeyError:
        logger.error("No host/profile/description specified for sympa list %s",
                     listname)
        return True
    # 2008-08-01 IVR FIXME: Safe quote everything fished out from state.
    cmd = [cereconf.SYMPA_SCRIPT, host, 'newlist', listname, admins,
           profile, description]
    return Utils.spawn_and_log_output(cmd) == EXIT_SUCCESS
def _make_password_document(self, filename, account, password, tpl):
    """ Make the password document to print.

    :param str filename: Basename of the document.

    :param Cerebrum.Account account:
        The account to generate a password document for.

    :param str password: The new password for the account.

    :param dict tpl:
        The template to use (output from __list_password_print_options).

    :return str: The full path to the generated document.
    """
    self.logger.debug("make_password_document: Selected template %r", tpl)
    th = TemplateHandler(tpl.get('lang'), tpl.get('type'), tpl.get('fmt'))
    # TODO: We should use a <prefix>/var/cache/ or <prefix>/tmp/ dir for
    # this, NOT a logging dir. Also, we should consider the read access to
    # these files.
    tmp_dir = Utils.make_temp_dir(dir=cereconf.JOB_RUNNER_LOG_DIR,
                                  prefix="bofh_spool")
    self.logger.debug("make_password_letter: temp dir=%r template=%r",
                      tmp_dir, filename)
    output_file = os.path.join(tmp_dir, filename)
    mapping = self._get_mappings(account, tpl)
    mapping.update({
        'uname': account.account_name,
        'password': password,
        'account_id': account.entity_id,
        'lopenr': '',
    })
    # Barcode
    if 'barcode' in mapping:
        mapping['barcode'] = os.path.join(tmp_dir, mapping['barcode'])
        try:
            th.make_barcode(account.entity_id, mapping['barcode'])
        # 'as msg' replaces the Python-2-only 'except IOError, msg'.
        except IOError as msg:
            self.logger.error(
                "make_password_letter: unable to make barcode (%s)", msg)
            raise CerebrumError(msg)
def _make_password_document(self, filename, account, password, tpl):
    """ Make the password document to print.

    :param str filename: Basename of the document.

    :param Cerebrum.Account account:
        The account to generate a password document for.

    :param str password: The new password for the account.

    :param dict tpl:
        The template to use (output from __list_password_print_options).

    :return str: The full path to the generated document.
    """
    self.logger.debug("make_password_document: Selected template %r", tpl)
    th = TemplateHandler(tpl.get('lang'), tpl.get('type'), tpl.get('fmt'))
    # TODO: We should use a <prefix>/var/cache/ or <prefix>/tmp/ dir for
    # this, NOT a logging dir. Also, we should consider the read access to
    # these files.
    tmp_dir = Utils.make_temp_dir(dir=cereconf.JOB_RUNNER_LOG_DIR,
                                  prefix="bofh_spool")
    self.logger.debug(
        "make_password_letter: temp dir=%r template=%r", tmp_dir, filename)
    output_file = os.path.join(tmp_dir, filename)
    mapping = self._get_mappings(account, tpl)
    mapping.update({
        'uname': account.account_name,
        'password': password,
        'account_id': account.entity_id,
        'lopenr': ''})
    # Barcode
    if 'barcode' in mapping:
        mapping['barcode'] = os.path.join(tmp_dir, mapping['barcode'])
        try:
            th.make_barcode(account.entity_id, mapping['barcode'])
        # 'as msg' replaces the Python-2-only 'except IOError, msg'.
        except IOError as msg:
            self.logger.error(
                "make_password_letter: unable to make barcode (%s)", msg)
            raise CerebrumError(msg)
def test_exception_wrapper_behaviour():
    """ Utils.exception_wrapper with valid arguments. """
    # Ignoring all exceptions with defaults always yields None
    assert Utils.exception_wrapper(noop, None)() is None
    # The exception spec may be a single type, a tuple, a list or a set
    # without affecting the result: the ignored call yields None.
    for spec in (ValueError, (ValueError,), [ValueError, ],
                 set((ValueError,))):
        assert Utils.exception_wrapper(raise1, spec)() is None
    # Exceptions not matching the spec are not caught
    nose.tools.assert_raises(ValueError,
                             Utils.exception_wrapper(raise1, AttributeError))
    # Return value with no exceptions is not altered
    assert Utils.exception_wrapper(noop, None, '')() is None
    # Return value with exceptions matches the arg
    assert Utils.exception_wrapper(raise1, ValueError, '')() == ''