def save():
    """Persist a schedule submitted from the web form.

    Reads the schedule fields from ``request.form``, coerces the numeric
    ones with ``num.safe_int``, and hands the assembled dict to
    ``models.save_schedule_manually``.

    Returns:
        ``'success'`` on success, otherwise the traceback string produced
        by ``logger.error_traceback()``.
    """
    schedule_type = num.safe_int(request.form.get('scheduletype'))
    try:
        models.save_schedule_manually({
            'id': request.form.get('oid'),
            'type': schedule_type,
            'data': get_data(schedule_type),
            'starttime': get_time(schedule_type, True),
            'endtime': get_time(schedule_type, False),
            'func': request.form.get('func', None),
            'module': request.form.get('module', None),
            'maxinstance': num.safe_int(request.form.get('maxinstance', 5), 5),
            'enabled': num.safe_int(request.form.get('enabled', None)),
            'args': request.form.get('args', None),
            'sourceid': request.form.get('sourceid', None)
        })
        return 'success'
    except Exception:
        # A bare ``except:`` would also trap SystemExit/KeyboardInterrupt;
        # only application-level failures should be reported to the client.
        return logger.error_traceback()
def save_task_schedule(o):
    """Register (or unregister) a scheduler job for a schedule record.

    Args:
        o: dict with at least ``id``, ``enabled``, ``type``, ``data``,
           ``func``, ``starttime``, ``endtime``; ``args`` optionally holds
           a JSON object ``{"args": [...], "kwargs": {...}}``.

    Behavior:
        * ``enabled`` truthy: adds a date (type 1), interval (type 2) or
          cron (type 3) job keyed by ``o['id']`` in ``TASK_DATABASE``.
        * ``enabled`` falsy: removes any existing job with that id.

    Raises:
        ValueError: if ``o['id']`` is blank.
    """
    if strings.is_blank(o.get('id', None)):
        raise ValueError('Field "id" not in this object: ' + str(o))
    if o['enabled']:
        # prepare args & kwargs
        args = None
        kwargs = None
        try:
            args_kwargs = json.loads(o['args'])
            args = args_kwargs['args']
            kwargs = args_kwargs['kwargs']
        except (TypeError, ValueError, KeyError):
            # 'args' is optional: None, malformed JSON, or missing keys all
            # mean "run the job without arguments". (Was a bare except that
            # also hid SystemExit/KeyboardInterrupt.)
            pass
        # add
        if o['type'] == 1:
            # date: one-shot job at a fixed timestamp
            task.add_date_job(
                o['func'], o['id'], TASK_DATABASE,
                args=args, kwargs=kwargs,
                run_date=time.string_to_date(o['data'], '%Y-%m-%d %H:%M:%S'),
                timezone='utc')
        elif o['type'] == 2:
            # interval: 'data' is a JSON object of week/day/hour/min/sec counts
            interval = json.loads(o['data'])
            task.add_interval_job(
                o['func'], o['id'], TASK_DATABASE,
                args=args, kwargs=kwargs,
                weeks=num.safe_int(interval['weeks']),
                days=num.safe_int(interval['days']),
                hours=num.safe_int(interval['hours']),
                minutes=num.safe_int(interval['minutes']),
                seconds=num.safe_int(interval['seconds']),
                start_date=o['starttime'], end_date=o['endtime'],
                timezone='utc')
        elif o['type'] == 3:
            # cron: 'data' is a JSON object of cron field expressions
            cron = json.loads(o['data'])
            task.add_cron_job(
                o['func'], o['id'], TASK_DATABASE,
                args=args, kwargs=kwargs,
                year=cron['year'], month=cron['month'], day=cron['day'],
                day_of_week=cron['day_of_week'], hour=cron['hour'],
                minute=cron['minute'], second=cron['second'],
                start_date=o['starttime'], end_date=o['endtime'],
                timezone='utc')
    else:
        # disabled: best-effort removal of any previously registered job
        try:
            task.remove_job(o['id'], TASK_DATABASE)
        except JobLookupError:
            pass
def __init__(self, item):
    """Parse one Unix ``ls -l``-style listing line (e.g. an FTP LIST row).

    Args:
        item: a single listing line, e.g.
              ``-rw-r--r-- 1 user group 1234 Jan 01 12:30 file.txt``

    Sets ``permission``, ``hard_link_count``, ``owner``, ``group``,
    ``size``, ``date``, ``filename``, ``is_dir`` and ``is_file``.
    """
    import six
    assert isinstance(item, six.string_types)
    item_arr = item.split()
    self.permission = item_arr[0]
    self.hard_link_count = num.safe_int(item_arr[1])
    self.owner = item_arr[2]
    self.group = item_arr[3]
    self.size = num.safe_int(item_arr[4])
    # process date time
    from datetime import datetime
    date_part = " ".join(item_arr[5:8])
    # An "HH:MM" time instead of a year means the entry is from the current
    # year. str.find() returns -1 when there is no colon; the original used
    # .index(), which raised ValueError for past-year entries so the else
    # branch was unreachable.
    if date_part.find(":") > 0:
        # this year
        self.date = datetime.strptime(
            date_part + " " + str(datetime.now().year), "%b %d %H:%M %Y")
    else:
        # not this year
        self.date = datetime.strptime(date_part, "%b %d %Y")
    # The filename is everything after the 8th whitespace-separated field
    # (internal spaces preserved). The original used str.lstrip(field),
    # which strips a *character set*, not a prefix, and could eat leading
    # characters of the filename.
    self.filename = item.split(None, 8)[8]
    # set helper attributes
    self.is_dir = self.permission[0] == "d"
    self.is_file = self.permission[0] == "-"
def initialize():
    """Configure the root logger from the ``logging`` section of settings.

    Installs an optional stdout handler and an optional file handler
    (timed-rotating when ``file.rotating.enable`` is set), then applies
    per-logger level overrides from ``loggers``.
    """
    import os
    import sys
    import logging.handlers
    # `collections.Iterable` was removed in Python 3.10; the abc module is
    # the correct home since 3.3.
    from collections.abc import Iterable
    from raphael.utils import setting, num, strings
    conf = setting.conf.get('logging')
    _logger = logging.getLogger()
    # stream logger
    config_stdout = conf.get('stdout', {})
    if config_stdout.get('enable'):
        _stream_handler = logging.StreamHandler(sys.stdout)
        _stream_handler.setFormatter(
            logging.Formatter(
                strings.get_non_empty_str(config_stdout, 'format',
                                          DEFAULT_LOGGING_FMT)))
        _stream_handler.setLevel(
            num.safe_int(config_stdout.get('level'), DEFAULT_LOGGIN_LEVEL))
        _logger.addHandler(_stream_handler)
    # file logger
    config_file = conf.get('file', {})
    if config_file.get('enable'):
        # default log path: <package parent dir>/<project_name>.log
        path = strings.get_non_empty_str(
            config_file, 'path',
            os.path.join(
                os.path.dirname(__file__), '..',
                setting.conf.get('system').get('project_name') + ".log"))
        config_rotating = config_file.get('rotating', {})
        if config_rotating.get('enable'):
            _file_handler = logging.handlers.TimedRotatingFileHandler(
                path,
                when=config_rotating.get('when', 'H'),
                backupCount=config_rotating.get('backup_count', 0),
                encoding='utf-8')
        else:
            # NOTE(review): without maxBytes this handler never actually
            # rotates — it behaves like a plain FileHandler. Confirm that
            # is intended.
            _file_handler = logging.handlers.RotatingFileHandler(
                path, encoding='utf-8')
        _file_handler.setFormatter(
            logging.Formatter(
                strings.get_non_empty_str(config_file, 'format',
                                          DEFAULT_LOGGING_FMT)))
        _file_handler.setLevel(
            num.safe_int(config_file.get('level'), DEFAULT_LOGGIN_LEVEL))
        _logger.addHandler(_file_handler)
    _logger.setLevel(DEFAULT_LOGGIN_LEVEL)
    # per-logger level overrides
    loggers = conf.get('loggers')
    if loggers is not None:
        assert isinstance(loggers, Iterable)
        for item in loggers:
            _log = logging.getLogger(item.get('name'))
            if 'level' in item:
                _log.setLevel(
                    num.safe_int(item.get('level'), DEFAULT_LOGGIN_LEVEL))
def add_user(self, cn, sn, uid_number, gid_number=100, gecos=None,
             mail=None, display_name=None, shadow_min=None, shadow_max=None,
             shadow_inactive=None, shadow_warning=None,
             shadow_last_change=None, skip_event_callback=False):
    """Create a posixAccount/shadowAccount LDAP user entry.

    Args:
        cn: common name; also used as uid, home dir name and sudoUser.
        sn: surname.
        uid_number: numeric uid; must be > 1000.
        gid_number: primary group id (default 100).
        gecos, mail, display_name: optional descriptive attributes.
        shadow_*: optional shadow-password policy attributes; when
            ``shadow_last_change`` is omitted it is set to today's epoch
            day count (``self._epoch_days()``).
        skip_event_callback: forwarded to ``self.add``.

    Returns:
        Whatever ``self.add`` returns.

    Raises:
        ValueError: if ``cn``/``sn`` is blank or ``uid_number`` <= 1000.
    """
    # check value (ValueError instead of bare Exception; callers catching
    # Exception still catch it)
    if strings.is_blank(cn):
        raise ValueError('cn cannot be blank')
    if strings.is_blank(sn):
        raise ValueError('sn cannot be blank')
    if num.safe_int(uid_number) <= 1000:
        raise ValueError('uidNumber should > 1000')
    attributes = {
        'cn': cn,
        'uid': cn,
        'sn': sn,
        'uidNumber': num.safe_int(uid_number),
        'gidNumber': num.safe_int(gid_number),
        'homeDirectory': '/home/' + cn,
        'loginShell': '/bin/bash',
        'userPassword': '******',
        'sudoUser': cn,
        'sudoHost': 'ALL',
        'sudoOption': '!authenticate',
    }
    if gecos is not None:
        attributes['gecos'] = gecos
    if mail is not None:
        attributes['mail'] = mail
    if display_name is not None:
        attributes['displayName'] = display_name
    if shadow_min is not None:
        attributes['shadowMin'] = shadow_min
    if shadow_max is not None:
        attributes['shadowMax'] = shadow_max
    if shadow_inactive is not None:
        attributes['shadowInactive'] = shadow_inactive
    if shadow_warning is not None:
        attributes['shadowWarning'] = shadow_warning
    if shadow_last_change is not None:
        attributes['shadowLastChange'] = shadow_last_change
        # set 0 to force change password on the first login
    else:
        attributes['shadowLastChange'] = self._epoch_days()
    return self.add(
        dn=self.assemble_user_dn(cn),
        object_class=['top', 'posixAccount', 'shadowAccount', 'person',
                      'inetOrgPerson', 'hostObject', 'sudoRole',
                      'authorizedServiceObject'],
        attributes=attributes,
        event=self.EVENT_ON_USER_CREATED,
        skip_event_callback=skip_event_callback,
    )
def batch(res):
    """Post-process schedule rows for display.

    Rewrites each row's ``data`` field into a human-readable form
    (interval -> "1w 2d 3h", cron -> space-joined field spec) and attaches
    the live scheduler state (``active``, ``next_run``).
    """
    for row in res:
        parts = []
        if row['type'] == 1:
            # date schedule: 'data' is already a plain timestamp string
            pass
        elif row['type'] == 2:
            # interval: keep only the non-zero units
            spec = json.loads(row['data'])
            for key, suffix in (('weeks', 'w'), ('days', 'd'), ('hours', 'h'),
                                ('minutes', 'm'), ('seconds', 's')):
                amount = num.safe_int(spec[key])
                if amount:
                    parts.append(str(amount) + suffix)
            row['data'] = ' '.join(parts)
        elif row['type'] == 3:
            # cron: walk coarse->fine; empty fields render as '*' until the
            # first set field is seen, then as '0'. Output is reversed so it
            # reads fine->coarse.
            spec = json.loads(row['data'])
            seen_value = False
            for key in ('year', 'day_of_week', 'month', 'day',
                        'hour', 'minute', 'second'):
                if spec[key]:
                    seen_value = True
                    parts.append(spec[key])
                else:
                    parts.append('0' if seen_value else '*')
            row['data'] = ' '.join(reversed(parts))
        # live scheduler state
        job = task.get_job(row['id'], models.TASK_DATABASE)
        row['active'] = job is not None
        row['next_run'] = job.next_run_time if job is not None else None
    return res
def find_menu(**params):
    """Query um_menu rows, optionally filtered by ``parentid`` and/or ``sort``."""
    clauses = ['1=1']
    bindings = {}
    if 'parentid' in params:
        clauses.append('parentid = :parentid')
        bindings['parentid'] = params['parentid']
    if 'sort' in params:
        clauses.append('sort = :sort')
        bindings['sort'] = num.safe_int(params['sort'])
    return DBContext().create_query("um_menu", ' and '.join(clauses), **bindings)
def find_schedule_logs(**params):
    """Query cm_schedule_log rows, optionally filtered by ``scheduleid``
    and/or ``status``.

    Clause building matches find_menu's convention; the resulting SQL
    string is identical to the previous implementation.
    """
    clauses = ['1=1']
    bindings = {}
    ctx = DBContext()
    if 'scheduleid' in params:
        clauses.append('scheduleid = :scheduleid')
        bindings['scheduleid'] = params['scheduleid']
    if 'status' in params:
        clauses.append('status = :status')
        bindings['status'] = num.safe_int(params['status'])
    return ctx.create_query('cm_schedule_log', ' and '.join(clauses), **bindings)
def table():
    """Schedule table endpoint: humanise each row's ``data`` field, attach
    live scheduler state, and apply the request's filter parameters."""
    @webutils.table_batch
    def batch(res):
        # Rewrite 'data' for display and add 'active'/'next_run' columns.
        for row in res:
            parts = []
            if row['type'] == 1:
                # date schedule: show the stored value as-is
                pass
            elif row['type'] == 2:
                # interval -> e.g. "1w 2d 3h" (non-zero units only)
                spec = json.loads(row['data'])
                for key, suffix in (('weeks', 'w'), ('days', 'd'),
                                    ('hours', 'h'), ('minutes', 'm'),
                                    ('seconds', 's')):
                    amount = num.safe_int(spec[key])
                    if amount:
                        parts.append(str(amount) + suffix)
                row['data'] = ' '.join(parts)
            elif row['type'] == 3:
                # cron: empty fields print '*' before the first set field,
                # '0' after; reversed so output reads fine->coarse
                spec = json.loads(row['data'])
                seen_value = False
                for key in ('year', 'day_of_week', 'month', 'day',
                            'hour', 'minute', 'second'):
                    if spec[key]:
                        seen_value = True
                        parts.append(spec[key])
                    else:
                        parts.append('0' if seen_value else '*')
                row['data'] = ' '.join(reversed(parts))
            job = task.get_job(row['id'], models.TASK_DATABASE)
            row['active'] = job is not None
            row['next_run'] = job.next_run_time if job is not None else None
        return res

    cond = {}
    if strings.is_not_blank(g.params.get("type", None)):
        cond['type'] = num.safe_int(g.params["type"])
    # string filters are passed through unchanged
    for key in ("module", "modulelike", "sourceid"):
        if strings.is_not_blank(g.params.get(key, None)):
            cond[key] = g.params[key]
    return models.find_schedules(**cond)
def save():
    """Create or update a menu record from the submitted form fields."""
    oid = request.form.get("id")
    parentid = strings.strip_to_empty(request.form.get('parentid', ''))
    menu = {}
    if strings.is_not_blank(oid):
        # update: start from the stored record when it still exists
        existing = get_menu(oid)
        if existing is not None:
            menu = existing
    else:
        # create: place the new entry after its siblings under this parent
        menu['sort'] = find_menu(parentid=parentid).count() + 1
    menu["name"] = request.form.get("name", '')
    menu['type'] = num.safe_int(request.form.get('type', 0))
    menu["url"] = request.form.get("url", None)
    menu["target"] = request.form.get("target", None)
    menu['parentid'] = parentid
    menu['icon'] = request.form.get('icon', '')
    menu['mark'] = request.form.get('mark', '')
    save_menu(menu)
    return "success"
def get_int(name, dv=0):
    """Look up *name* via get() and coerce the value to int, falling back
    to *dv* when the value is missing or not numeric."""
    raw = get(name)
    return num.safe_int(raw, dv)
def log_table(scheduleid):
    """Table data source: execution logs for one schedule, optionally
    filtered by the 'status' request parameter."""
    cond = {'scheduleid': scheduleid}
    status = num.safe_int(g.params.get('status', None))
    # NOTE(review): a missing/blank 'status' param coerces to 0 here and is
    # treated as "no filter" — which also makes an explicit status of 0
    # impossible to filter on. Confirm 0 is never a real status value
    # (compare find_schedule_logs, which keys on parameter presence).
    if status != 0:
        cond['status'] = status
    return models.find_schedule_logs(**cond)