def logout():
    '''
    Logs the current user out when a valid logout nonce is supplied;
    otherwise presents a confirmation link carrying the nonce.
    '''
    from core.error import UserNotFound
    try:
        user = auth.is_logged_in_core(request)
    except UserNotFound:
        user = None

    try:
        nonce = request.query['_']
    except KeyError:
        nonce = None

    # BUG FIX: the original dereferenced user.logout_nonce even when no user
    # was logged in (user is None), raising AttributeError.
    if user is None:
        return "You are not logged in. <a href='{}/login'>Click here to log in.</a>".format(
            BASE_URL)

    if nonce == utils.url_unescape(user.logout_nonce):
        logger.info("User {} logged out from IP {}.".format(
            user.for_log, request.remote_addr))
        response.delete_cookie("login", path="/")
        return "You have logged out. <a href='{}/login'>Click here to log in again.</a>".format(BASE_URL)

    return "No logout nonce. <a href='{}/logout?_={}'>Click here to log out.</a>".format(
        BASE_URL, user.logout_nonce)
def build_page(queue_entry, async_write=False):
    '''
    Builds the output file for the fileinfo referenced by a queue entry.

    :param queue_entry: Queue entry whose data_integer is a FileInfo id.
    :param async_write: When True, the generated page text is pushed onto the
        module-level write_queue for the background writer thread t instead of
        being written synchronously.
    '''
    try:
        fileinfo = FileInfo.get(FileInfo.id == queue_entry.data_integer)
        blog = queue_entry.blog
        page_tags = generate_page_tags(fileinfo, blog)
        file_page_text = generate_page_text(fileinfo, page_tags)
        if async_write:
            # BUG FIX: is_alive is a method; the original tested the bound
            # method object itself ("not t.is_alive"), which is always truthy,
            # so the writer thread was never started.
            if not t.is_alive():
                t.target = write_file_queue
                t.args = (write_queue, )
                t.start()
            write_queue.put_nowait(
                (file_page_text, blog.path, fileinfo.file_path))
        else:
            write_file(file_page_text, blog.path, fileinfo.file_path)
    except FileInfo.DoesNotExist as e:
        raise Exception('''Fileinfo {} could not be found in the system. It may refer to a fileinfo that was deleted by another action. ({})'''.format(
            queue_entry.data_integer, e))
    except NoArchiveForFileInfo:
        logger.info(
            "Fileinfo {} has no corresponding pages. File {} removed.".format(
                fileinfo.id, fileinfo.file_path))
        delete_fileinfo_files((fileinfo, ))
        # fileinfo.delete_instance(recursive=True)
        # FIXME: for now we leave this out
        # because deletes do not coalesce properly in the queue (I think)
    except Exception as e:
        context_list = [(f.object, f.ref) for f in fileinfo.context]
        raise Exception('Error building fileinfo {} ({},{},{}): {}'.format(
            fileinfo.id, fileinfo.page, context_list, fileinfo.file_path, e))
def logout():
    '''
    Logs the current user out when a valid logout nonce is supplied;
    otherwise presents a confirmation link carrying the nonce.
    '''
    from core.error import UserNotFound
    try:
        user = auth.is_logged_in_core(request)
    except UserNotFound:
        user = None

    try:
        nonce = request.query['_']
    except KeyError:
        nonce = None

    # BUG FIX: the original dereferenced user.logout_nonce even when no user
    # was logged in (user is None), raising AttributeError.
    if user is None:
        return "You are not logged in. <a href='{}/login'>Click here to log in.</a>".format(
            BASE_URL)

    if nonce == utils.url_unescape(user.logout_nonce):
        logger.info("User {} logged out from IP {}.".format(
            user.for_log, request.remote_addr))
        response.delete_cookie("login", path="/")
        return "You have logged out. <a href='{}/login'>Click here to log in again.</a>".format(
            BASE_URL)

    return "No logout nonce. <a href='{}/logout?_={}'>Click here to log out.</a>".format(
        BASE_URL, user.logout_nonce)
def blog_create(**new_blog_data):
    '''
    Creates and saves a new Blog from keyword data, gives it a default
    "Uncategorized" category, logs the creating user, and returns the
    new Blog instance.
    '''
    blog = Blog()
    blog.site = new_blog_data['site'].id
    blog.name = new_blog_data['name']
    blog.description = new_blog_data['description']
    blog.url = new_blog_data['url']
    blog.path = new_blog_data['path']
    blog.local_path = blog.path
    blog.theme = new_blog_data['theme']
    blog.save()

    default_category = Category(blog=blog,
                                title="Uncategorized",
                                default=True)
    default_category.save()

    # template installation should be its own function
    # install whatever the currently set system default templates are

    creator = user_from_ka(**new_blog_data)
    logger.info("Blog {} created on site {} by user {}.".format(
        blog.for_log, blog.site.for_log, creator.for_log))

    return blog
def decorated_function(*args, **kw):
    # All of the checks below implement the single-sign-on (SSO) gate.
    auth_token = request.cookies.get('auth_token')
    refresh_time = request.cookies.get('refresh_time')
    user_id = get_cookie_info().get('user_id')
    # get_cookie_info() also carries the single-sign-on state
    sso_code = get_cookie_info().get('sso_code')
    if is_none(auth_token) or is_none(refresh_time) or is_none(user_id):
        return BaseError.not_login()
    # Fetch from redis the random code that was stored when the cookie
    # was assembled.
    _redis = Redis()
    _sso_code = _redis.get_hget("app_sso_code", user_id)
    # Compare the random code parsed from the cookie with the one in redis;
    # a mismatch means the account logged in from another device.
    if not is_none(_sso_code) and not is_none(sso_code) and sso_code != _sso_code:
        logger.info("账号在其他设备登陆了%s" % user_id)
        return BaseError.not_local_login()
    # Decrypt the sign held in auth_token.
    sign = aes_decrypt(auth_token)
    # Rebuild _sign from cookie key + user_id + refresh_time + sso code.
    # NOTE(review): the original comment said md5 but sha1 is used here; also
    # hashlib.sha1 requires bytes on Python 3 — confirm this runs on Python 2
    # or that all four operands are bytes.
    _sign = hashlib.sha1(AUTH_COOKIE_KEY + user_id + refresh_time + sso_code).hexdigest()
    if sign == _sign:
        return f(*args, **kw)
    else:
        return BaseError.not_login()
def save(self, user, no_revision=False, backup_only=False, change_note=None):
    '''
    Wrapper around the model's .save() which first snapshots the current
    revision of the page into the PageRevision table, then commits the
    save. Returns a (page_save_result, revision_save_result) tuple.
    '''
    from core.log import logger

    revision_save_result = None

    # Snapshot the page as it exists BEFORE this save is committed.
    if no_revision == False and self.id is not None:
        snapshot = PageRevision.copy(self)
        revision_save_result = snapshot.save(user, self, False, change_note)

    if backup_only is False:
        page_save_result = Model.save(self)
    else:
        page_save_result = None

    if revision_save_result is None:
        logger.info(
            "Page {} edited by user {} but without changes.".format(
                self.for_log, user.for_log))
    else:
        logger.info("Page {} edited by user {}.".format(
            self.for_log, user.for_log))

    return (page_save_result, revision_save_result)
def decorator(*args, **kw):
    # Validate request parameters against the registered check rule.
    try:
        check_rule = build_check_rule(str(request.url_rule), str(request.rule_version),
                                      list(request.url_rule.methods & set(METHODS)))
        check_func = check_param.get_check_rules().get(check_rule)
        if check_func:
            check_func(*args, **kw)
    except BusinessException as e:
        if not is_none(e.func):
            # NOTE(review): here the handler is returned without being called,
            # while the later handler does "return e.func()" — confirm which
            # form is intended.
            return e.func
        elif not is_none(e.code) and not is_none(e.msg):
            business_exception_log(e)
            return return_data(code=e.code, msg=e.msg)
    # Run the wrapped view, translating any exception it raises.
    try:
        if request.trace_id is not None and request.full_path is not None:
            logger.info('trace_id is:' + request.trace_id +
                        ' request path:' + request.full_path)
        return func(*args, **kw)
    except BusinessException as e:
        if e.func is not None:
            return e.func()
        elif e.code is not None and e.msg is not None:
            business_exception_log(e)
            # Server-side failures get a generic apologetic message.
            if e.code == SYSTEM_CODE_404 or e.code == SYSTEM_CODE_503:
                return return_data(code=e.code, msg='很抱歉服务器异常,请您稍后再试')
            else:
                return return_data(code=e.code, msg=e.msg)
        else:
            return request_fail()
    except Exception:
        return request_fail()
def sig_handler(signum, frame):
    '''
    Signal handler: clears the `running` flag and terminates the process.

    :param signum: Number of the signal that was caught.
    :param frame: Current stack frame (unused).
    '''
    global running
    if running:
        running = False
        # BUG FIX: corrected typo "siglan" -> "signal" in the log message.
        logger.debug('heligated caught signal {0}'.format(signum))
        logger.info('heligated stopping')
        # BUG FIX: the os module has no exit(); os.exit(0) raised
        # AttributeError instead of terminating the process.
        os._exit(0)
def build_file(f, blog):
    '''
    Builds a single file based on a fileinfo entry f for a given blog,
    returning a report (list of strings) about the built file.

    This does _not_ perform any checking for the page's publication
    status, nor any other higher-level security. This should be the
    action that is pushed to the queue, and consolidated based on the
    generated filename. (The consolidation should be part of the queue
    push function.)
    '''
    page_text, pathname = generate_file(f, blog)
    report = ["Output: " + pathname]

    encoded_page = page_text.encode('utf8')

    # Create the target directory if the file path has one and it is missing.
    split_path = f.file_path.rsplit('/', 1)
    path_to_check = blog.path + "/" + f.file_path.rsplit('/', 1)[0]
    if len(split_path) > 1 and os.path.isdir(path_to_check) is False:
        os.makedirs(path_to_check)

    with open(pathname, "wb") as output_file:
        output_file.write(encoded_page)

    logger.info("File '{}' built ({} bytes).".format(
        f.file_path, len(encoded_page)))

    return report
def meta(self, file_list):
    '''
    Queries Baidu Pan for metadata on the given list of remote files.
    Returns the raw response body as text, or '[]' if the request fails.
    '''
    url = "http://pan.baidu.com/api/filemetas"
    form_body = {'target': json.dumps(file_list)}
    querystring = {"dlink": "0", "blocks": "0", "method": "filemetas"}
    headers = {
        'host': "pan.baidu.com",
        'accept': "application/json, text/javascript, text/html, */*; q=0.01",
        'user-agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36",
        'accept-language': "en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,zh-TW;q=0.2",
        'referer': "http://pan.baidu.com/disk/home",
        'x-requested-with': "XMLHttpRequest",
        'content-type': "application/x-www-form-urlencoded"
    }
    try:
        response = session.post(url,
                                cookies=cj,
                                data=form_body,
                                headers=headers,
                                params=querystring)
    except Exception as e:
        # Best-effort: a failed request degrades to an empty JSON list.
        logger.info(e)
        return '[]'
    return response.text
async def get_image_by_name(request: Request, author: str = None):
    '''
    Returns the requested author's image as a base64 string in a JSON
    payload, raising a 404 HTTPException when the author is unknown.
    '''
    logger.info("Author Image Requested: " + str(author))
    try:
        encoded = crudImage.get_image_as_base64(name=author)
    except ValueError:
        raise HTTPException(status_code=404, detail="Author not found")
    return {"image": encoded}
def media_edit_save(blog_id, media_id):
    '''
    Saves changes to a media entry's friendly name, reporting via a Status
    whether anything actually changed.

    :param blog_id: ID of the blog the media belongs to.
    :param media_id: ID of the media entry being edited.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    is_member = auth.is_blog_member(user, blog)
    media = Media.load(media_id)
    permission = auth.is_media_owner(user, media)

    friendly_name = request.forms.getunicode('media_friendly_name')
    changes = False
    if friendly_name != media.friendly_name:
        changes = True
        media.friendly_name = friendly_name

    import datetime
    if changes is True:
        media.modified_date = datetime.datetime.utcnow()
        media.save()
        status = utils.Status(
            type='success',
            message='Changes to media <b>{}</b> saved successfully.'.format(
                media.for_display))
    else:
        # BUG FIX: the original passed (media.id, media.for_display) to a
        # one-placeholder format string, so the message showed the raw id
        # instead of the display name.
        status = utils.Status(
            type='warning',
            no_sure=True,
            message='No discernible changes submitted for media <b>{}</b>.'.format(
                media.for_display))

    logger.info("Media {} edited by user {}.".format(
        media.for_log, user.for_log))

    from core.ui import kv
    kv_ui_data = kv.ui(media.kv_list())

    tags = template_tags(blog=blog, media=media, status=status, user=user)
    tags.sidebar = sidebar.render_sidebar(
        panel_set='edit_media',
        status_badge=status_badge,
        kv_object='Media',
        kv_objectid=media.id,
        kv_ui=kv_ui_data)

    return media_edit_output(tags)
def doWMI():
    '''
    Probes WMI for the operating system's computer name.
    Returns True when the query succeeds, False otherwise.
    '''
    log.debug("doWMI : " + __name__ + " loaded")
    c = wmi.WMI()
    try:
        # FIX: the original bound the result to the name "os", shadowing the
        # os module inside this function.
        os_info = c.OperatingSystem()
        log.info("WMI: " + os_info[0].CSName)
        return True
    except AttributeError:
        return False
async def get_image_by_name_html(request: Request, author: str = None):
    '''
    Renders index.html with the requested author's image embedded as
    base64, raising a 404 HTTPException when the author is unknown.
    '''
    # BUG FIX: author defaults to None and str + None raises TypeError;
    # wrap in str() as the sibling JSON endpoint already does.
    logger.info("Author Image Requested: " + str(author))
    try:
        image = crudImage.get_image_as_base64(name=author)
        return templates.TemplateResponse(
            "index.html", {"request": request, "image": image}
        )
    except ValueError:
        raise HTTPException(status_code=404, detail="Author not found")
def translate_name_to_filename(name: str = None):
    '''
    Converts an author name to a sanitized .jpg filename (lowercased,
    spaces replaced by underscores). Falls back to "error.png" when no
    name is given.
    '''
    if name is not None:
        filename = name.lower().replace(" ", "_") + ".jpg"
        filename = sanitize_filename(filename)
        # BUG FIX: original message ran the words together ("File Found<name>").
        logger.info("File found: " + str(filename))
    else:
        # BUG FIX: original message was garbled ("File not found Found").
        logger.error("File not found", filename=str(name))
        filename = "error.png"
    return filename
def create_user_site(**new_user_data):
    '''
    Creates a new user on a site, grants that user full (127) permission
    on the site, and logs the creating user.
    '''
    created = user_create(**new_user_data)
    site = new_user_data['site']
    saved_permission = add_user_permission(created, permission=127, site=site)
    creator = user_from_ka(**new_user_data)
    logger.info("User {} created on site {} by user {}.".format(
        created.for_log, site.for_log, creator.for_log))
def update_user(user, editing_user, **user_data):
    '''
    Applies name/email changes to `user`, saves, logs the edit, and
    returns the updated user.
    '''
    user.name = user_data['name']
    user.email = user_data['email']
    user.save()

    logger.info("Changes to user {} saved by user {}.".format(
        user.for_log, editing_user.for_log))
    return user
def seed_db(db: Session, quoteList: schemas.QuotesList):
    '''
    Seeds the quotes table from quoteList when it is currently empty.
    Returns True when seeding happened, None when data already existed.
    '''
    if db.query(models.Quotes).first() is None:
        for quote in quoteList.quotes:
            create_quote(db=db, quote=quote)
        logger.info("database successfully seeded")
        # BUG FIX: corrected garbled log message ("databse seesed with").
        logger.debug("database seeded with", quote=quoteList)
        return True
    return None
def login():
    '''
    User login interface: renders the login template, logs the request,
    clears any existing login cookie, and returns the rendered page.
    '''
    rendered = template('ui/ui_login', **template_tags().__dict__)
    logger.info("Login page requested from IP {}.".format(request.remote_addr))
    response.delete_cookie("login", path="/")
    return rendered
def login():
    '''
    User login interface: renders the login template, logs the request,
    clears any existing login cookie, and returns the rendered page.
    '''
    page = template('ui/ui_login', **template_tags().__dict__)
    logger.info(
        "Login page requested from IP {}.".format(request.remote_addr))
    response.delete_cookie("login", path="/")
    return page
def page_edit(page_id):
    '''
    UI for editing a page in a blog.
    Loads the page, stamps and saves it on first edit, and renders the
    page-edit template with sidebar and key/value UI data.
    '''
    user = auth.is_logged_in(request)
    page = Page.load(page_id)
    permission = auth.is_page_editor(user, page)
    status = None

    referer = request.headers.get('Referer')
    # Fall back to the blog's own listing when there is no referer, the page
    # is brand new, or the referer points outside this blog.
    if (referer is None
            or page.modified_date is None
            or re.match(re.escape(BASE_URL + "/blog/" + str(page.blog.id)),
                        referer) is None):
        referer = BASE_URL + "/blog/" + str(page.blog.id)

    # A page with no modified_date has just been created: stamp and save it.
    if page.modified_date is None:
        status = utils.Status(
            type='info',
            message="Page <b>{}</b> created.".format(page.for_log))
        page.modified_date = datetime.datetime.utcnow()
        page.save(user)

    tags = template_tags(page=page, user=user, status=status)

    from core.ui import kv
    kv_ui_data = kv.ui(page.kv_list())

    tpl = template('edit/page',
                   menu=generate_menu('edit_page', page),
                   parent_path=referer,
                   # search_context=(search_context['blog'], page.blog),
                   html_editor_settings=html_editor_settings(page.blog),
                   sidebar=sidebar.render_sidebar(
                       panel_set='edit_page',
                       status_badge=status_badge,
                       save_action_list=save_action_list,
                       save_action=save_action,
                       kv_ui=kv_ui_data,
                       kv_object='Page',
                       kv_objectid=page.id,
                       **tags.__dict__),
                   msg_float=False,
                   **tags.__dict__)

    logger.info("Page {} opened for editing by {}.".format(
        page.for_log, user.for_log))

    return tpl
def handle(self, *args, **options):
    '''
    Management-command entry point: deletes SquidLog records whose
    access_date is older than <days> days, where <days> is the single
    positional argument.
    '''
    if len(args) != 1:
        self.stdout.write('need <days> arg')
        # BUG FIX: the original fell through and crashed on args[0]
        # (IndexError) when no argument was supplied.
        return
    days = int(args[0])
    delta = datetime.timedelta(days=days)
    date_until = datetime.datetime.today() - delta
    qs = SquidLog.objects.filter(access_date__lt=date_until)
    count = qs.count()
    qs.delete()
    msg = 'from SquidLog successfully deleted {0} records'.format(count)
    logger.info(msg)
def check_message_validity(json_object):
    '''
    Parses and returns a Quote from json_object["quotes"].
    Raises ValueError when the payload fails validation or does not carry
    a "quotes" key.
    '''
    if "quotes" in json_object:
        try:
            logger.info("Message Contains!")
            return Quote.parse_obj(json_object["quotes"])
        except ValidationError:
            logger.error("Something went wrong!")
            raise ValueError
    # "detail" payloads and anything else are equally invalid.
    raise ValueError
def save(self, user, current_revision, is_backup=False, change_note=None):
    '''
    Saves a PageRevision, raising PageNotChanged when the new revision is
    identical to the most recent one, and pruning stored revisions beyond
    the blog's max_revisions limit.
    '''
    from core.log import logger
    from core.error import PageNotChanged
    max_revisions = self.blog.max_revisions

    # Most-recent-first window of existing revisions for this page.
    previous_revisions = (self.select().where(
        PageRevision.page_id == self.page_id).order_by(
            PageRevision.modified_date.desc()).limit(max_revisions))

    if previous_revisions.count() > 0:
        last_revision = previous_revisions[0]
        page_changed = False
        # Compare every content field; bookkeeping fields are ignored.
        for name in last_revision._meta.fields:
            if name not in ("modified_date", "id", "page_id", "is_backup",
                            "change_note", "saved_by"):
                value = getattr(current_revision, name)
                new_value = getattr(last_revision, name)
                if value != new_value:
                    page_changed = True
                    break
        if page_changed is False:
            raise PageNotChanged(
                'Page {} was saved but without changes.'.format(
                    current_revision.for_log))

    # Once the cap is reached, drop revisions older than the oldest kept one.
    if previous_revisions.count() >= max_revisions:
        older_revisions = DeleteQuery(PageRevision).where(
            PageRevision.page_id == self.page_id,
            PageRevision.modified_date < previous_revisions[
                max_revisions - 1].modified_date)
        older_revisions.execute()

    self.is_backup = is_backup
    self.change_note = change_note
    self.saved_by = user.id
    results = Model.save(self)
    logger.info("Revision {} for page {} created.".format(
        date_format(self.modified_date), self.for_log))
    return results
def media_edit_save(blog_id, media_id):
    '''
    Saves changes to a media entry's friendly name, reporting via a Status
    whether anything actually changed.

    :param blog_id: ID of the blog the media belongs to.
    :param media_id: ID of the media entry being edited.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    is_member = auth.is_blog_member(user, blog)
    media = Media.load(media_id)
    permission = auth.is_media_owner(user, media)

    friendly_name = request.forms.getunicode('media_friendly_name')
    changes = False
    if friendly_name != media.friendly_name:
        changes = True
        media.friendly_name = friendly_name

    import datetime
    if changes is True:
        media.modified_date = datetime.datetime.utcnow()
        media.save()
        status = utils.Status(
            type='success',
            message='Changes to media <b>{}</b> saved successfully.'.format(
                media.for_display))
    else:
        # BUG FIX: the original passed (media.id, media.for_display) to a
        # one-placeholder format string, so the message showed the raw id
        # instead of the display name.
        status = utils.Status(
            type='warning',
            no_sure=True,
            message='No discernible changes submitted for media <b>{}</b>.'.format(
                media.for_display))

    logger.info("Media {} edited by user {}.".format(media.for_log,
                                                    user.for_log))

    from core.ui import kv
    kv_ui_data = kv.ui(media.kv_list())

    tags = template_tags(blog=blog, media=media, status=status, user=user)
    tags.sidebar = sidebar.render_sidebar(panel_set='edit_media',
                                          status_badge=status_badge,
                                          kv_object='Media',
                                          kv_objectid=media.id,
                                          kv_ui=kv_ui_data)

    return media_edit_output(tags)
def page_edit(page_id):
    '''
    UI for editing a page in a blog.
    Loads the page, stamps and saves it on first edit, and renders the
    page-edit template with sidebar and key/value UI data.
    '''
    user = auth.is_logged_in(request)
    page = Page.load(page_id)
    permission = auth.is_page_editor(user, page)
    status = None

    referer = request.headers.get('Referer')
    # Fall back to the blog's own listing when there is no referer, the page
    # is brand new, or the referer points outside this blog.
    if (referer is None
            or page.modified_date is None
            or re.match(re.escape(BASE_URL + "/blog/" + str(page.blog.id)),
                        referer) is None):
        referer = BASE_URL + "/blog/" + str(page.blog.id)

    # A page with no modified_date has just been created: stamp and save it.
    if page.modified_date is None:
        status = utils.Status(type='info',
                              message="Page <b>{}</b> created.".format(
                                  page.for_log))
        page.modified_date = datetime.datetime.utcnow()
        page.save(user)

    tags = template_tags(page=page, user=user, status=status)

    from core.ui import kv
    kv_ui_data = kv.ui(page.kv_list())

    tpl = template(
        'edit/page',
        menu=generate_menu('edit_page', page),
        parent_path=referer,
        # search_context=(search_context['blog'], page.blog),
        html_editor_settings=html_editor_settings(page.blog),
        sidebar=sidebar.render_sidebar(panel_set='edit_page',
                                       status_badge=status_badge,
                                       save_action_list=save_action_list,
                                       save_action=save_action,
                                       kv_ui=kv_ui_data,
                                       kv_object='Page',
                                       kv_objectid=page.id,
                                       **tags.__dict__),
        msg_float=False,
        **tags.__dict__)

    logger.info("Page {} opened for editing by {}.".format(
        page.for_log, user.for_log))

    return tpl
def readdirAsync(self, path, depth=2, threadPool=pool):
    '''
    Recursively lists a remote directory, caching file metadata via
    _add_file_to_buffer and recording directory listings in dir_buffer;
    subdirectory traversal (up to `depth` levels) is fanned out to the
    thread pool.
    '''
    try:
        foo = json.loads(self.disk.list_files(path))
    except Exception as s:
        logger.exception(s)
        # BUG FIX: the original fell through with `foo` unbound, raising
        # NameError below; a failed listing now aborts this directory.
        return
    files = ['.', '..']
    abs_files = []
    if 'errno' in foo:
        logger.error(
            "maybe token is not right, try re login http://pan.baidu.com in Chrome"
        )
    if "list" not in foo:
        # logger.info("no list")
        return
    for file in foo['list']:
        if file['server_filename'].startswith("."):
            continue
        files.append(file['server_filename'])
        abs_files.append(file['path'])
        # logger.debug(file['path'])
    file_num = len(abs_files)
    # Metadata is fetched in batches of up to 100 paths.
    group = int(math.ceil(file_num / 100.0))
    for i in range(group):
        obj = [f for n, f in enumerate(abs_files) if n % group == i]  # one batch
        while 1:
            try:
                ret = json.loads(self.disk.meta(obj))
                break
            except Exception as e:
                # BUG FIX: the original logged `ret` here, which is unbound
                # when the very first attempt fails (NameError).
                logger.exception(e)
        for file_info in ret['info']:
            self._add_file_to_buffer(file_info['path'], file_info)
            if depth > 0:
                depth -= 1
                if file_info['isdir']:
                    if file_info['path'] not in self.traversed_folder:
                        # BUG FIX: the original marked the parent `path` as
                        # traversed instead of the subfolder being submitted,
                        # so subfolders could be traversed repeatedly.
                        self.traversed_folder[file_info['path']] = True
                        threadPool.submit(self.readdirAsync,
                                          file_info['path'], depth,
                                          threadPool)
    self.dir_buffer[path] = files
def implement_get(self, params, **kwargs):
    '''
    Issues a GET to self.url after appending the common and business
    parameters. Raises BusinessException on a non-200 HTTP status; logs
    (but still returns) payloads whose application-level code is not 200.
    '''
    self.url_add_common_param()
    self.url_add_business_param(params)
    logger.info(self.url)

    resp = requests.get(self.url, **kwargs)
    if resp.status_code != 200:
        raise BusinessException(code=resp.status_code,
                                msg=resp.text,
                                url=resp.url)
    ret_data = resp.json()

    # HTTP succeeded, but log when the backend's own code is not 200
    # (a missing 'code' key yields None here and is logged as well).
    if ret_data.get('code') != 200:
        logger.error(
            'api_return_error, code: %s, msg: %s, url: %s' %
            (ret_data.get('code'), ret_data.get('msg'), self.url))
    return ret_data
def create_user_blog(**new_user_data):
    '''
    Creates a new user on a blog, grants that user full (127) permission
    on the blog and its site, and logs the creating user.
    '''
    created = user_create(**new_user_data)
    blog = new_user_data['blog']
    site = blog.site
    saved_permission = add_user_permission(created,
                                           permission=127,
                                           blog=blog,
                                           site=site)
    creator = user_from_ka(**new_user_data)
    logger.info("User {} created on blog {} by user {}.".format(
        created.for_log, blog.for_log, creator.for_log))
def implement_post(self, params, headers=None, **kwargs):
    '''
    Issues a POST to self.url with `params` JSON-encoded under a single
    "params" form field. Raises BusinessException on a non-200 HTTP
    status; logs (but still returns) payloads whose application-level
    code is not 200.
    '''
    self.url_add_common_param()
    form_body = {'params': json.dumps(params)}
    resp = requests.post(self.url, data=form_body, headers=headers, **kwargs)
    logger.info(self.url)

    if resp.status_code != 200:
        raise BusinessException(code=resp.status_code,
                                msg=resp.text,
                                url=resp.url)
    ret_data = resp.json()

    # A missing 'code' key yields None here and is logged as well.
    if ret_data.get('code') != 200:
        logger.error(
            'api_return_error, code: %s, msg: %s, url: %s' %
            (ret_data.get('code'), ret_data.get('msg'), self.url))
    return ret_data
def queue_page_archive_actions(page):
    '''
    Pushes to the publishing queue all the page archives for a given page
    object.

    :param page: The page object whose archives will be pushed to the
        publishing queue.
    '''
    #===========================================================================
    # NOTE: I tried to speed this up by checking the list of fileinfos
    # related to mappings for the page (if any), and then pushing those
    # if they exist, but I haven't seen evidence it does anything tangible
    # for performance.
    # I need to double-check that old mappings are in fact invalidated
    # when they are changed.
    #===========================================================================
    archive_templates = page.blog.archive_templates
    tags = template_tags(page=page)
    for n in archive_templates:
        try:
            if n.publishing_mode != publishing_mode.do_not_publish:
                fileinfo_mappings = FileInfo.select().where(
                    FileInfo.page == page,
                    FileInfo.template_mapping << n.mappings)
                if fileinfo_mappings.count() == 0:
                    fileinfo_mappings = build_archives_fileinfos_by_mappings(
                        n, (page, ))
                if len(fileinfo_mappings) == 0:
                    logger.info(
                        'No archive fileinfos could be built for page {} with template {}'
                        .format(page.for_log, n.for_log))
                else:
                    for fileinfo_mapping in fileinfo_mappings:
                        Queue.push(job_type=job_type.archive,
                                   blog=page.blog,
                                   site=page.blog.site,
                                   priority=7,
                                   data_integer=fileinfo_mapping.id)
        except Exception as e:
            from core.error import QueueAddError
            # BUG FIX: the original format string had only two placeholders
            # for three arguments, so the underlying exception detail was
            # silently dropped from the error message.
            raise QueueAddError(
                'Archive template {} for page {} could not be queued: {}'.format(
                    n, page.for_log, e))
def blog_new_page_save(blog_id):
    '''
    UI for saving a newly created page: saves it via the CMS layer, logs
    the creation, and redirects the client to the page's edit view.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_member(user, blog)

    tags = cms.save_page(None, user, blog)  # TODO: move to model instance?

    logger.info("Page {} created by user {}.".format(
        tags.page.for_log, user.for_log))

    redirect_target = BASE_URL + '/page/{}/edit'.format(str(tags.page.id))
    response.add_header('X-Redirect', redirect_target)
    return response
def blog_new_page_save(blog_id):
    '''
    UI for saving a newly created page: saves it via the CMS layer, logs
    the creation, and redirects the client to the page's edit view.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_member(user, blog)

    # TODO: move to model instance?
    tags = cms.save_page(None, user, blog)

    logger.info("Page {} created by user {}.".format(tags.page.for_log,
                                                     user.for_log))

    response.add_header('X-Redirect',
                        BASE_URL + '/page/{}/edit'.format(str(tags.page.id)))
    return response
def blog_settings_save(request, blog, user):
    '''
    Applies submitted form fields to a blog's settings (skipping any
    field that was not submitted), saves the blog, logs the edit, and
    returns a success Status.
    '''
    _forms = request.forms

    name = _forms.getunicode('blog_name')
    if name is not None:
        blog.name = name

    description = _forms.getunicode('blog_description')
    if description is not None:
        blog.description = description

    url = _forms.getunicode('blog_url')
    if url is not None:
        blog.url = url.rstrip('/')  # TODO: url validation

    path = _forms.getunicode('blog_path')
    if path is not None:
        blog.path = path.rstrip('/')  # TODO: validate this path

    base_extension = _forms.getunicode('blog_base_extension')
    if base_extension is not None:
        blog.base_extension = base_extension.lstrip('.')

    blog.save()

    status = Status(type='success',
                    message="Settings for <b>{}</b> saved.",
                    vals=(blog.name, ))

    logger.info("Settings for blog {} edited by user {}.".format(
        blog.for_log, user.for_log))

    return status
def queue_page_archive_actions(page):
    '''
    Pushes to the publishing queue all the page archives for a given page
    object.

    :param page: The page object whose archives will be pushed to the
        publishing queue.
    '''
    #===========================================================================
    # NOTE: I tried to speed this up by checking the list of fileinfos
    # related to mappings for the page (if any), and then pushing those
    # if they exist, but I haven't seen evidence it does anything tangible
    # for performance.
    # I need to double-check that old mappings are in fact invalidated
    # when they are changed.
    #===========================================================================
    archive_templates = page.blog.archive_templates
    tags = template_tags(page=page)
    for n in archive_templates:
        try:
            if n.publishing_mode != publishing_mode.do_not_publish:
                fileinfo_mappings = FileInfo.select().where(
                    FileInfo.page == page,
                    FileInfo.template_mapping << n.mappings)
                if fileinfo_mappings.count() == 0:
                    fileinfo_mappings = build_archives_fileinfos_by_mappings(n, (page,))
                if len(fileinfo_mappings) == 0:
                    logger.info('No archive fileinfos could be built for page {} with template {}'.format(
                        page.for_log, n.for_log))
                else:
                    for fileinfo_mapping in fileinfo_mappings:
                        Queue.push(job_type=job_type.archive,
                                   blog=page.blog,
                                   site=page.blog.site,
                                   priority=7,
                                   data_integer=fileinfo_mapping.id)
        except Exception as e:
            from core.error import QueueAddError
            # BUG FIX: the original format string had only two placeholders
            # for three arguments, so the underlying exception detail was
            # silently dropped from the error message.
            raise QueueAddError('Archive template {} for page {} could not be queued: {}'.format(
                n, page.for_log, e))
def get_quote(url):
    '''
    Fetches a quote JSON payload from `url`. On any failure (HTTP error,
    network error, invalid JSON) returns an error record of the form
    {"name": "error", "quote": <detail>} instead of raising.
    '''
    try:
        response = requests.get(url)
        # If the response was unsuccessful, an HTTPError is raised here.
        response.raise_for_status()
    except HTTPError as http_err:
        logger.error(f"HTTP error occurred: {http_err}")
        return {"name": "error", "quote": http_err}
    except Exception as err:
        logger.error(f"Other error occurred: {err}")
        return {"name": "error", "quote": err}
    else:
        try:
            json_object = response.json()
            logger.info("Success!")
            # BUG FIX: the original called response.json() a second time,
            # parsing the body twice; return the already-parsed object.
            return json_object
        except ValueError:
            logger.info("Not Valid JSON")
            return {"name": "error", "quote": "Not Valid JSON"}
def clear_model(self, model):
    '''
    Deletes every record of `model` that has no related objects,
    reporting progress on stdout and logging the final count.
    '''
    self.stdout.write('start clearing {0}'.format(model.__name__))
    qs = model.objects.all()
    total_count = qs.count()
    del_count = 0
    for counter, record in enumerate(qs):
        console_progrees(total_count, counter + 1, self.stdout)
        # Records still referenced elsewhere are kept.
        if has_related_objects(record):
            continue
        record.delete()
        del_count += 1
    self.stdout.write('')
    msg = 'from {0} successfully deleted {1} records'.format(
        model.__name__, del_count)
    logger.info(msg)
def test_get_quotes():
    '''
    Resets the database, seeds it, and checks that '/' returns one of the
    seeded quotes.
    '''
    logger.info("Running '/' tests")
    logger.info("Get '/admin/delete_all'")
    client.get("/admin/delete_all")
    logger.info("Post '/seed'")
    client.post("/seed", json=test_quotes_list)
    logger.info("Get '/'")
    response = client.get("/")
    assert response.status_code == 200
    msg = response.json()
    del msg["backend"]
    logger.info("Quote Received " + str(msg["quotes"]))
    assert msg["quotes"] in test_quotes_list["quotes"]
def rename(self, old, new):
    '''
    Renames (moves) a remote file on Baidu PCS from `old` to `new` by
    posting a hand-assembled multipart "move" request. The HTTP response
    is neither checked nor returned.
    '''
    logger.info("rename")
    url = "http://pcs.baidu.com/rest/2.0/pcs/file"
    querystring = {"app_id": self.app_id, "method": "move"}
    move_spec = '[{"from":"' + old + '","to":"' + new + '"}]'
    payload = "--a3e249a7d481640c2215fe9bd04ad69c196dd9a116c0354d94e27ddda942\nContent-Disposition: form-data; name=\"param\"\n\n{\"list\":" + move_spec + "}\n--a3e249a7d481640c2215fe9bd04ad69c196dd9a116c0354d94e27ddda942--\n"
    headers = {
        'host': "pcs.baidu.com",
        'User-Agent': self.user_agent,
        'Content-Type': "multipart/form-data; boundary=a3e249a7d481640c2215fe9bd04ad69c196dd9a116c0354d94e27ddda942",
        'cookie': "BDUSS=" + self.BDUSS
    }
    requests.post(url,
                  data=payload.encode('utf-8'),
                  headers=headers,
                  params=querystring)
def test_get_quotes():
    '''
    Resets the database, seeds it, and checks that every quote returned
    by '/quotes' is one of the seeded quotes.
    '''
    logger.info("Running '/quotes' tests")
    logger.info("Get '/admin/delete_all'")
    client.get("/admin/delete_all")
    logger.info("Post '/seed'")
    client.post("/seed", json=test_quotes_list)
    logger.info("Get '/quotes'")
    response = client.get("/quotes")
    assert response.status_code == 200
    msg = response.json()
    del msg["backend"]
    logger.info("Quote Received " + str(msg["quotes"]))
    known = test_quotes_list["quotes"]
    for quote in msg["quotes"]:
        if quote not in known:
            logger.error("Unknown Quote: " + quote, quote=quote)
            assert False
def build_page(queue_entry, async_write=False):
    '''
    Builds the output file for the fileinfo referenced by a queue entry.

    :param queue_entry: Queue entry whose data_integer is a FileInfo id.
    :param async_write: When True, the generated page text is pushed onto the
        module-level write_queue for the background writer thread t instead of
        being written synchronously.
    '''
    try:
        fileinfo = FileInfo.get(FileInfo.id == queue_entry.data_integer)
        blog = queue_entry.blog
        page_tags = generate_page_tags(fileinfo, blog)
        file_page_text = generate_page_text(fileinfo, page_tags)
        if async_write:
            # BUG FIX: is_alive is a method; the original tested the bound
            # method object itself ("not t.is_alive"), which is always truthy,
            # so the writer thread was never started.
            if not t.is_alive():
                t.target = write_file_queue
                t.args = (write_queue,)
                t.start()
            write_queue.put_nowait((file_page_text, blog.path, fileinfo.file_path))
        else:
            write_file(file_page_text, blog.path, fileinfo.file_path)
    except FileInfo.DoesNotExist as e:
        raise Exception('''Fileinfo {} could not be found in the system. It may refer to a fileinfo that was deleted by another action. ({})'''.format(queue_entry.data_integer, e))
    except NoArchiveForFileInfo:
        logger.info("Fileinfo {} has no corresponding pages. File {} removed.".format(
            fileinfo.id, fileinfo.file_path)
        )
        delete_fileinfo_files((fileinfo,))
        # fileinfo.delete_instance(recursive=True)
        # FIXME: for now we leave this out
        # because deletes do not coalesce properly in the queue (I think)
    except Exception as e:
        context_list = [(f.object, f.ref) for f in fileinfo.context]
        raise Exception('Error building fileinfo {} ({},{},{}): {}'.format(
            fileinfo.id, fileinfo.page, context_list, fileinfo.file_path, e))
def login_verify_core(email, password):
    '''
    Verifies a login attempt. On success, sets the signed login cookie,
    rotates the user's logout nonce, logs the event, and returns True;
    on failure, logs the attempt and returns False.
    '''
    try:
        user = User.login_verify(email, password)
    except User.DoesNotExist:
        logger.info("User at {} attempted to log in as '{}'. User not found or password not valid.".format(
            request.remote_addr, email))
        return False

    response.set_cookie("login", user.email, secret=SECRET_KEY, path="/")
    logger.info("User {} logged in from IP {}.".format(
        user.for_log, request.remote_addr))
    user.logout_nonce = utils.url_escape(utils.logout_nonce(user))
    user.save()
    return True
def run(self):
    '''
    Main service loop: starts the event/apply subsystems, then repeatedly
    (re)connects to RabbitMQ and processes events until `running` is
    cleared, backing off up to 1000 seconds between attempts.
    '''
    logger.info('starting heligated')
    event_system_start()
    apply_system_start()
    logger.info('heligated started')
    global running
    wait_timer = 0
    while running:
        try:
            logger.info('try connect to rabbitmq server')
            time.sleep(wait_timer)
            run_events_loop(apply_events)
        except pika.exceptions.ConnectionClosed:
            # A dropped connection is normal churn: retry immediately.
            wait_timer = 0
            logger.info('rabbitmq connection closed')
        except pika.exceptions.AMQPConnectionError:
            logger.info("can't connect to rabbitmq server")
        except AttributeError:
            pass
        except KeyboardInterrupt:
            running = False
        if wait_timer < 1000:
            wait_timer += 1
    event_system_stop()
    apply_system_stop()
    # BUG FIX: corrected misspelled log message ('stoped' -> 'stopped').
    logger.info('heligated stopped')
'\n'.join(scheduled_page_report)) import smtplib from email.mime.text import MIMEText from core.auth import get_users_with_permission, role admin_users = get_users_with_permission(role.SYS_ADMIN) admins = [] for n in admin_users: msg = MIMEText(message_text) msg['Subject'] = 'Scheduled activity report for {}'.format(product_id) msg['From'] = n.email msg['To'] = n.email admins.append(n.email) s = smtplib.SMTP('localhost') s.send_message(msg) s.quit() print ('Reports emailed to {}.'.format(','.join(admins))) logger.info("Scheduled job run, processed {} pages.".format(total_pages)) else: print ('No scheduled tasks found to run.') print ('Scheduled tasks script completed.')
def loop_run(): try: run_events_loop(logger_event) except: logger.exception('error in squid3 logger') os._exit(1) def sig_handler(signum, frame): logger.debug('squid3 logger caught siglan {0}'.format(signum)) if __name__ == '__main__': logger.info('squid3 logger starting') sys.excepthook = except_hook signal.signal(signal.SIGTERM, sig_handler) event_loop_thread = threading.Thread(target=loop_run) event_loop_thread.start() logger.info('squid3 logger started') try: while True: line = sys.stdin.readline() cmd_type = line[0] if cmd_type == 'L': squid3_logger.log(line) elif cmd_type == 'F':
def blog_settings_save(blog_id, nav_setting):
    '''
    Validates and saves blog settings submitted from the settings form,
    building a success or failure Status, and returns the rendered
    settings page.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_admin(user, blog)

    _get = request.forms.getunicode

    # this could also be normalized?:
    # blog.form_gets([list here])
    # & you could always add custom fields after the fact
    blog.name = _get('blog_name', blog.name)
    blog.description = _get('blog_description', blog.description)
    blog.set_timezone = _get('blog_timezone')
    blog.url = _get('blog_url', blog.url)
    blog.path = _get('blog_path', blog.path)
    blog.base_extension = _get('blog_base_extension', blog.base_extension)
    blog.media_path = _get('blog_media_path', blog.media_path)

    from core.utils import Status
    from core.libs.peewee import IntegrityError

    errors = []
    try:
        blog.validate()
        blog.save()
    except IntegrityError as e:
        from core.utils import field_error
        errors.append(field_error(e))
    except Exception as e:
        errors.extend(e.args[0])

    # We could condense this all to:
    # blog.validate_and_save()
    # and just have it return errors as a list?

    if errors:
        status = Status(
            type='danger',
            no_sure=True,
            message='Blog settings could not be saved due to the following problems:',
            message_list=errors)
    else:
        status = Status(
            type='success',
            message="Settings for <b>{}</b> saved successfully.<hr/>It is recommended that you <a href='{}/blog/{}/purge'>republish this blog</a> immediately.".format(
                blog.for_display, BASE_URL, blog.id))

    logger.info("Settings for blog {} edited by user {}.".format(
        blog.for_log, user.for_log))

    tags = template_tags(user=user)
    tags.blog = blog
    tags.nav_default = nav_setting
    if status is not None:
        tags.status = status

    return blog_settings_output(tags)
def process_queue_publish(queue_control, blog):
    '''
    Processes the publishing queue for a given blog.
    Takes in a queue_control entry, and returns an integer of the number
    of jobs remaining in the queue for that blog. Typically invoked
    by the process_queue function.

    :param queue_control:
        The queue_control entry, from the queue, to use for this
        publishing queue run.
    :param blog:
        The blog object that is in context for this job.
    '''
    # from . import invalidate_cache
    # invalidate_cache()

    queue_control.lock()

    queue_original = Queue.select().order_by(
        Queue.priority.desc(), Queue.date_touched.desc()).where(
            Queue.blog == blog,
            Queue.is_control == False)
    # Only take a bounded batch per pass so one blog can't hog the worker.
    queue = queue_original.limit(MAX_BATCH_OPS).naive()

    queue_original_length = queue_original.count()
    queue_length = queue.count()

    # FIX: time.clock() was deprecated since Python 3.3 and removed in 3.8;
    # time.perf_counter() is the documented replacement for interval timing.
    start_queue = time.perf_counter()

    if queue_length > 0:
        logger.info("Queue job #{} @ {} (blog #{}, {} items) started.".format(
            queue_control.id,
            date_format(queue_control.date_touched),
            queue_control.blog.id,
            queue_original_length))

    removed_jobs = []
    start = time.perf_counter()

    for q in queue:
        # Dispatch each entry to the handler registered for its job type.
        job_type.action[q.job_type](q)
        removed_jobs.append(q.id)
        # Bail out early on long runs; the remaining entries are picked up
        # on the next control-job pass.
        if (time.perf_counter() - start) > LOOP_TIMEOUT:
            break

    Queue.remove(removed_jobs)

    # we don't need to have an entirely new job!
    # we should recycle the existing one, yes?
    new_queue_control = Queue.control_job(blog)
    # new_queue_control = Queue.get(Queue.blog == blog,
    #     Queue.is_control == True)

    queue_original_length -= len(removed_jobs)
    new_queue_control.data_integer = queue_original_length

    end_queue = time.perf_counter()
    total_time = end_queue - start_queue

    if new_queue_control.data_integer <= 0:
        # Queue drained: retire the control job entirely.
        new_queue_control.delete_instance()
        logger.info("Queue job #{} @ {} (blog #{}) finished ({:.4f} secs).".format(
            new_queue_control.id,
            date_format(new_queue_control.date_touched),
            new_queue_control.blog.id,
            total_time))
    else:
        # new_queue_control.is_running = False
        # new_queue_control.save()
        # Work remains: release the lock so the next pass can pick it up.
        new_queue_control.unlock()
        logger.info("Queue job #{} @ {} (blog #{}) processed {} items ({:.4f} secs, {} remaining).".format(
            new_queue_control.id,
            date_format(new_queue_control.date_touched),
            new_queue_control.blog.id,
            len(removed_jobs),
            total_time,
            queue_original_length,
        ))

    return new_queue_control.data_integer
def boot(aux_settings=None):
    '''
    Reads setup options and starts the Web server for the application.

    :param aux_settings: optional mapping of setting names to values; any
        key that already exists on the settings module is overwritten
        before startup.
    '''
    if aux_settings is not None:
        # Only override settings that already exist on the settings module.
        for n in aux_settings:
            if n in settings.__dict__:
                settings.__dict__[n] = aux_settings[n]

    from core.libs import bottle
    _stderr = bottle._stderr
    import sys

    # NOTE(review): sys.argv always contains at least the script name, so
    # this condition is always true and `arguments` is always bound below.
    if len(sys.argv) > 0:
        arguments = setup_args()

    bottle.TEMPLATE_PATH = [settings.VIEW_PATH]

    _stderr(settings.PRODUCT_NAME + "\n")
    _stderr("Running in " + settings.APPLICATION_PATH + "\n")

    if settings.DEBUG_MODE:
        _stderr("\n" + ('*' * 40) +
                "\nDebug mode!\nThis may impact performance.\nDo not use this setting in production.\n" +
                ('*' * 40) + "\n\n")

    if settings.NO_SETUP:
        # First-run path: no configuration file yet, so stand up a minimal
        # bottle app that serves only the installer.
        # We could probably move all this into its own module ala core.routes
        _stderr('\nNo configuration file [{}] found in \'{}\'.\n'.format(
            settings.INSTALL_INI_FILE_NAME, settings.config_file))
        import os
        os.makedirs(os.path.join(settings.APPLICATION_PATH, 'data'),
                    exist_ok=True)
        app = bottle.Bottle()

        def make_server(_app, settings):
            # Registers the installer's hooks and routes on _app.
            # we could move these two into utils and import from there for both
            # here and core.routes
            @_app.hook('before_request')
            def strip_path():
                # Strip trailing slashes, but leave a bare "/" alone.
                if len(bottle.request.environ['PATH_INFO']) > 1:
                    bottle.request.environ['PATH_INFO'] = bottle.request.environ['PATH_INFO'].rstrip('/')

            @_app.route(settings.BASE_PATH + settings.STATIC_PATH + '/<filepath:path>')
            def server_static(filepath):
                '''
                Serves static files from the application's own internal
                static path, e.g. for its CSS/JS
                '''
                bottle.response.add_header('Cache-Control', 'max-age=7200')
                return bottle.static_file(
                    filepath,
                    root=settings.APPLICATION_PATH + settings.STATIC_PATH)

            @_app.route(settings.BASE_PATH + '/install', ('GET', 'POST'))
            @_app.route(settings.BASE_PATH + '/install/step-<step_id:int>', ('GET', 'POST'))
            def setup_step(step_id=0):
                # Delegates each installer step to install.install.step.
                try:
                    from install.install import step
                    s = step(step_id)
                except Exception as e:
                    raise e
                return s

        @app.route('/')
        def setup():
            # On the first request, derive the BASE_URL_* settings from the
            # request URL, register the installer routes, then redirect the
            # visitor to /install.
            global routes_ready
            if routes_ready is False:
                try:
                    url = bottle.request.urlparts
                    # let's assume there's always going to be redirection to hide the script name
                    path = url.path.rstrip('/').rsplit('/', 1)
                    settings.BASE_URL_PROTOCOL = url.scheme + "://"  # The URL scheme
                    settings.BASE_URL_NETLOC = url.netloc  # The server name
                    # Everything up to the first / after the server name
                    settings.BASE_URL_ROOT = settings.BASE_URL_PROTOCOL + settings.BASE_URL_NETLOC
                    # Any additional path to the script (subdirectory)
                    settings.BASE_URL_PATH = path[0]
                    # The URL we use to reach the script by default
                    settings.BASE_URL = settings.BASE_URL_ROOT + settings.BASE_URL_PATH
                    make_server(app, settings)
                    routes_ready = True
                except Exception as e:
                    return "Oops: {}".format(e)
            bottle.redirect(settings.BASE_PATH + '/install')
    else:
        # Normal path: load the full application's routes.
        from core.routes import app
        try:
            settings.DB.make_db_connection()
        except Exception as e:
            # Best-effort: report the failure but keep starting up.
            _stderr("Could not make DB connection: {}".format(e))
        from core import plugins
        try:
            plugins.activate_plugins()
        except (plugins.PluginImportError, BaseException) as e:
            _stderr("\nProblem importing plugins: " + (str(e)) + '\n')

    if settings.DESKTOP_MODE and arguments.url:
        # Desktop mode: open the local server's URL in the default browser.
        import webbrowser
        webbrowser.open(settings.BASE_URL_PROTOCOL +
                        settings.DEFAULT_LOCAL_ADDRESS +
                        settings.DEFAULT_LOCAL_PORT + '/' + arguments.url)

    if (settings.DEBUG_MODE is True and
            settings.NO_SETUP is False and
            settings.USE_WSGI is False):
        from core.log import logger
        # DEFAULT_LOCAL_PORT appears to carry a leading separator
        # (e.g. ":8080"); [1:] strips it -- TODO confirm against settings.
        logger.info("Starting server at {} on port {}".format(
            settings.DEFAULT_LOCAL_ADDRESS,
            settings.DEFAULT_LOCAL_PORT[1:]))

    # Pick the bottle backend from SERVER_MODE; "auto" is the local default.
    if settings.SERVER_MODE == 'wsgi':
        bottle.run(app, server="flipflop", debug=settings.DEBUG_MODE)
    elif settings.SERVER_MODE == 'cgi':
        bottle.run(app, server="cgi", debug=settings.DEBUG_MODE)
    else:
        bottle.run(app, server="auto",
                   port=settings.DEFAULT_LOCAL_PORT[1:],
                   debug=settings.DEBUG_MODE)
redirector.users_updated() if (SYSTEM_START in events) or (SYSTEM_FULL_RECONFIG in events): redirector.config_updated() def loop_run(): try: run_events_loop(redirector_event) except: logger.exception('error in rederector') os._exit(1) if __name__ == '__main__': logger.info('rederector starting') sys.excepthook = except_hook event_loop_thread = threading.Thread(target=loop_run) event_loop_thread.start() logger.info('rederector started') try: while True: line = sys.stdin.readline() if len(line) <= 1: logger.debug('rederector received blank line') logger.info('rederector stopping') redirector.log_statistic() logger.info('rederector stoped') os._exit(0)
def page_delete(page_id, confirm):
    '''
    Deletes a selected page -- no confirmation yet
    Returns user to list of pages in blog with a notice about the deleted file
    '''
    user = auth.is_logged_in(request)
    page = Page.load(page_id)
    permission = auth.is_page_editor(user, page)
    blog = page.blog

    from core.utils import Status

    tags = template_tags(page=page, user=user)

    from core.models import page_status

    # All three outcomes send the user back to the blog's page listing.
    listing_url = '{}/blog/{}'.format(BASE_URL, blog.id)

    if page.status != page_status.unpublished:
        # Refuse to delete a page that is still published.
        tags.status = Status(
            type='danger',
            no_sure=True,
            message='Page <b>{}</b> is not set to unpublished and cannot be deleted. Unpublish this page before deleting it.'.format(
                page.for_display),
            action='Return to the page listing',
            url=listing_url,
            close=False)
    elif request.forms.getunicode('confirm') == user.logout_nonce:
        # Confirmed via the user's nonce: delete and report success.
        page_label = page.for_log
        from core.cms.cms import delete_page
        delete_page(page)
        tags.status = Status(
            type='success',
            message='Page {} successfully deleted'.format(page_label),
            action='Return to the page listing',
            url=listing_url,
            close=False)
        logger.info("Page {} deleted by user {}.".format(
            page_label, user.for_log))
    else:
        # Not yet confirmed: present a yes/no prompt keyed to the nonce.
        tags.status = Status(
            type='warning',
            close=False,
            message='You are about to delete page <b>{}</b> from blog <b>{}</b>.'.format(
                page.for_display, blog.for_display),
            yes={
                'label': 'Yes, delete this page',
                'id': 'delete',
                'name': 'confirm',
                'value': user.logout_nonce},
            no={
                'label': 'No, return to blog page listing',
                'url': listing_url})

    return report(tags, 'blog_delete_page', page)