def export(self, entity: _content.model.Content, exporter: _content_export.model.ContentExport):
    """Export data
    """
    _logger.info("Export started. '{}'".format(entity.title))

    try:
        opts = exporter.driver_opts  # type: _frozendict

        oauth_token = opts.get('oauth_token')
        oauth_token_secret = opts.get('oauth_token_secret')
        s = _tumblr.session.Session(oauth_token, oauth_token_secret)

        tags = exporter.add_tags  # type: tuple
        if entity.has_field('tags'):
            tags += tuple(t.title for t in entity.f_get('tags'))

        thumb_url = entity.images[0].get_url(width=640) if entity.images else None
        author = entity.author.first_last_name
        description = entity.f_get('description') if entity.has_field('description') else ''

        s.blog_post_link(opts['user_blog'], entity.url, entity.title, description, thumb_url,
                         author=author, tags=','.join(tags))
    except Exception as e:
        raise _content_export.error.ExportError(e)

    _logger.info("Export finished. '{}'".format(entity.title))

def install(archive_path: str, delete_zip_file: bool = True):
    """Install a theme from a zip-file
    """
    logger.debug('Requested theme installation from zip-file {}'.format(archive_path))

    # Create temporary directory
    tmp_dir_path = util.mk_tmp_dir(subdir='theme')

    try:
        # Extract archive to the temporary directory
        _extract_archive(archive_path, tmp_dir_path)

        # Try to initialize the theme to ensure everything is okay
        theme = _theme.Theme('tmp.theme.{}'.format(path.basename(tmp_dir_path)))

        # Install required pip packages
        for pkg_name, pkg_version in theme.requires['packages'].items():
            logger.info("Theme '{}' requires pip package '{} {}', going to install it".format(
                theme.name, pkg_name, pkg_version))
            pip.install(pkg_name, pkg_version, True, reg.get('debug'))

        # Install required plugins
        for p_name, p_version in theme.requires['plugins'].items():
            if not plugman.is_installed(p_name, VersionRange(p_version)):
                logger.info("Theme '{}' requires plugin '{} {}', installing...".format(
                    theme.name, p_name, p_version))
                plugman.install(p_name, VersionRange(p_version))

        # Theme has been successfully initialized, so now it can be moved to the 'themes' package
        dst_path = path.join(_themes_path, theme.name)
        if path.exists(dst_path):
            logger.warning("Existing theme installation at '{}' will be replaced with a new one".format(dst_path))
            rmtree(dst_path)

        # Move directory to the final location
        move(tmp_dir_path, dst_path)
        logger.debug("'{}' has been successfully moved to '{}'".format(tmp_dir_path, dst_path))

        reload.reload()
    finally:
        # Remove temporary directory
        if path.exists(tmp_dir_path):
            rmtree(tmp_dir_path)

        # Remove ZIP file
        if delete_zip_file:
            unlink(archive_path)

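# A hypothetical usage example (the path is illustrative): install a theme
# from a downloaded archive, keeping the zip file afterwards for inspection:
#
#   install('/tmp/downloads/my-theme.zip', delete_zip_file=False)
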
def cron_every_min():
    out = ''
    for r in _events.fire('pytsite.stats@update'):
        if not r or not isinstance(r, str):
            continue
        out += '- ' + r + '\n'

    with open(_path.join(_reg.get('paths.storage'), 'stats.txt'), 'wt') as f:
        f.write(_util.w3c_datetime_str() + '\n\n' + out)

    if out:
        _logger.info('Current stats:\n{}'.format(out[:-1]))

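# A minimal sketch of a 'pytsite.stats@update' handler for the event fired
# above; handlers are assumed to return a short one-line string (non-string
# results are skipped). The counter value and the listen() registration call
# are illustrative, not part of this codebase.
def on_stats_update() -> str:
    return 'Active sessions: {}'.format(42)

# _events.listen('pytsite.stats@update', on_stats_update)
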
def export(self, entity: _content.model.Content, exporter: _content_export.model.ContentExport):
    """Export data
    """
    _logger.info("Export started. '{}'".format(entity.title))

    try:
        opts = exporter.driver_opts  # type: _frozendict
        user_session = _facebook.session.Session(opts.get('access_token'))

        # Tags
        tags = ['#' + _tag_cleanup_re.sub('', t) for t in exporter.add_tags]
        if entity.has_field('tags'):
            tags += ['#' + _tag_cleanup_re.sub('', t.title) for t in entity.f_get('tags')]

        message = _util.strip_html_tags(entity.body)[:600] + ' ' + ' '.join(tags) + ' ' + entity.url

        # Pre-generate image for the OpenGraph story
        if entity.has_field('images') and entity.images:
            _requests.get(entity.images[0].get_url(width=900, height=500))

        # Notify OpenGraph about sharing
        scrape_r = user_session.request('', 'POST', id=entity.url, scrape='true')
        if 'updated_time' not in scrape_r:
            raise _facebook.error.OpenGraphError(
                "Error while updating OG story '{}'. Response from Facebook: {}".format(entity.title, scrape_r))

        # Post to a page if one is configured, otherwise to the user's feed
        if 'page_id' in opts and opts['page_id']:
            page_session = _facebook.session.Session(self._get_page_access_token(opts['page_id'], user_session))
            page_session.feed_message(message, entity.url)
        else:
            user_session.feed_message(message, entity.url)
    except Exception as e:
        raise _content_export.error.ExportError(e)

    _logger.info("Export finished. '{}'".format(entity.title))

def export(self, entity, exporter):
    """Performs export

    :type entity: plugins.content._model.Content
    :type exporter: plugins.content_export._model.ContentExport
    """
    try:
        _logger.info("Export started. '{}'".format(entity.title))

        tags = exporter.add_tags
        if entity.has_field('tags'):
            tags += tuple(tag.title for tag in entity.f_get('tags'))

        opts = exporter.driver_opts

        msg = ''
        if entity.has_field('images') and entity.images:
            img_url = entity.images[0].get_url(width=1024)
            msg += '<p><a href="{}"><img src="{}" title="{}"></a></p>'. \
                format(entity.url, img_url, _util.escape_html(entity.title))

        msg += '<p>{}: <a href="{}">{}</a></p>'.format(
            _lang.t('content_export_livejournal@source', language=entity.language), entity.url, entity.url)

        if entity.has_field('description'):
            msg += '<p>{}</p>'.format(entity.f_get('description'))

        msg += '<lj-cut>'

        msg_body = entity.f_get('body', process_tags=True, responsive_images=False, images_width=1200)
        msg_body = msg_body.replace('\r', '').replace('\n', '')
        msg += _util.trim_str(msg_body, 64000, True)

        msg += '</lj-cut>'

        if opts['lj_like']:
            msg += '<lj-like buttons="{}">'.format(opts['lj_like'])

        s = _livejournal.Session(opts['username'], opts['password'])
        pub_time = entity.f_get('publish_time') if entity.has_field('publish_time') else entity.created
        r = s.post_event(entity.title[:255], msg, tags, pub_time)

        _logger.info("Export finished. '{}'. LJ response: {}".format(entity.title, r))
    except Exception as e:
        raise _content_export.error.ExportError(e)

def cron_daily():
    """'pytsite.cron.daily' event handler
    """
    content = ''
    for user_agent, rules in _api.get_rules().items():
        if rules:
            content += 'User-agent: {}\n'.format(user_agent)
            for rule in rules:
                content += rule + '\n'
            content += '\n'

    out_path = path.join(_reg.get('paths.static'), 'robots.txt')
    with open(out_path, 'w') as f:
        f.write(content)

    _logger.info('File successfully written to {}'.format(out_path))

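# For illustration, assuming _api.get_rules() returns
# {'*': ['Disallow: /admin', 'Disallow: /api']}, the generated robots.txt is:
#
#   User-agent: *
#   Disallow: /admin
#   Disallow: /api
#
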
def uninstall(package_name: str):
    """Uninstall a theme
    """
    theme = get(package_name)
    if theme.name == get().name:
        raise RuntimeError('Cannot uninstall current theme, please switch to another theme before uninstallation')

    del _fallback_theme_name[package_name]
    rmtree(theme.path)

    logger.info("Theme '{}' has been successfully uninstalled from '{}'".format(theme.name, theme.path))

def build_translations(pkg_name: str):
    """Compile translations
    """
    # Guard against recursive calls
    if pkg_name in _building_translations:
        return
    _building_translations.append(pkg_name)

    # Build dependencies first
    for dep_pkg_name in package_info.requires_plugins(pkg_name):
        dep_pkg_name = 'plugins.' + dep_pkg_name
        if lang.is_package_registered(dep_pkg_name):
            build_translations(dep_pkg_name)

    output_file = path.join(assets_dst('assetman'), 'translations.json')

    # Prepare data structure
    if path.exists(output_file):
        data = util.load_json(output_file)
    else:
        data = {'langs': {}, 'translations': {}}

    # Update languages information
    data['langs'] = lang.langs()

    # Build translations structure
    for lang_code in lang.langs():
        if lang_code not in data['translations']:
            data['translations'][lang_code] = {}
        logger.info('Compiling translations for {} ({})'.format(pkg_name, lang_code))
        data['translations'][lang_code][pkg_name] = lang.get_package_translations(pkg_name, lang_code)

    # Create output directory
    output_dir = path.dirname(output_file)
    if not path.exists(output_dir):
        makedirs(output_dir, 0o755, True)

    # Write translations to the file
    with open(output_file, 'wt', encoding='utf-8') as f:
        logger.debug("Writing translations into '{}'".format(output_file))
        f.write(json.dumps(data))

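# For illustration, the translations.json produced above has roughly this
# shape (the package name and message IDs here are hypothetical):
#
#   {
#     "langs": <result of lang.langs()>,
#     "translations": {
#       "en": {"plugins.auth": {"login": "Login", ...}},
#       ...
#     }
#   }
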
def export(self, entity: _content.model.Content, exporter: _content_export.model.ContentExport):
    """Export data
    """
    _logger.info("Export started. '{}'".format(entity.title))

    opts = exporter.driver_opts  # type: _frozendict
    app_key = _twitter.get_app_key()
    app_sec = _twitter.get_app_secret()

    try:
        tw = _Twython(app_key, app_sec, opts['oauth_token'], opts['oauth_token_secret'])

        # Upload the first image, if any, to attach it to the tweet
        media_ids = []
        if entity.images:
            img = entity.images[0]
            with open(img.storage_path, 'rb') as f:
                r = tw.upload_media(media=f)
            media_ids.append(r['media_id'])
    except _TwythonError as e:
        raise _content_export.error.ExportError(str(e))

    tags = ['#' + t for t in exporter.add_tags if ' ' not in t]
    if hasattr(entity, 'tags'):
        tags += ['#' + t.title for t in entity.tags if ' ' not in t.title]

    # Twitter rejects too long statuses, so cut one word from the right on
    # each failed attempt until the update succeeds or attempts are exhausted
    attempts = 20
    status = '{} {} {}'.format(entity.title, entity.url, ' '.join(tags))
    while attempts:
        try:
            tw.update_status(status=status, media_ids=media_ids)
            break
        except _TwythonError as e:
            status = ' '.join(status.split(' ')[:-1])
            attempts -= 1
            if not attempts:
                raise _content_export.error.ExportError(str(e))

    _logger.info("Export finished. '{}'".format(entity.title))

def send(self):
    """Send message
    """
    def do_send(msg: Message):
        try:
            engine = _SMTP('localhost')
            engine.sendmail(msg._from_addr, msg._to_addrs, str(msg))
            _logger.info("Message '{}' has been sent to {}".format(msg.subject, msg.to_addrs))
        except Exception as e:
            _logger.error('Unable to send message to {}: {}'.format(msg.to_addrs, e), exc_info=e)

    super().attach(_MIMEText(self.body, 'html', 'utf-8'))
    for attachment in self._attachments:
        super().attach(attachment)

    # Send asynchronously to avoid blocking the caller on SMTP I/O
    _threading.run_in_thread(do_send, msg=self)
    _logger.info('Started new message send thread to {}'.format(self.to_addrs))

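# A usage sketch, based on how Message is constructed elsewhere in this
# codebase (recipient, subject, HTML body); the address and texts are
# illustrative:
#
#   _mail.Message('user@example.com', 'Greetings', '<p>Hello</p>').send()
#
# send() returns immediately; actual delivery happens in the background thread.
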
def export(self, entity: content.model.Content, exporter: content_export.model.ContentExport):
    """Export data
    """
    logger.info("Export started. '{}'".format(entity.title))

    opts = exporter.driver_opts  # type: frozendict

    tags = ['#' + t for t in exporter.add_tags if ' ' not in t]
    if hasattr(entity, 'tags'):
        tags += ['#' + t.title for t in entity.tags if ' ' not in t.title]

    try:
        text = '{} {} {}'.format(entity.title, entity.url, ' '.join(tags))
        bot = telegram.Bot(opts['bot_token'])
        bot.send_message(text, opts['chat_id'])
    except telegram.error.Error as e:
        raise content_export.error.ExportError(str(e))

    logger.info("Export finished. '{}'".format(entity.title))

def _on_pre_save(self, **kwargs):
    super()._on_pre_save(**kwargs)

    c_user = auth.get_current_user()

    # Admins have unrestricted permissions
    if c_user.is_admin:
        return

    # Check current user's permission to CREATE entities
    if self.is_new and not self.odm_auth_check_entity_permissions(PERM_CREATE):
        logger.info(f'Current user login: {c_user.login}')
        raise errors.ForbidCreation(f"Insufficient permissions to create entities of model '{self.model}'")

    # Check current user's permission to MODIFY entities
    if not self.is_new and not self.odm_auth_check_entity_permissions(PERM_MODIFY):
        logger.info(f'Current user login: {c_user.login}')
        raise errors.ForbidModification(f"Insufficient permissions to modify entity '{self.ref}'")

def export(self, entity: _content.model.Content, exporter: _content_export.model.ContentExport):
    """Export data
    """
    _logger.info("Export started. '{}'".format(entity.title))

    opts = exporter.driver_opts  # type: _frozendict

    tags = ['#' + t for t in exporter.add_tags if ' ' not in t]
    if entity.has_field('tags'):
        tags += ['#' + t.title for t in entity.f_get('tags') if ' ' not in t.title]

    message = '{} {} {}'.format(entity.title, ' '.join(tags), entity.url)

    try:
        # A negative owner ID means posting to a group's wall
        owner_id = -int(opts['group_id']) if opts['group_id'] != '0' else self._parse_user_id(opts['access_url'])
        s = _vkontakte.session.Session(self._parse_access_token(opts['access_url']))

        if entity.images:
            r = s.wall_post(owner_id, message, entity.images[0], entity.url)
        else:
            r = s.wall_post(owner_id, message)

        _logger.info("Export finished. '{}'. Vkontakte response: {}".format(entity.title, r))
    except Exception as e:
        raise _content_export.error.ExportError(e)

def cron_1min():
    """'pytsite.cron.1min' event handler
    """
    global _working

    if _working:
        _logger.warning('Content import is still working')
        return

    _working = True

    max_errors = _reg.get('content_import.max_errors', 13)
    max_items = _reg.get('content_import.max_items', 10)
    delay_errors = _reg.get('content_import.delay_errors', 120)

    importer_finder = _odm.find('content_import') \
        .eq('enabled', True) \
        .lt('paused_till', _datetime.now()) \
        .sort([('errors', _odm.I_ASC)])

    for importer in importer_finder.get():  # type: _model.ContentImport
        options = dict(importer.driver_opts)
        options.update({
            'content_author': importer.content_author,
            'content_model': importer.content_model,
            'content_language': importer.content_language,
            'content_status': importer.content_status,
            'content_section': importer.content_section,
        })

        driver = _api.get_driver(importer.driver)
        items_imported = 0

        try:
            _logger.info('Content import started. Driver: {}. Options: {}'.format(driver.get_name(), options))

            # Get entities from the driver and save them
            for entity in driver.get_entities(_frozendict(options)):
                if items_imported == max_items:
                    break

                try:
                    # Append additional tags
                    if entity.has_field('tags'):
                        for tag_title in importer.add_tags:
                            tag = _tag.find_by_title(tag_title, language=importer.content_language)
                            if not tag:
                                tag = _tag.dispense(tag_title, language=importer.content_language).save()
                            entity.f_add('tags', tag)

                    # Save entity
                    entity.save()

                    # Notify listeners
                    _events.fire('content_import@import', driver=driver, entity=entity)

                    _logger.info("Content entity imported: '{}'".format(entity.f_get('title')))
                    items_imported += 1

                # Entity was not saved; log the error and skip to the next entity
                except Exception as e:
                    # Delete already attached images to free space
                    if entity.has_field('images') and entity.images:
                        for img in entity.images:
                            img.delete()

                    _logger.error("Error while creating entity '{}'. {}".format(entity.title, str(e)), exc_info=e)

            # Mark that the driver did its work without errors
            importer.f_set('errors', 0)

            _logger.info('Content import finished. Entities imported: {}.'.format(items_imported))

        except Exception as e:
            # Increment errors counter
            importer.f_inc('errors')

            # Store info about the error
            importer.f_set('last_error', str(e))

            if importer.errors >= max_errors:
                # Disable the importer if the maximum errors count is reached
                importer.f_set('enabled', False)
            else:
                # Pause the importer
                importer.f_set('paused_till', _datetime.now() + _timedelta(minutes=delay_errors))

            _logger.error(e)

            # Continue to the next importer
            continue

        finally:
            importer.save()

    _working = False

def generate_rss(model: str, filename: str, lng: str = '*',
                 finder_setup: Callable[[odm.SingleModelFinder], None] = None,
                 item_setup: Callable[[feed.xml.Serializable, Content], None] = None,
                 length: int = 20):
    """Generate RSS feeds
    """
    # Setup finder
    finder = find(model, language=lng)
    if finder_setup:
        finder_setup(finder)

    # Prepare output directory
    output_dir = path.join(reg.get('paths.static'), 'feed')
    if not path.exists(output_dir):
        makedirs(output_dir, 0o755, True)

    # Create generator
    content_settings = reg.get('content')
    parser = feed.rss.Parser()

    # Get <channel> element
    channel = parser.get_children('channel')[0]

    # Channel title
    channel.append_child(feed.rss.em.Title(content_settings.get('home_title_' + lng) or 'UNTITLED'))

    # Channel description
    channel.append_child(feed.rss.em.Description(content_settings.get('home_description_' + lng)))

    # Channel link
    channel.append_child(feed.rss.em.Link(router.base_url()))

    # Channel language
    channel.append_child(feed.rss.em.Language(lng))

    # Channel logo
    logo_url = router.url(reg.get('content.rss_logo_url', 'assets/app/img/logo-rss.png'))
    channel.append_child(feed.rss.yandex.Logo(logo_url))
    square_logo_url = router.url(reg.get('content.rss_square_logo_url', 'assets/app/img/logo-rss-square.png'))
    channel.append_child(feed.rss.yandex.Logo(square_logo_url, square=True))

    # Append channel's items
    for entity in finder.get(length):
        item = feed.rss.em.Item()
        try:
            item.append_child(feed.rss.em.Title(entity.title))
            item.append_child(feed.rss.em.Link(entity.url))
            item.append_child(feed.rss.em.PdaLink(entity.url))
            item.append_child(feed.rss.em.Description(entity.description if entity.description else entity.title))
            item.append_child(feed.rss.em.PubDate(entity.publish_time))
            item.append_child(feed.rss.em.Author('{} ({})'.format(entity.author.login, entity.author.first_last_name)))
        except odm.error.FieldNotDefined:
            pass

        # Section
        if entity.has_field('section'):
            item.append_child(feed.rss.em.Category(entity.section.title))

        # Tags
        if entity.has_field('tags'):
            for tag in entity.tags:
                item.append_child(feed.rss.pytsite.Tag(tag.title))

        # Images
        if entity.has_field('images') and entity.images:
            # Attach all the images as enclosures
            for img in entity.images:
                item.append_child(feed.rss.em.Enclosure(url=img.get_url(), length=img.length, type=img.mime))

        # Video links
        if entity.has_field('video_links') and entity.video_links:
            m_group = item.append_child(feed.rss.media.Group())
            for link_url in entity.video_links:
                m_group.add_widget(feed.rss.media.Player(url=link_url))

        # Body
        if entity.has_field('body'):
            item.append_child(feed.rss.yandex.FullText(entity.f_get('body', process_tags=False, remove_tags=True)))
            item.append_child(feed.rss.content.Encoded(entity.f_get('body', process_tags=False, remove_tags=True)))
            item.append_child(feed.rss.pytsite.FullText(entity.f_get('body', process_tags=False)))

        if item_setup:
            item_setup(item, entity)

        channel.append_child(item)

    # Write feed content
    out_path = path.join(output_dir, '{}-{}.xml'.format(filename, lng))
    with open(out_path, 'wt', encoding='utf-8') as f:
        f.write(parser.generate())

    logger.info("RSS feed successfully written to '{}'".format(out_path))

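# A hypothetical usage sketch for generate_rss(): build an English feed for an
# 'article' model, restricting items via finder_setup and adding an extra
# category via item_setup. The model name and the 'status' field filter are
# assumptions, not part of this codebase.
def _only_published(finder):
    finder.eq('status', 'published')

def _mark_featured(item, entity):
    item.append_child(feed.rss.em.Category('Featured'))

# generate_rss('article', 'articles', lng='en',
#              finder_setup=_only_published, item_setup=_mark_featured)
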
def _generate_sitemap():
    """Generate content sitemap
    """
    global _sitemap_generation_works

    if _sitemap_generation_works:
        raise RuntimeError('Sitemap generation is still in progress')

    _sitemap_generation_works = True
    logger.info('Sitemap generation started')

    output_dir = path.join(reg.get('paths.static'), 'sitemap')
    if path.exists(output_dir):
        rmtree(output_dir)
    makedirs(output_dir, 0o755, True)

    sitemap_index = sitemap.Index()
    links_per_file = 50000
    loop_count = 1
    loop_links = 1
    sm = sitemap.Sitemap()
    sm.add_url(router.base_url(), datetime.now(), 'always', 1)

    for lng in lang.langs():
        for model in reg.get('content.sitemap_models', ()):
            logger.info("Sitemap generation started for model '{}', language '{}'".format(
                model, lang.lang_title(lng)))

            for entity in _api.find(model, language=lng):  # type: ContentWithURL
                sm.add_url(entity.url, entity.publish_time)
                loop_links += 1

                # Flush the sitemap when the per-file limit is reached
                if loop_links >= links_per_file:
                    sitemap_path = sm.write(path.join(output_dir, 'data-%02d.xml' % loop_count), True)
                    logger.info("'{}' successfully written with {} links".format(sitemap_path, loop_links))
                    sitemap_index.add_url(router.url('/sitemap/{}'.format(path.basename(sitemap_path))))

                    loop_count += 1
                    loop_links = 0
                    del sm
                    sm = sitemap.Sitemap()

    # If a non-flushed sitemap exists
    if len(sm):
        sitemap_path = sm.write(path.join(output_dir, 'data-%02d.xml' % loop_count), True)
        logger.info("'{}' successfully written with {} links".format(sitemap_path, loop_links))
        sitemap_index.add_url(router.url('/sitemap/{}'.format(path.basename(sitemap_path))))

    if len(sitemap_index):
        sitemap_index_path = sitemap_index.write(path.join(output_dir, 'index.xml'))
        logger.info("'{}' successfully written".format(sitemap_index_path))

    logger.info('Sitemap generation finished')

    _sitemap_generation_works = False

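# For illustration, after a run that flushed one full sitemap the static
# directory contains (file names follow the 'data-%02d.xml' pattern above):
#
#   sitemap/
#     index.xml
#     data-01.xml
#     data-02.xml
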
def on_cron_every_min():
    """Send weekly mail digest
    """
    # Check if the models are specified
    models = _reg.get('content_digest.models')
    if not models:
        return

    # Check the current day and time
    weekdays = _reg.get('content_digest.days_of_week', [])  # type: list
    time_of_day = _reg.get('content_digest.day_time', '00:00')
    if isinstance(time_of_day, _datetime):
        time_of_day = time_of_day.time()
    else:
        time_of_day = _util.parse_date_time(time_of_day).time()

    now = _datetime.now()
    now_weekday = now.weekday()
    if now_weekday not in weekdays or not (time_of_day.hour == now.hour and time_of_day.minute == now.minute):
        return

    # Calculate the number of days to cover while querying collections
    prev_weekday = weekdays[weekdays.index(now_weekday) - 1]
    if prev_weekday < now_weekday:
        days_diff = now_weekday - prev_weekday
    else:
        days_diff = 7 - prev_weekday + now_weekday

    # Get entities of each model
    entities = []
    entities_num = _reg.get('content_digest.entities_number', 10)
    pub_period = _datetime.now() - _timedelta(days_diff)
    for model in models:
        f = _content.find(model, language='*').gte('publish_time', pub_period).sort([('views_count', _odm.I_DESC)])
        entities += list(f.get(entities_num))

    # Nothing to send
    if not entities:
        return

    # Sort all entities by views, most viewed first, and cut the top
    entities = sorted(entities, key=lambda e: e.views_count, reverse=True)[:entities_num]

    for subscriber in _odm.find('content_digest_subscriber').eq('enabled', True).get():
        _logger.info('Preparing content digest for {}'.format(subscriber.f_get('email')))

        lng = subscriber.f_get('language')
        default_m_subject = _lang.t('content_digest@default_mail_subject', language=lng)
        m_subject = _reg.get('content_digest.mail_subject_{}'.format(lng), default_m_subject)
        m_body = _tpl.render(_reg.get('content_digest.tpl', 'content_digest@digest'), {
            'entities': entities,
            'subscriber': subscriber,
            'language': lng,
        })

        _mail.Message(subscriber.f_get('email'), m_subject, m_body).send()

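# A worked example of the days_diff computation above: with
# weekdays = [0, 3] (Monday and Thursday) and now_weekday = 3,
# prev_weekday = 0, so days_diff = 3 - 0 = 3 and the digest covers
# entities published during the last three days.
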
def on_pytsite_load():
    update_info = _api.get_update_info()
    if not update_info:
        return

    # If pending updates exist, the application must be reloaded from the console to finish them
    if _reg.get('env.type') == 'wsgi':
        _logger.warning('Application needs to be loaded in console to finish plugins update')
        return

    failed_plugins = []

    # Call 'plugin_pre_install()' hooks
    for p_name, info in update_info.items():
        v_to = _semver.Version(info['version_to'])

        try:
            # Check if the plugin is installed and loaded
            plugin = _api.get(p_name)

            # Call plugin_pre_install() hook
            if hasattr(plugin, 'plugin_pre_install') and callable(plugin.plugin_pre_install):
                plugin.plugin_pre_install()

            # Fire 'pre_install' event
            _events.fire('pytsite.plugman@pre_install', name=p_name, version=v_to)

        except _error.PluginNotLoaded as e:
            _logger.error(e)
            _console.print_warning(_lang.t('pytsite.plugman@plugin_install_error', {
                'plugin': p_name,
                'version': v_to,
                'msg': str(e),
            }))
            failed_plugins.append(p_name)
            continue

    # Finish installing/updating plugins
    for p_name, info in update_info.items():
        if p_name in failed_plugins:
            continue

        plugin = _api.get(p_name)
        v_from = _semver.Version(info['version_from'])
        v_to = _semver.Version(info['version_to'])

        try:
            _logger.info(_lang.t('pytsite.plugman@installing_plugin', {
                'plugin': p_name,
                'version': v_to,
            }))

            # Call plugin_install() hook
            if hasattr(plugin, 'plugin_install') and callable(plugin.plugin_install):
                plugin.plugin_install()

            # Fire 'install' event
            _events.fire('pytsite.plugman@install', name=p_name, version=v_to)

            _console.print_success(_lang.t('pytsite.plugman@plugin_install_success', {
                'plugin': p_name,
                'version': v_to,
            }))

        except Exception as e:
            _logger.error(e)
            _console.print_warning(_lang.t('pytsite.plugman@plugin_install_error', {
                'plugin': p_name,
                'version': v_to,
                'msg': str(e),
            }))
            continue

        # Update the plugin
        if v_from != '0.0.0':
            try:
                _console.print_info(_lang.t('pytsite.plugman@updating_plugin', {
                    'plugin': p_name,
                    'v_from': v_from,
                    'v_to': v_to,
                }))

                # Call plugin_update() hook
                if hasattr(plugin, 'plugin_update') and callable(plugin.plugin_update):
                    plugin.plugin_update(v_from=v_from)

                # Fire 'update' event
                _events.fire('pytsite.plugman@update', name=p_name, v_from=v_from)

                _console.print_success(_lang.t('pytsite.plugman@plugin_update_success', {
                    'plugin': p_name,
                    'version': v_to,
                }))

            except Exception as e:
                _console.print_warning(_lang.t('pytsite.plugman@plugin_update_error', {
                    'plugin': p_name,
                    'version': v_to,
                    'msg': str(e),
                }))
                continue

        # Remove info from the update queue
        _api.rm_update_info(p_name)