def bulk(self, type=None, ids=None, **kwargs):
    """Perform bulk operations on media items

    :param type: The type of bulk action to perform (delete)
    :param ids: A list of IDs.

    """
    # Coerce the incoming IDs into a list, whatever shape they arrived in.
    if not ids:
        ids = []
    elif not isinstance(ids, list):
        ids = [ids]

    # 'delete' is the only bulk action currently supported.
    success = False
    if type == 'delete':
        # Bulk-delete directly in the DB without loading rows first.
        Category.query.filter(Category.id.in_(ids)).delete(False)
        DBSession.commit()
        success = True

    return dict(
        success = success,
        ids = ids,
        parent_options = unicode(category_form.c['parent_id'].display()),
    )
def restore_necessary_files():
    """Move previously 'deleted' media/thumb files back into place.

    Builds (src, dest) pairs by mapping each referenced file's live path
    into the deleted directory, then moves any that exist back.
    """
    # Restore the appropriate media files and thumbnail files
    # for any media currently in the database.
    # Use the python models to do this.
    if not deleted_dir:
        # No deleted-files directory configured; nothing to restore.
        return

    filename_pairs = []
    for media in DBSession.query(Media).all():
        # Thumbnails: deleted-dir copy -> live thumb path.
        for thumb in thumb_paths(media).values():
            filename_pairs.append((
                thumb.replace(m_img_dir, m_deleted_dir),
                thumb
            ))
        # Media files: only those with a local file path.
        for file in media.files:
            if file.file_path:
                filename_pairs.append((
                    file.file_path.replace(media_dir, m_deleted_dir),
                    file.file_path
                ))
    for podcast in DBSession.query(Podcast).all():
        for thumb in thumb_paths(podcast).values():
            filename_pairs.append((
                thumb.replace(p_img_dir, p_deleted_dir),
                thumb
            ))

    for src, dest in filename_pairs:
        # Silently skip files that were never moved to the deleted dir.
        if os.path.exists(src):
            if DEBUG:
                print "Moving %s to %s" % (src, dest)
            shutil.move(src, dest)
def _update_settings(self, values):
    """Modify the settings associated with the given dictionary."""
    for name, value in values.iteritems():
        setting = tmpl_context.settings[name]
        # Settings are stored as unicode strings; None means "blank".
        if value is None:
            value = u''
        else:
            value = unicode(value)
        # Only dirty rows whose value actually changed.
        if setting.value != value:
            setting.value = value
            DBSession.add(setting)
    DBSession.flush()

    # Clear the settings cache unless there are multiple processes.
    # We have no way of notifying the other processes that they need
    # to clear their caches too, so we've just gotta let it play out
    # until all the caches expire.
    if not request.environ.get('wsgi.multiprocess', False):
        app_globals.settings_cache.clear()
    else:
        # uWSGI provides an automagically included module
        # that we can use to call a graceful restart of all
        # the uwsgi processes.
        # http://projects.unbit.it/uwsgi/wiki/uWSGIReload
        try:
            import uwsgi
            uwsgi.reload()
        except ImportError:
            # Not running under uWSGI; stale caches will simply expire.
            pass
def init_model(engine):
    """Call me before using any of the tables or classes in the model."""
    from mediacore.model import meta
    # Expose the engine on the meta module and bind the shared metadata
    # so DDL/reflection operations know where to execute.
    meta.engine = engine
    meta.metadata.bind = engine
    # Point the scoped session factory at the same engine.
    DBSession.configure(bind=engine)
def increment_views(self):
    """Increment the number of views in the database.

    We avoid concurrency issues by incrementing JUST the views and
    not allowing modified_on to be updated automatically.
    """
    # Unsaved instances cannot be updated via SQL; bump in memory only.
    if self.id is None:
        self.views += 1
        return self.views

    # Don't raise an exception should concurrency problems occur.
    # Views will not actually be incremented in this case, but thats
    # relatively unimportant compared to rendering the page for the user.
    # We may be able to remove this after we improve our triggers to not
    # issue an UPDATE on media_fulltext unless one of its columns are
    # actually changed. Even when just media.views is updated, all the
    # columns in the corresponding media_fulltext row are updated, and
    # media_fulltext's MyISAM engine must lock the whole table to do so.
    transaction = DBSession.begin_nested()
    try:
        # Issue the UPDATE directly so only `views` is touched and
        # modified_on is left alone.
        DBSession.query(self.__class__)\
            .filter(self.__class__.id == self.id)\
            .update({self.__class__.views: self.__class__.views + 1})
        transaction.commit()
    except OperationalError, e:
        transaction.rollback()
        # (OperationalError) (1205, 'Lock wait timeout exceeded, try restarting the transaction')
        # Swallow only the MySQL lock-wait-timeout error (code 1205).
        if not '1205' in e.message:
            raise
def update_status(self, id, status=None, publish_on=None, publish_until=None, **values):
    """Update the publish status for the given media.

    :param id: Media ID
    :type id: ``int``
    :param status: The requested state change: ``'unreviewed'`` marks
        the media reviewed; ``'draft'`` hands off to
        :meth:`_publish_media`.
    :type status: ``unicode`` or ``None``
    :param publish_on: A date to set to
        :attr:`~mediacore.model.media.Media.publish_on`
    :type publish_on: :class:`datetime.datetime` or ``None``
    :param publish_until: A date to set to
        :attr:`~mediacore.model.media.Media.publish_until`
    :type publish_until: :class:`datetime.datetime` or ``None``
    :rtype: JSON dict
    :returns:
        success
            bool
        message
            Error message, if unsuccessful
        status_form
            Rendered XHTML for the status form, updated to reflect the
            changes made.

    """
    media = fetch_row(Media, id)
    new_slug = None

    # Make the requested change assuming it will be allowed
    if status == 'unreviewed':
        media.reviewed = True
    elif status == 'draft':
        self._publish_media(media, publish_on)
    elif publish_on:
        media.publish_on = publish_on
        media.update_popularity()
    elif publish_until:
        media.publish_until = publish_until

    # Verify the change is valid by re-determining the status
    media.update_status()
    DBSession.flush()

    if request.is_xhr:
        # Return the rendered widget for injection
        status_form_xhtml = unicode(
            update_status_form.display(
                action=url_for(action='update_status'), media=media))
        return dict(
            success=True,
            status_form=status_form_xhtml,
            slug=new_slug,
        )
    else:
        redirect(action='edit')
def _add_new_media_file(media, original_filename, file):
    """Create a MediaFile for an upload, attach it to `media`, and store it.

    Guesses container/type from the filename extension, records the size,
    flushes to obtain IDs, then copies the upload to permanent storage.
    Returns the new :class:`MediaFile`.
    """
    # FIXME: I think this will raise a KeyError if the uploaded
    #        file doesn't have an extension.
    file_ext = os.path.splitext(original_filename)[1].lower()[1:]

    # set the file paths depending on the file type
    media_file = MediaFile()
    media_file.display_name = original_filename
    media_file.container = guess_container_format(file_ext)
    media_file.type = guess_media_type(media_file.container)

    # Small files are stored in memory and do not have a tmp file w/ fileno
    if hasattr(file, 'fileno'):
        # index 6 of the fstat tuple is st_size
        media_file.size = os.fstat(file.fileno())[6]
    else:
        # The file may contain multi-byte characters, so we must seek instead of count chars
        file.seek(0, os.SEEK_END)
        media_file.size = file.tell()
        file.seek(0)

    # update media relations
    media.files.append(media_file)

    # add the media file (and its media, if new) to the database to get IDs
    DBSession.add(media_file)
    DBSession.flush()

    # copy the file to its permanent location; the IDs make the name unique
    file_name = '%d_%d_%s.%s' % (media.id, media_file.id, media.slug, file_ext)
    file_url = _store_media_file(file, file_name)
    media_file.file_name = file_name

    return media_file
def attach_and_store_media_file(media, media_file, file):
    """Given a Media object, a MediaFile object, and a file handle,
    attaches the MediaFile to the Media object, and saves the file to permanent storage.

    Adds the MediaFile to the database.
    """
    # Record the upload size. In-memory uploads (small files) lack a real
    # descriptor, so measure those by seeking to the end instead of fstat.
    if not hasattr(file, 'fileno'):
        # Multi-byte characters mean we must seek, not count chars.
        file.seek(0, os.SEEK_END)
        media_file.size = file.tell()
        file.seek(0)
    else:
        media_file.size = os.fstat(file.fileno())[6]

    # Relate the file to its media, then flush so both rows get IDs.
    media.files.append(media_file)
    DBSession.add(media_file)
    DBSession.flush()

    # Persist the upload under a name made unique by the new IDs.
    unique_name = '%d_%d_%s.%s' % (media.id, media_file.id, media.slug,
                                   media_file.container)
    remote_url = store_media_file(file, unique_name)
    if remote_url:
        # The file has been stored remotely; remember its URL.
        media_file.url = remote_url
    else:
        # Stored locally; the generated name is all we need.
        media_file.file_name = unique_name
def save_fields(**result):
    """Save SEO settings to the database on a Media item save.

    When the :attr:`mediacore.plugin.events.Admin.MediaController.save`
    event is triggered it receives the dict of values returned by
    :meth:`mediacore.controllers.admin.media.MediaController.save`.

    The SEO values are extracted from tmpl_context.form_values and if a
    value was entered it is saved. If a valid setting was found, but it
    does not have a value, we remove it from the given media item.

    :param result: A dict of form values for the Media item
    :type result: dict
    :returns: A dict of form values for the Media item
    :rtype: dict

    """
    media = Media.query.get(result['media_id'])
    for key, value in tmpl_context.form_values['seo'].iteritems():
        # SEO values are namespaced in the meta dict with a 'seo_' prefix.
        meta_key = u'seo_%s' % key
        if value:
            media.meta[meta_key] = value
        elif meta_key in media.meta:
            # NOTE(review): values are set via `media.meta` but deleted via
            # `media._meta` — presumably `meta` proxies `_meta` rows so the
            # mapped object can be handed to DBSession.delete; confirm.
            DBSession.delete(media._meta[meta_key])
    return result
def increment_views(self):
    """Increment the number of views in the database.

    We avoid concurrency issues by incrementing JUST the views and
    not allowing modified_on to be updated automatically.
    """
    # Unsaved instances cannot be updated via SQL; bump in memory only.
    if self.id is None:
        self.views += 1
        return self.views

    # Don't raise an exception should concurrency problems occur.
    # Views will not actually be incremented in this case, but thats
    # relatively unimportant compared to rendering the page for the user.
    # We may be able to remove this after we improve our triggers to not
    # issue an UPDATE on media_fulltext unless one of its columns are
    # actually changed. Even when just media.views is updated, all the
    # columns in the corresponding media_fulltext row are updated, and
    # media_fulltext's MyISAM engine must lock the whole table to do so.
    transaction = DBSession.begin_nested()
    try:
        # Issue the UPDATE directly so only `views` is touched and
        # modified_on is left alone.
        DBSession.query(self.__class__)\
            .filter(self.__class__.id == self.id)\
            .update({self.__class__.views: self.__class__.views + 1})
        transaction.commit()
    except exc.OperationalError, e:
        transaction.rollback()
        # (OperationalError) (1205, 'Lock wait timeout exceeded, try restarting the transaction')
        # Swallow only the MySQL lock-wait-timeout error (code 1205).
        if not '1205' in e.message:
            raise
def save(self, id, display_name, group_name, permissions, delete=None, **kwargs):
    """Save changes or create a new :class:`~mediacore.model.auth.Group` instance.

    :param id: Group ID. If ``"new"`` a new group is created.
    :type id: ``int`` or ``"new"``
    :returns: Redirect back to :meth:`index` after successful save.
    """
    group = fetch_row(Group, id)

    if delete:
        # redirect() raises, so nothing below runs for deletions.
        DBSession.delete(group)
        redirect(action='index', id=None)

    group.display_name = display_name
    group.group_name = group_name
    if permissions:
        matching = DBSession.query(Permission).filter(
            Permission.permission_id.in_(permissions))
        group.permissions = list(matching.all())
    else:
        group.permissions = []

    DBSession.add(group)
    redirect(action='index', id=None)
def _autocommit_commit(req):
    # Commit the session; on any failure roll back first so the session
    # is left clean, then let the exception propagate.
    try:
        DBSession.commit()
    except:
        _autocommit_rollback(req)
        raise
    # Only reached on a successful commit.
    _autocommit_fire_callbacks(req, req.commit_callbacks)
def backup_files(dump_dir):
    # Backup all files (media files, thumbs) referenced by an object in the DB
    # to the provided dump_dir. Returns a (status, message) tuple where
    # status 0 means success and 1 means the request was refused.
    # TODO: display errors when file operations fail
    if dump_dir == "/":
        return 1, "Dump Files directory should never be the root directory, '/'"

    # normalize dirname
    dump_dir = dump_dir.rstrip(os.sep) + os.sep

    # These are the directories we will write to.
    media_thumb_dir = dump_dir + Media._thumb_dir
    podcast_thumb_dir = dump_dir + Podcast._thumb_dir
    media_files_dir = dump_dir + "media_files"

    # Initialize our default paths to backup
    default_images = ["news.jpg", "newm.jpg", "newl.jpg"]
    media_thumbs = [m_img_dir + os.sep + img for img in default_images]
    podcast_thumbs = [p_img_dir + os.sep + img for img in default_images]
    media_files = []

    # Add the media thumbs and media files
    for media in DBSession.query(Media).all():
        file_paths = [file_path(f) for f in media.files]
        # file_path() may yield falsy values (e.g. remote files); skip them.
        media_files += [fp for fp in file_paths if fp]
        media_thumbs += thumb_paths(media).values()

    # Add the podcast thumbs
    for podcast in DBSession.query(Podcast).all():
        podcast_thumbs += thumb_paths(podcast).values()

    # Ensure the necessary directories exist.
    assert os.path.isdir(dump_dir)
    for subdir in (media_thumb_dir, media_files_dir, podcast_thumb_dir):
        if not os.path.exists(subdir):
            os.mkdir(subdir)
        assert os.path.isdir(subdir)
        # Start from a clean slate so stale backups don't linger.
        empty_dir(subdir)

    # Copy over all of the files:
    sources_dests = (
        (media_thumbs, media_thumb_dir),
        (media_files, media_files_dir),
        (podcast_thumbs, podcast_thumb_dir),
    )
    for sources, dest_dir in sources_dests:
        for src in sources:
            if DEBUG:
                print "Copying %s to %s%s" % (src, dest_dir, os.sep)
            shutil.copy2(src, dest_dir)

    return (
        0,
        "%d thumbnails and %d media files successfully backed up"
        % (len(media_thumbs) + len(podcast_thumbs), len(media_files)),
    )
def podcast_from_feed(d, tags=False, save_files=False):
    """Build and persist a Podcast (plus its media) from a feedparser dict."""
    # Assume not explicit
    explicit = False
    if 'itunes_explicit' in d['feed']:
        explicit = bool(d['feed']['itunes_explicit'])

    image = None
    if 'image' in d['feed']:
        image = d['feed']['image']['href']

    title = u''
    if 'title' in d['feed']:
        title = d['feed']['title']

    description = u''
    if 'summary' in d['feed']:
        description = d['feed']['summary']

    subtitle = u''
    if 'subtitle' in d['feed']:
        subtitle = d['feed']['subtitle']

    slug = slugify(title)
    # Feeds don't carry a usable author; fill in placeholders.
    author_name = u"PLACEHOLDER NAME"
    author_email = u"*****@*****.**"

    podcast = Podcast()
    podcast.slug = get_available_slug(Podcast, slug, podcast)
    podcast.title = title
    podcast.subtitle = subtitle
    podcast.author = Author(author_name, author_email)
    podcast.description = description
    podcast.explicit = explicit

    DBSession.add(podcast)
    # Flush so the podcast has an ID before thumbs/media reference it.
    DBSession.flush()

    # Create thumbs from image, or default thumbs
    created_images = False
    if image:
        # NOTE(review): neither the urllib2 response nor the temp file is
        # explicitly closed, and a download failure would propagate —
        # confirm that's acceptable for this import path.
        temp_imagefile = tempfile.TemporaryFile()
        imagefile = urllib2.urlopen(image)
        temp_imagefile.write(imagefile.read())
        temp_imagefile.seek(0)
        filename = urlparse.urlparse(image)[2]
        create_thumbs_for(podcast, temp_imagefile, filename)
        created_images = True

    if not created_images:
        create_default_thumbs_for(podcast)

    # Now add all of the entries
    for entry in d['entries']:
        media = media_from_entry(entry, tags, save_files)
        media.podcast = podcast

    return podcast
def setUp(self):
    super(DBTestCase, self).setUp()
    # Create a scratch environment (folders + configured DB) seeded with
    # the default data for each test run.
    self.env_dir = self._create_environment_folders()
    self.pylons_config = setup_environment_and_database(self.env_dir,
        enabled_plugins=self.enabled_plugins)
    add_default_data()
    DBSession.commit()
    # Make this pylons config the active process config for the test.
    config.push_process_config(self.pylons_config)
def disassociate_video_id(self, media_file, video_id):
    # The meta table is (ab)used to remember which Panda::Video a
    # MediaCore::MediaFile came from; the key embeds the video id.
    meta_key = u"%s%s" % (META_VIDEO_PREFIX, video_id)
    matching_rows = DBSession.query(MediaFilesMeta)\
        .filter(MediaFilesMeta.media_files_id==media_file.id)\
        .filter(MediaFilesMeta.key==meta_key)
    # Remove every matching pairing row.
    for row in matching_rows:
        DBSession.delete(row)
def _autocommit_commit(req):
    # Imported locally to avoid a circular import at module load time.
    from mediacore.model.meta import DBSession
    # Commit the session; on any failure roll back first so the session
    # is left clean, then let the exception propagate.
    try:
        DBSession.commit()
    except:
        _autocommit_rollback(req)
        raise
    # Only reached on a successful commit.
    _autocommit_fire_callbacks(req, req.commit_callbacks)
def import_videos_from_feed(self, feed):
    """Import every not-yet-imported video entry from a YouTube feed."""
    for entry in feed.entry:
        youtube_id = self.id_for_entry(entry)
        # Skip entries we've already imported (or otherwise filtered out).
        if not self._should_import_video(youtube_id):
            continue
        media = self._import_video(entry)
        # NOTE(review): method name is spelled `_video_notifcation` (sic);
        # presumably matches its definition elsewhere — confirm.
        self._video_notifcation(youtube_id)
        if media:
            DBSession.add(media)
            DBSession.flush()
def fetch_and_create_multi_setting(key, value):
    # Reuse an existing row with this exact key/value pair if one exists.
    existing = MultiSetting.query\
        .filter(MultiSetting.key==key)\
        .all()
    for setting in existing:
        if setting.value == value:
            return setting
    # No match found: create a new row (pending until the next flush).
    new_setting = MultiSetting(key, value)
    DBSession.add(new_setting)
    return new_setting
def example(cls, **kwargs):
    # Fixture factory: sensible defaults, overridable via kwargs.
    attrs = dict(
        name = u'baz_users',
        display_name = u'Baz Users',
    )
    attrs.update(kwargs)
    group = Group(**attrs)
    DBSession.add(group)
    # Flush so the instance has an ID before it's returned.
    DBSession.flush()
    return group
def example(cls, **kwargs):
    # Fixture factory: sensible defaults, overridable via kwargs.
    attrs = dict(
        name=u'baz_users',
        display_name=u'Baz Users',
    )
    attrs.update(kwargs)
    group = Group(**attrs)
    DBSession.add(group)
    # Flush so the instance has an ID before it's returned.
    DBSession.flush()
    return group
def fetch_and_create_tags(tag_names):
    """Return a list of Tag instances that match the given names.

    Tag names that don't yet exist are created automatically and
    returned alongside the results that did already exist.

    If you try to create a new tag that would have the same slug
    as an already existing tag, the existing tag is used instead.

    NOTE: mutates the caller's ``tag_names`` list in place.

    :param tag_names: The display :attr:`Tag.name`
    :type tag_names: list
    :returns: A list of :class:`Tag` instances.
    :rtype: :class:`TagList` instance

    """
    results = TagList()
    lower_names = [name.lower() for name in tag_names]
    slugs = [slugify(name) for name in lower_names]

    # Grab all the tags that exist already, whether its the name or slug
    # that matches. Slugs can be changed by the tag settings UI so we can't
    # rely on each tag name evaluating to the same slug every time.
    results = Tag.query.filter(
        sql.or_(func.lower(Tag.name).in_(lower_names),
                Tag.slug.in_(slugs))).all()

    # Filter out any tag names that already exist (case insensitive), and
    # any tag names evaluate to slugs that already exist.
    for tag in results:
        # Remove the match from our three lists until its completely gone.
        # The same index is popped from all three to keep them in sync.
        while True:
            try:
                try:
                    index = slugs.index(tag.slug)
                except ValueError:
                    index = lower_names.index(tag.name.lower())
                tag_names.pop(index)
                lower_names.pop(index)
                slugs.pop(index)
            except ValueError:
                # No remaining occurrence of this tag in either list.
                break

    # Any remaining tag names need to be created.
    if tag_names:
        # We may still have multiple tag names which evaluate to the same slug.
        # Load it into a dict so that duplicates are overwritten.
        uniques = dict((slug, name) for slug, name in izip(slugs, tag_names))
        # Do a bulk insert to create the tag rows.
        new_tags = [{'name': n, 'slug': s} for s, n in uniques.iteritems()]
        DBSession.execute(tags.insert(), new_tags)
        DBSession.flush()
        # Query for our newly created rows and append them to our result set.
        results += Tag.query.filter(Tag.slug.in_(uniques.keys())).all()

    return results
def save_thumb(self, id, thumb, **kwargs): """Save a thumbnail uploaded with :class:`~mediacore.forms.admin.ThumbForm`. :param id: Media ID. If ``"new"`` a new Media stub is created with :func:`~mediacore.model.media.create_media_stub`. :type id: ``int`` or ``"new"`` :param file: The uploaded file :type file: :class:`cgi.FieldStorage` or ``None`` :rtype: JSON dict :returns: success bool message Error message, if unsuccessful id The :attr:`~mediacore.model.media.Media.id` which is important if a new media has just been created. """ if id == 'new': media = create_media_stub() else: media = fetch_row(Media, id) try: # Create thumbs img = Image.open(thumb.file) if id == 'new': DBSession.add(media) DBSession.flush() # TODO: Allow other formats? for key, xy in config['thumb_sizes'][media._thumb_dir].iteritems(): thumb_path = helpers.thumb_path(media, key) thumb_img = helpers.resize_thumb(img, xy) thumb_img.save(thumb_path) # Backup the original image just for kicks backup_type = os.path.splitext(thumb.filename)[1].lower()[1:] backup_path = helpers.thumb_path(media, 'orig', ext=backup_type) backup_file = open(backup_path, 'w+b') thumb.file.seek(0) shutil.copyfileobj(thumb.file, backup_file) thumb.file.close() backup_file.close() success = True message = None except IOError: success = False message = 'Unsupported image type' except Exception, e: success = False message = e.message
def update_status(self, id, update_button=None, publish_on=None, **values):
    """Update the publish status for the given media.

    :param id: Media ID
    :type id: ``int``
    :param update_button: The text of the submit button which indicates
        that the :attr:`~mediacore.model.media.Media.status` should change.
    :type update_button: ``unicode`` or ``None``
    :param publish_on: A date to set to
        :attr:`~mediacore.model.media.Media.publish_on`
    :type publish_on: :class:`datetime.datetime` or ``None``
    :rtype: JSON dict
    :returns:
        success
            bool
        message
            Error message, if unsuccessful
        status_form
            Rendered XHTML for the status form, updated to reflect the
            changes made.

    """
    media = fetch_row(Media, id)
    new_slug = None

    # Make the requested change assuming it will be allowed
    if update_button == _('Review Complete'):
        media.reviewed = True
    elif update_button == _('Publish Now'):
        media.publishable = True
        # Publish immediately unless an explicit date was supplied.
        media.publish_on = publish_on or datetime.now()
        media.update_popularity()
        # Remove the stub prefix if the user wants the default media title
        if media.slug.startswith('_stub_'):
            new_slug = get_available_slug(Media, media.slug[len('_stub_'):])
            media.slug = new_slug
    elif publish_on:
        media.publish_on = publish_on
        media.update_popularity()

    # Verify the change is valid by re-determining the status
    media.update_status()
    DBSession.flush()

    if request.is_xhr:
        # Return the rendered widget for injection
        status_form_xhtml = unicode(update_status_form.display(
            action=url_for(action='update_status'), media=media))
        return dict(
            success = True,
            status_form = status_form_xhtml,
            slug = new_slug,
        )
    else:
        redirect(action='edit')
def backup_files(dump_dir):
    # Backup all files (media files, thumbs) referenced by an object in the DB
    # to the provided dump_dir. Returns a (status, message) tuple where
    # status 0 means success and 1 means the request was refused.
    # TODO: display errors when file operations fail
    if dump_dir == '/':
        return 1, "Dump Files directory should never be the root directory, '/'"

    # normalize dirname
    dump_dir = dump_dir.rstrip(os.sep) + os.sep

    # These are the directories we will write to.
    media_thumb_dir = dump_dir + Media._thumb_dir
    podcast_thumb_dir = dump_dir + Podcast._thumb_dir
    media_files_dir = dump_dir + 'media_files'

    # Initialize our default paths to backup
    default_images = ['news.jpg', 'newm.jpg', 'newl.jpg']
    media_thumbs = [m_img_dir+os.sep+img for img in default_images]
    podcast_thumbs = [p_img_dir+os.sep+img for img in default_images]
    media_files = []

    # Add the media thumbs and media files
    for media in DBSession.query(Media).all():
        file_paths = [file_path(f) for f in media.files]
        # file_path() may yield falsy values (e.g. remote files); skip them.
        media_files += [fp for fp in file_paths if fp]
        media_thumbs += thumb_paths(media).values()

    # Add the podcast thumbs
    for podcast in DBSession.query(Podcast).all():
        podcast_thumbs += thumb_paths(podcast).values()

    # Ensure the necessary directories exist.
    assert os.path.isdir(dump_dir)
    for subdir in (media_thumb_dir, media_files_dir, podcast_thumb_dir):
        if not os.path.exists(subdir):
            os.mkdir(subdir)
        assert os.path.isdir(subdir)
        # Start from a clean slate so stale backups don't linger.
        empty_dir(subdir)

    # Copy over all of the files:
    sources_dests = (
        (media_thumbs, media_thumb_dir),
        (media_files, media_files_dir),
        (podcast_thumbs, podcast_thumb_dir),
    )
    for sources, dest_dir in sources_dests:
        for src in sources:
            if DEBUG:
                print "Copying %s to %s%s" % (src, dest_dir, os.sep)
            shutil.copy2(src, dest_dir)

    return 0, '%d thumbnails and %d media files successfully backed up' %\
        (len(media_thumbs) + len(podcast_thumbs), len(media_files))
def fetch_and_create_tags(tag_names):
    """Return a list of Tag instances that match the given names.

    Tag names that don't yet exist are created automatically and
    returned alongside the results that did already exist.

    If you try to create a new tag that would have the same slug
    as an already existing tag, the existing tag is used instead.

    NOTE: mutates the caller's ``tag_names`` list in place.

    :param tag_names: The display :attr:`Tag.name`
    :type tag_names: list
    :returns: A list of :class:`Tag` instances.
    :rtype: :class:`TagList` instance

    """
    results = TagList()
    lower_names = [name.lower() for name in tag_names]
    slugs = [slugify(name) for name in lower_names]

    # Grab all the tags that exist already, whether its the name or slug
    # that matches. Slugs can be changed by the tag settings UI so we can't
    # rely on each tag name evaluating to the same slug every time.
    results = Tag.query.filter(sql.or_(func.lower(Tag.name).in_(lower_names),
                                       Tag.slug.in_(slugs))).all()

    # Filter out any tag names that already exist (case insensitive), and
    # any tag names evaluate to slugs that already exist.
    for tag in results:
        # Remove the match from our three lists until its completely gone.
        # The same index is popped from all three to keep them in sync.
        while True:
            try:
                try:
                    index = slugs.index(tag.slug)
                except ValueError:
                    index = lower_names.index(tag.name.lower())
                tag_names.pop(index)
                lower_names.pop(index)
                slugs.pop(index)
            except ValueError:
                # No remaining occurrence of this tag in either list.
                break

    # Any remaining tag names need to be created.
    if tag_names:
        # We may still have multiple tag names which evaluate to the same slug.
        # Load it into a dict so that duplicates are overwritten.
        uniques = dict((slug, name) for slug, name in izip(slugs, tag_names))
        # Do a bulk insert to create the tag rows.
        new_tags = [{'name': n, 'slug': s} for s, n in uniques.iteritems()]
        DBSession.execute(tags.insert(), new_tags)
        DBSession.flush()
        # Query for our newly created rows and append them to our result set.
        results += Tag.query.filter(Tag.slug.in_(uniques.keys())).all()

    return results
def example(cls, **kwargs):
    # Fixture factory: build a Media with defaults that kwargs may override.
    media = Media()
    attrs = dict(
        title=u"Foo Media",
        author=Author(u"Joe", u"*****@*****.**"),
        type=VIDEO,
    )
    attrs.update(kwargs)
    # Derive the slug from the (possibly overridden) title when absent.
    if "slug" not in attrs:
        attrs["slug"] = get_available_slug(Media, attrs["title"])
    for field, value in attrs.items():
        # Guard against typos in keyword overrides.
        assert hasattr(media, field)
        setattr(media, field, value)
    DBSession.add(media)
    DBSession.flush()
    return media
def update_status(self, id, status=None, publish_on=None, publish_until=None, **values):
    """Update the publish status for the given media.

    :param id: Media ID
    :type id: ``int``
    :param status: The requested state change: ``'unreviewed'`` marks
        the media reviewed; ``'draft'`` hands off to
        :meth:`_publish_media`.
    :type status: ``unicode`` or ``None``
    :param publish_on: A date to set to
        :attr:`~mediacore.model.media.Media.publish_on`
    :type publish_on: :class:`datetime.datetime` or ``None``
    :param publish_until: A date to set to
        :attr:`~mediacore.model.media.Media.publish_until`
    :type publish_until: :class:`datetime.datetime` or ``None``
    :rtype: JSON dict
    :returns:
        success
            bool
        message
            Error message, if unsuccessful
        status_form
            Rendered XHTML for the status form, updated to reflect the
            changes made.

    """
    media = fetch_row(Media, id)
    new_slug = None

    # Make the requested change assuming it will be allowed
    if status == 'unreviewed':
        media.reviewed = True
    elif status == 'draft':
        self._publish_media(media, publish_on)
    elif publish_on:
        media.publish_on = publish_on
        media.update_popularity()
    elif publish_until:
        media.publish_until = publish_until

    # Verify the change is valid by re-determining the status
    media.update_status()
    DBSession.flush()

    if request.is_xhr:
        # Return the rendered widget for injection
        status_form_xhtml = unicode(update_status_form.display(
            action=url_for(action='update_status'), media=media))
        return dict(
            success = True,
            status_form = status_form_xhtml,
            slug = new_slug,
        )
    else:
        redirect(action='edit')
def _update_settings(self, values):
    """Modify the settings associated with the given dictionary."""
    for name, value in values.iteritems():
        # Settings are persisted as unicode; None becomes the empty string.
        normalized = u'' if value is None else unicode(value)
        setting = self.settings[name]
        # Only dirty rows whose value actually changed.
        if setting.value != normalized:
            setting.value = normalized
            DBSession.add(setting)
    DBSession.flush()
def popularity_save(self, **kwargs):
    """Save :class:`~mediacore.forms.admin.settings.PopularityForm`.

    Updates the popularity for every media item based on the submitted
    values.
    """
    self._save(popularity_form, values=kwargs)
    # Recompute every media item's popularity under the new settings.
    for media in Media.query:
        media.update_popularity()
        DBSession.add(media)
    redirect(action='popularity')
def example(cls, **kwargs):
    # Fixture factory: sensible defaults, overridable via kwargs.
    attrs = dict(
        name=u'foo',
        description=u'foo permission',
        groups=None,
    )
    attrs.update(kwargs)
    permission = Permission(**attrs)
    DBSession.add(permission)
    # Flush so the instance has an ID before it's returned.
    DBSession.flush()
    return permission
def example(cls, **kwargs):
    # Fixture factory: sensible defaults, overridable via kwargs.
    attrs = dict(
        name=u'foo',
        description = u'foo permission',
        groups = None,
    )
    attrs.update(kwargs)
    permission = Permission(**attrs)
    DBSession.add(permission)
    # Flush so the instance has an ID before it's returned.
    DBSession.flush()
    return permission
def save_display(self, **kwargs):
    """Save :class:`~mediacore.forms.admin.settings.DisplayForm`."""
    # Remember the player type before the save so we can detect a change.
    old_player_type = c.settings['player_type'].value
    self._save(display_form, **kwargs)
    # If the player_type changes, we must update the Media.encoded flag,
    # since some things may play now and/or not play anymore with the
    # new setting.
    if old_player_type != c.settings['player_type'].value:
        for media in Media.query.options(orm.eagerload('files')):
            media.update_status()
            DBSession.add(media)
    redirect(action='display')
def cleanup_players_table(enabled=False):
    """
    Ensure that all available players are added to the database
    and that players are prioritized in incrementally increasing order.

    :param enabled: Should the default players be enabled upon creation?
    :type enabled: bool
    """
    from mediacore.lib.players import (BlipTVFlashPlayer,
        DailyMotionEmbedPlayer, GoogleVideoFlashPlayer, JWPlayer,
        VimeoUniversalEmbedPlayer, YoutubeFlashPlayer)

    # When adding players, prefer them in the following order:
    default_players = [
        JWPlayer,
        YoutubeFlashPlayer,
        VimeoUniversalEmbedPlayer,
        GoogleVideoFlashPlayer,
        BlipTVFlashPlayer,
        DailyMotionEmbedPlayer,
    ]
    # Any other registered players come after the preferred set.
    unordered_players = [p for p in AbstractPlayer if p not in default_players]
    all_players = default_players + unordered_players

    # fetch the players that are already in the database
    s = players.select().order_by('priority')
    existing_players_query = DBSession.execute(s)
    existing_player_rows = [p for p in existing_players_query]
    existing_player_names = [p['name'] for p in existing_player_rows]

    # Ensure all priorities are monotonically increasing from 1..n
    priority = 0
    for player_row in existing_player_rows:
        priority += 1
        if player_row['priority'] != priority:
            u = players.update()\
                .where(players.c.id == player_row['id'])\
                .values(priority=priority)
            DBSession.execute(u)

    # Ensure that all available players are in the database
    for player_cls in all_players:
        if player_cls.name not in existing_player_names:
            # New rows are appended after all existing priorities, and are
            # only enabled when requested AND the player is a default one.
            enable_player = enabled and player_cls in default_players
            priority += 1
            DBSession.execute(players.insert().values(
                name=player_cls.name,
                enabled=enable_player,
                data=player_cls.default_data,
                priority=priority,
            ))
def cleanup_players_table(enabled=False):
    """
    Ensure that all available players are added to the database
    and that players are prioritized in incrementally increasing order.

    :param enabled: Should the default players be enabled upon creation?
    :type enabled: bool
    """
    from mediacore.lib.players import (
        BlipTVFlashPlayer,
        DailyMotionEmbedPlayer,
        GoogleVideoFlashPlayer,
        JWPlayer,
        VimeoUniversalEmbedPlayer,
        YoutubePlayer,
    )

    # When adding players, prefer them in the following order:
    default_players = [
        JWPlayer,
        YoutubePlayer,
        VimeoUniversalEmbedPlayer,
        GoogleVideoFlashPlayer,
        BlipTVFlashPlayer,
        DailyMotionEmbedPlayer,
    ]
    # Any other registered players come after the preferred set.
    unordered_players = [p for p in AbstractPlayer if p not in default_players]
    all_players = default_players + unordered_players

    # fetch the players that are already in the database
    s = players.select().order_by("priority")
    existing_players_query = DBSession.execute(s)
    existing_player_rows = [p for p in existing_players_query]
    existing_player_names = [p["name"] for p in existing_player_rows]

    # Ensure all priorities are monotonically increasing from 1..n
    priority = 0
    for player_row in existing_player_rows:
        priority += 1
        if player_row["priority"] != priority:
            u = players.update().where(players.c.id == player_row["id"]).values(priority=priority)
            DBSession.execute(u)

    # Ensure that all available players are in the database
    for player_cls in all_players:
        if player_cls.name not in existing_player_names:
            # New rows are appended after all existing priorities, and are
            # only enabled when requested AND the player is a default one.
            enable_player = enabled and player_cls in default_players
            priority += 1
            DBSession.execute(
                players.insert().values(
                    name=player_cls.name, enabled=enable_player, data=player_cls.default_data, priority=priority
                )
            )
def delete(self, id, **kwargs):
    """Delete a group.

    :param id: Group ID.
    :type id: ``int``
    :returns: Redirect back to :meth:`index` after successful delete.
    """
    DBSession.delete(fetch_row(Group, id))
    # XHR callers get a JSON acknowledgement; browsers get redirected.
    if request.is_xhr:
        return dict(success=True)
    redirect(action='index', id=None)
def init_model(engine, table_prefix=None):
    """Call me before using any of the tables or classes in the model."""
    from mediacore.model import meta
    # Bind the scoped session and the shared metadata to the engine.
    DBSession.configure(bind=engine)
    meta.metadata.bind = engine
    meta.engine = engine

    # Change all table names to include the given prefix. This can't be
    # easily done before the models are added to the metadata because
    # that happens on import, before the config is available.
    if table_prefix:
        normalized_prefix = table_prefix.rstrip('_') + '_'
        for table in meta.metadata.sorted_tables:
            table.name = normalized_prefix + table.name
def delete(self, id, **kwargs):
    """Delete a user.

    :param id: User ID.
    :type id: ``int``
    :returns: Redirect back to :meth:`index` after successful delete.
    """
    DBSession.delete(fetch_row(User, id))
    # XHR callers get a JSON acknowledgement; browsers get redirected.
    if request.is_xhr:
        return dict(success=True)
    redirect(action='index', id=None)
def example(cls, **kwargs):
    # Fixture factory: build a Category with defaults kwargs may override.
    category = Category()
    attrs = dict(name=u'Foo', parent_id=0)
    attrs.update(kwargs)
    # Derive the slug from the (possibly overridden) name when absent.
    if 'slug' not in attrs:
        attrs['slug'] = get_available_slug(Category, attrs['name'])
    for field, value in attrs.items():
        # Guard against typos in keyword overrides.
        assert hasattr(category, field)
        setattr(category, field, value)
    DBSession.add(category)
    DBSession.flush()
    return category
def createMediaItem(self, title, author_email=None, author_name=None,
                    slug=None, tags=None, podcast_id=None,
                    category_ids=None, meta=None, **kwargs):
    """Create and persist a new Media item.

    :param title: Display title; also the slug fallback.
    :param meta: Optional JSON-encoded dict of extra metadata.
    :returns: ``{"success": True, "id": ...}`` on success, or
        ``{"success": False, "message": ...}`` when ``meta`` is not
        valid JSON.
    """
    media = Media()
    log.info("createMediaItem({title})".format(title=title))

    # Derive a usable slug: fall back to the title, and strip the
    # placeholder prefix used for stub entries.
    if not slug:
        slug = title
    elif slug.startswith('_stub_'):
        slug = slug[len('_stub_'):]
    if slug != media.slug:
        media.slug = get_available_slug(Media, slug, media)

    podcast_id = int(podcast_id) if podcast_id else 0

    # `meta` arrives as a JSON string; reject it early if unparsable.
    if not meta:
        meta = {}
    else:
        try:
            meta = json.loads(meta)
        except Exception:
            return {
                "success": False,
                "message": "Invalid JSON object given for `meta`"
            }

    media.title = title
    media.author = Author(author_name or "No Author",
                          author_email or "No Email")
    media.podcast_id = podcast_id or None
    media.set_tags(tags)
    media.set_categories(category_ids)
    media.update_status()
    media.meta = meta

    DBSession.add(media)
    DBSession.flush()
    return {"success": True, "id": media.id}
def example(cls, **kwargs):
    """Create, persist and return an example User.

    Keyword arguments override the built-in defaults.
    """
    user = User()
    defaults = dict(
        user_name=u'joe',
        email_address=u'*****@*****.**',
        display_name=u'Joe Smith',
        created=datetime.now(),
    )
    defaults.update(kwargs)
    for key, value in defaults.items():
        # Guard against typos in kwargs, matching the other model
        # example() factories (Category, Media).
        assert hasattr(user, key)
        setattr(user, key, value)
    DBSession.add(user)
    DBSession.flush()
    return user
class Tag(object):
    """
    Tag (keyword) for labelling content

    .. attribute:: id

    .. attribute:: name

        Display name

    .. attribute:: slug

        A unique URL-friendly permalink string for looking up this object.

    .. attribute:: media_content

    .. attribute:: media_count_published

    """
    query = DBSession.query_property()

    def __init__(self, name=None, slug=None):
        # Coerce empty strings to None; a missing slug falls back to
        # the name (and then to None as well).
        self.name = name or None
        self.slug = slug or name or None

    @validates('slug')
    def validate_slug(self, key, slug):
        # Normalize any assigned slug into its URL-friendly form.
        return slugify(slug)

    def __unicode__(self):
        return self.name

    def __repr__(self):
        return '<Tag: %s>' % self.name
def fetch_enabled_players():
    """Return player classes and their data dicts in ascending priority.

    Warnings are logged any time a row is found that does not match up to
    one of the classes that are currently registered. A warning will also
    be raised if there are no players configured/enabled.

    :rtype: list of tuples
    :returns: :class:`~mediacore.lib.players.AbstractPlayer` subclasses
        and the configured data associated with them.
    """
    player_classes = dict((p.name, p) for p in AbstractPlayer)
    query = sql.select((players.c.name, players.c.data))\
        .where(players.c.enabled == True)\
        .order_by(players.c.priority.asc(), players.c.id.desc())

    # Build the result in one pass. The previous implementation rebuilt
    # the whole list after every unknown row (O(n^2)) and relied on the
    # comprehension variables leaking out of scope (a Python 2-only
    # accident); filtering row by row is equivalent and robust.
    enabled = []
    for name, data in DBSession.execute(query).fetchall():
        try:
            enabled.append((player_classes[name], data))
        except KeyError:
            # Stale row for a player that is no longer registered:
            # skip it but leave a trail for the administrator.
            log.warn('Player name %r exists in the database but has not '
                     'been registered.' % name)
    if not enabled:
        log.warn('No registered players are configured in your database.')
    return enabled
class Vote(object):
    """Vote Model
    """
    query = DBSession.query_property(VoteQuery)

    def __repr__(self):
        return '<Vote: %r media=%r user=%r>' % (
            self.id, self.media_id, self.user_name)

    def __unicode__(self):
        return 'Vote %r' % self.id

    def increment_likes(self):
        # Bump the like counter and hand back the new total.
        self.likes = self.likes + 1
        return self.likes

    def increment_dislikes(self):
        # Bump the dislike counter and hand back the new total.
        self.dislikes = self.dislikes + 1
        return self.dislikes

    def _get_parent(self):
        return self.media or None

    def _set_parent(self, parent):
        self.media = parent

    parent = property(_get_parent, _set_parent, None, """
        The object this Vote belongs to, provided for convenience mostly.
        If the parent has not been eagerloaded, a query is executed
        automatically.
        """)
def get_available_slug(mapped_class, string, ignore=None):
    """Return a unique slug based on the provided string.

    Works by appending an int in sequence starting with 2:

    1. awesome-stuff
    2. awesome-stuff-2
    3. awesome-stuff-3

    :param mapped_class: The ORM-controlled model that the slug is for
    :param string: A title, name, etc
    :type string: unicode
    :param ignore: A record which doesn't count as a collision
    :type ignore: Int ID, ``mapped_class`` instance or None
    :returns: A unique slug
    :rtype: unicode
    """
    # Normalize `ignore` down to a plain integer ID (or None).
    if isinstance(ignore, mapped_class):
        ignore = ignore.id
    elif ignore is not None:
        ignore = int(ignore)

    def collision(candidate):
        # The first row (other than `ignore`) already owning this slug,
        # or None when the candidate is free.
        return DBSession.query(mapped_class.id)\
            .filter(mapped_class.slug == candidate)\
            .filter(mapped_class.id != ignore)\
            .first()

    base = slugify(string)
    candidate = base
    suffix_num = 2
    while collision(candidate):
        suffix = u'-%s' % suffix_num
        # Truncate the base so the suffixed slug still fits the column.
        candidate = base[:SLUG_LENGTH - len(suffix)] + suffix
        suffix_num += 1
    return candidate
class MediaFile(object):
    """
    Audio or Video File
    """
    meta = association_proxy('_meta', 'value', creator=MediaFilesMeta)
    query = DBSession.query_property(MediaFileQuery)

    def __repr__(self):
        return '<MediaFile: %r %r unique_id=%r>' % (
            self.type, self.storage.display_name, self.unique_id)

    @property
    def mimetype(self):
        """The best-guess mimetype based on this file's container format.

        Defaults to 'application/octet-stream'.
        """
        # Audio descriptions use the same container formats as plain
        # audio, so map them onto AUDIO for the mimetype guess.
        kind = self.type
        if kind == AUDIO_DESC:
            kind = AUDIO
        return guess_mimetype(self.container, kind)

    def get_uris(self):
        """Return a list all possible playback URIs for this file.

        :rtype: list
        :returns: :class:`mediacore.lib.storage.StorageURI` instances.
        """
        return self.storage.get_uris(self)
def example(cls, **kwargs):
    """Create, persist and return an example Media item.

    Keyword arguments override the built-in defaults; a slug is derived
    from the title when none is supplied.
    """
    values = {
        'title': u'Foo Media',
        'author': Author(u'Joe', u'*****@*****.**'),
        'type': None,
    }
    values.update(kwargs)
    values.setdefault('slug', get_available_slug(Media, values['title']))

    media = Media()
    for attr, value in values.items():
        # Guard against typos in kwargs: only real attributes may be set.
        assert hasattr(media, attr)
        setattr(media, attr, value)

    DBSession.add(media)
    DBSession.flush()
    return media