示例#1
0
 def _get_last_activity_async(self, bundle_id, properties):
     """Start an asynchronous journal query for entries of *bundle_id*.

     Results are delivered to the reply/error handler callbacks.
     """
     # Sorted by timestamp, capped at the resume-menu size.
     datastore.find(
         {'activity': bundle_id},
         sorting=['+timestamp'],
         limit=self._MAX_RESUME_ENTRIES,
         properties=properties,
         reply_handler=self.__get_last_activity_reply_handler_cb,
         error_handler=self.__get_last_activity_error_handler_cb)
示例#2
0
 def _get_last_activity_async(self, bundle_id, properties):
     """Query the datastore asynchronously for entries of *bundle_id*."""
     # Completion is reported through these callbacks.
     handlers = {
         'reply_handler': self.__get_last_activity_reply_handler_cb,
         'error_handler': self.__get_last_activity_error_handler_cb,
     }
     datastore.find({'activity': bundle_id},
                    sorting=['+timestamp'],
                    limit=self._MAX_RESUME_ENTRIES,
                    properties=properties,
                    **handlers)
示例#3
0
    def _resume_activity(self, text, pattern, name, day):
        """Resume the most recent matching journal entry from a voice command.

        text -- the raw recognized voice command
        pattern -- the matched pattern (not used in this method)
        name -- activity name mentioned in the command, or falsy
        day -- day word ('journal', 'yesterday' or a weekday name)
        """
        logging.warning('Voice command: %s' % text)
        logging.warning('Activity: %s' % name)
        logging.warning('Day: %s' % day)

        properties = ['uid', 'title', 'icon-color', 'activity', 'activity_id',
                      'mime_type', 'mountpoint', 'timestamp']

        timestamp = None
        t = date.today()

        # Unless the whole journal is requested, restrict the query to the
        # 24-hour window of the named day.  Words are passed through _()
        # so the comparison happens on the localized strings.
        if not _(day) == _('journal'):
            if _(day) == _('yesterday'):
                delta = -1
            else:
                # Negative offset back to the most recent occurrence of
                # that weekday (result is in -7..-1).
                delta = abs(t.weekday() - _WEEK_DAYS.index(_(day))) - 7

            d = t + timedelta(days=delta)
            n = d + timedelta(days=1)
            start = time.mktime(d.timetuple())
            end = time.mktime(n.timetuple())
            timestamp = {'start': start, 'end':end}

        query = {}
        if name:
            # Map the spoken activity name to its bundle id.
            query['activity'] = _NAME_TO_ID.get(_(name))
        if timestamp:
            query['timestamp'] = timestamp

        # Asynchronous find; the reply handler performs the actual resume.
        datastore.find(query, sorting=['+timestamp'],
                   limit=1,
                   properties=properties,
                   reply_handler=self.__get_last_activity_reply_handler_cb,
                   error_handler=self.__get_last_activity_error_handler_cb)
    def get_entry_info_format(self, query, mime):
        """Search the local datastore for entries of *mime* type.

        query -- optional full-text filter; None or '' means no filter
        Returns a list of opds.Book objects.
        """
        search = {'mime_type': '%s' % mime}
        if query is not None and len(query) > 0:
            search['query'] = '*%s*' % query
        ds_objects, num_objects = datastore.find(search)

        logging.error('Local search %d books found %s format', num_objects,
                      mime)

        books = []
        for i in range(0, num_objects):
            meta = ds_objects[i].metadata
            entry = {
                'title': meta['title'],
                'mime': meta['mime_type'],
                'object_id': ds_objects[i].object_id,
            }

            # Optional metadata fields fall back to the empty string.
            for src, dst in (('author', 'author'),
                             ('publisher', 'dcterms_publisher'),
                             ('language', 'dcterms_language'),
                             ('source', 'source')):
                entry[dst] = meta[src] if src in meta else ''

            repo_configuration = None
            if entry['source'] in _SOURCES_CONFIG:
                repo_configuration = _SOURCES_CONFIG[entry['source']]
                summary_field = repo_configuration['summary_field']
                if 'summary' in meta:
                    entry[summary_field] = meta['summary']
                else:
                    entry[summary_field] = ''
            books.append(opds.Book(repo_configuration, entry, ''))
        return books
    def get_entry_info_format(self, query, mime):
        """Return opds.Book objects for local datastore entries of *mime*
        type, optionally filtered by a full-text *query*."""
        if query is not None and len(query) > 0:
            ds_objects, num_objects = datastore.find(
                {'mime_type': '%s' % mime, 'query': '*%s*' % query})
        else:
            ds_objects, num_objects = datastore.find(
                {'mime_type': '%s' % mime})

        logging.error('Local search %d books found %s format', num_objects,
                      mime)

        # Optional metadata keys mapped onto their entry-dict names.
        optional = (('author', 'author'),
                    ('publisher', 'dcterms_publisher'),
                    ('language', 'dcterms_language'),
                    ('source', 'source'))

        books = []
        for idx in range(0, num_objects):
            metadata = ds_objects[idx].metadata
            entry = {}
            entry['title'] = metadata['title']
            entry['mime'] = metadata['mime_type']
            entry['object_id'] = ds_objects[idx].object_id
            for key, name in optional:
                entry[name] = metadata[key] if key in metadata else ''

            repo_configuration = None
            if entry['source'] in _SOURCES_CONFIG:
                repo_configuration = _SOURCES_CONFIG[entry['source']]
                summary_field = repo_configuration['summary_field']
                if 'summary' in metadata:
                    entry[summary_field] = metadata['summary']
                else:
                    entry[summary_field] = ''
            books.append(opds.Book(repo_configuration, entry, ''))
        return books
示例#6
0
    def run_activity(self, bundle_id, resume_mode):
        """Launch *bundle_id*; in resume mode, resume its latest entry."""
        if resume_mode:
            # Look up the most recent journal entry for this activity;
            # the reply handler takes care of the actual resume.
            self._activity_selected = bundle_id
            properties = ['uid', 'title', 'icon-color', 'activity',
                          'activity_id', 'mime_type', 'mountpoint']
            datastore.find(
                {'activity': bundle_id},
                sorting=['+timestamp'],
                limit=1, properties=properties,
                reply_handler=self.__get_last_activity_reply_handler_cb,
                error_handler=self.__get_last_activity_error_handler_cb)
            return
        # Fresh start: launch the bundle directly.
        bundle = bundleregistry.get_registry().get_bundle(bundle_id)
        misc.launch(bundle)
示例#7
0
def publish(activity, force=False):
    """Package the 'ready' custom articles into a content bundle.

    activity -- the Sugar activity doing the publishing
    force -- overwrite a previously created bundle without asking
    """
    # Nothing marked ready in the custom panel: tell the user and bail out.
    if not [i for i in book.custom.index if i['ready']]:
        alert = NotifyAlert(5)
        alert.props.title = _('Nothing to publish')
        alert.props.msg = _('Mark arcticles from "Custom" '
                            'panel and try again.')
        alert.connect('response', __alert_notify_response_cb, activity)
        activity.add_alert(alert)
        alert.show()
        return

    title = activity.metadata['title']
    # find() returns (results, count); an empty result list maps to None.
    jobject = datastore.find({
        'activity_id': activity.get_id(),
        'activity': book.custom.uid
    })[0] or None

    logger.debug('publish: title=%s jobject=%s force=%s' \
            % (title, jobject and jobject[0].metadata['activity'], force))

    if jobject:
        if force:
            jobject = jobject[0]
        else:
            # Ask before overwriting; the response callback presumably
            # re-enters with force=True -- confirm against the callback.
            alert = ConfirmationAlert()
            alert.props.title = _('Overwrite existed bundle?')
            alert.props.msg = _(
                'A bundle for current object was already created. '
                'Click "OK" to overwrite it.')
            alert.connect('response', __alert_response_cb, activity, True)
            activity.add_alert(alert)
            alert.show()
            jobject[0].destroy()
            return
    else:
        # First publish for this object: create a fresh journal entry.
        jobject = datastore.create()
        jobject.metadata['activity_id'] = activity.get_id()
        jobject.metadata['activity'] = book.custom.uid
        jobject.metadata['mime_type'] = 'application/vnd.olpc-content'
        jobject.metadata['description'] = \
                'This is a bundle containing articles on %s.\n' \
                'To view these articles, open the \'Browse\' Activity.\n' \
                'Go to \'Books\', and select \'%s\'.' % (title, title)

    book.custom.sync_article()
    book.custom.revision += 1

    jobject.metadata['title'] = title
    _publish(title, jobject)
    # Release the local handle once the bundle has been written.
    jobject.destroy()

    book.custom.sync_index()

    alert = NotifyAlert()
    alert.props.title = _('Book published to your Journal')
    alert.props.msg = _('You can read the book in Browse or '
                        'access the .xol file from your Journal')
    alert.connect('response', __alert_notify_response_cb, activity)
    activity.add_alert(alert)
    alert.show()
示例#8
0
 def _find_starred(self):
     ''' Find all the _stars in the Journal. '''
     self.dsobjects, self._nobjects = datastore.find({'keep': '1'})
     for dsobj in self.dsobjects:
         # Skip entries we already track -- TODO: update them instead.
         if not self._found_obj_id(dsobj.object_id):
             self._add_new_from_journal(dsobj)
示例#9
0
 def set_store(self, mountpoint, pth):
     '''Build a ListStore of image entries found on *mountpoint*.

     Columns: object id, title, mime type, timestamp.
     pth is only logged here.
     '''
     print 'set_store', mountpoint, pth
     store = Gtk.ListStore(str, str, str, str)
     # get objects from the local datastore
     ds_objects, num_objects = datastore.find({
         'mountpoints': [mountpoint],
         'mime_type': ['image/jpg', 'image/png', 'image/svg', 'image/jpeg']
     })
     for f in ds_objects:
         # Each metadata field is fetched defensively; missing values
         # fall back to a safe default.
         try:
             object = f.object_id
         except BaseException:
             print 'set object_id failed'
         try:
             title = f.metadata['title']
         except BaseException:
             title = ""
         try:
             mime_type = f.metadata['mime_type']
         except BaseException:
             mime_type = 'unknown'
         try:
             t = f.metadata['timestamp']
             timestamp = datetime.fromtimestamp(t)
         except BaseException:
             timestamp = ""
         store.append([object, title, mime_type, timestamp])
         print 'store.append', object, title, mime_type, timestamp
         # Release the datastore object once copied into the store.
         f.destroy()
     return store
示例#10
0
def get_odt():
    """Return the file paths of all OpenDocument Text journal entries."""
    dsobjects, _count = datastore.find(
        {'mime_type': ['application/vnd.oasis.opendocument.text']})
    return [dsobject.file_path for dsobject in dsobjects]
示例#11
0
def get_image():
    """Return the file paths of all PNG/JPEG journal entries."""
    dsobjects, _count = datastore.find(
        {'mime_type': ['image/png', 'image/jpeg']})
    return [obj.file_path for obj in dsobjects]
示例#12
0
 def _find_starred(self):
     ''' Find all the _stars in the Journal. '''
     self.dsobjects, self._nobjects = datastore.find({'keep': '1'})
     for dsobj in self.dsobjects:
         # Only new objects are added -- TODO: update existing ones.
         if not self._found_obj_id(dsobj.object_id):
             self._add_new_from_journal(dsobj)
示例#13
0
    def _get_image_list(self):
        """Load all image entries from the journal and show the first one.

        Always returns False (suitable as a one-shot idle/timeout
        callback -- TODO confirm against the caller).
        """
        value = mime.GENERIC_TYPE_IMAGE
        mime_types = mime.get_generic_type(value).mime_types
        (self.image_list,
         self.image_count) = datastore.find({'mime_type': mime_types})
        self.unbusy()

        if self.image_count == 0:
            # start new, or resume empty; with no images in journal
            # leave the "No image" message visible
            return False

        if self.image_count > 1:
            # start new, or resume empty; with more than one image in journal
            # add image choosing buttons to toolbar box
            self.list_set_visible(self._traverse_widgets, True)

        # start new, or resume empty; with at least one image in journal
        # display the first image
        self.current_image_index = 0
        self._change_image(0)
        self.traverse_update_sensitive()

        self.set_canvas(self.scrolled_window)
        self.scrolled_window.show()

        return False
示例#14
0
def get_image():
    """Return the file paths of all PNG and JPEG entries in the journal."""
    found, _total = datastore.find({'mime_type': ['image/png',
                                                  'image/jpeg']})
    return [entry.file_path for entry in found]
示例#15
0
 def set_store(self, src):
     '''Build a ListStore of presentations from *src*.

     src -- "datastore" for journal entries, "activity" for files
     bundled with the activity.  Columns: name/id, title, timestamp.
     '''
     print 'set_store', src
     store = Gtk.ListStore(str, str, str)
     # get objects from the local datastore
     if src == "datastore":
         ds_objects, num_objects = datastore.find(
             {'mime_type': ['application/x-classroompresenter']})
         for f in ds_objects:
             # Metadata is fetched defensively; missing values fall
             # back to safe defaults.
             try:
                 object = f.object_id
             except BaseException:
                 print 'find object_id failed'
             try:
                 title = f.metadata['title']
             except BaseException:
                 title = ""
             try:
                 t = f.metadata['timestamp']
                 timestamp = datetime.fromtimestamp(t)
             except BaseException:
                 timestamp = ""
             store.append([object, title, timestamp])
             # Release the datastore handle once copied into the store.
             f.destroy()
     elif src == "activity":
         # source is activity bundle
         srcdir = path(activity.get_bundle_path()) / \
             'resources' / 'Presentations'
         for f in srcdir.files('*.cpxo'):
             store.append([f.name, "", f.getctime()])
     else:
         print 'error in src', src
     print 'return cpxo store'
     return store
示例#16
0
def get_rtf():
    """Return the file paths of all RTF journal entries."""
    dsobjects, _count = datastore.find(
        {'mime_type': ['text/rtf', 'application/rtf']})
    return [obj.file_path for obj in dsobjects]
示例#17
0
def get_rtf():
    """Return file paths for every RTF entry in the journal."""
    found, _total = datastore.find({'mime_type': ['text/rtf',
                                                  'application/rtf']})
    return [entry.file_path for entry in found]
示例#18
0
def get_odt():
    """Return file paths for every ODT entry in the journal."""
    found, _total = datastore.find(
        {'mime_type': ['application/vnd.oasis.opendocument.text']})
    return [entry.file_path for entry in found]
示例#19
0
def publish(activity, force=False):
    """Package the 'ready' custom articles into a content bundle.

    activity -- the Sugar activity doing the publishing
    force -- overwrite a previously created bundle without asking
    """
    # Nothing marked ready in the custom panel: tell the user and bail out.
    if not [i for i in book.custom.index if i['ready']]:
        alert = NotifyAlert(5)
        alert.props.title = _('Nothing to publish')
        alert.props.msg = _('Mark arcticles from "Custom" '
                            'panel and try again.')
        alert.connect('response', __alert_notify_response_cb, activity)
        activity.add_alert(alert)
        alert.show()
        return

    title = activity.metadata['title']
    # find() returns (results, count); an empty result list maps to None.
    jobject = datastore.find({
            'activity_id': activity.get_id(),
            'activity'   : book.custom.uid})[0] or None

    logger.debug('publish: title=%s jobject=%s force=%s' \
            % (title, jobject and jobject[0].metadata['activity'], force))

    if jobject:
        if force:
            jobject = jobject[0]
        else:
            # Ask before overwriting; the response callback presumably
            # re-enters with force=True -- confirm against the callback.
            alert = ConfirmationAlert()
            alert.props.title = _('Overwrite existed bundle?')
            alert.props.msg = _('A bundle for current object was already created. '
                                'Click "OK" to overwrite it.')
            alert.connect('response', __alert_response_cb, activity, True)
            activity.add_alert(alert)
            alert.show()
            jobject[0].destroy()
            return
    else:
        # First publish for this object: create a fresh journal entry.
        jobject = datastore.create()
        jobject.metadata['activity_id'] = activity.get_id()
        jobject.metadata['activity'] = book.custom.uid
        jobject.metadata['mime_type'] = 'application/vnd.olpc-content'
        jobject.metadata['description'] = \
                'This is a bundle containing articles on %s.\n' \
                'To view these articles, open the \'Browse\' Activity.\n' \
                'Go to \'Books\', and select \'%s\'.' % (title, title)

    book.custom.sync_article()
    book.custom.revision += 1

    jobject.metadata['title'] = title
    _publish(title, jobject)
    # Release the local handle once the bundle has been written.
    jobject.destroy()

    book.custom.sync_index()

    alert = NotifyAlert()
    alert.props.title = _('Book published to your Journal')
    alert.props.msg = _('You can read the book in Browse or '
                        'access the .xol file from your Journal')
    alert.connect('response', __alert_notify_response_cb, activity)
    activity.add_alert(alert)
    alert.show()
示例#20
0
 def _activities(self):
     """Group matching journal entries by their 'activity' metadata id.

     Returns a dict mapping activity id -> list of instances.
     """
     entries, _count = datastore.find(self._query())
     activities = {}
     for entry in entries:
         bundle_id = entry.metadata.get('activity', '')
         activities.setdefault(bundle_id, []).append(self._instance(entry))
     return activities
示例#21
0
    def get_datastore_list(self, next=False):
        """Collect journal entries matching the activity's mime types.

        next is only used by the commented-out paging code below.
        Returns [] on datastore errors.
        """
        dslist = []
        """
        if not next:
            self.journal_page_num -= 1
        else:
            self.journal_page_num += 1
        if self.journal_page_num < 0: self.journal_page_num = 0
        if self.journal_max != 0:
            if self.journal_page_num * self.journal_page_size >= self.journal_max:
                self.journal_page_num -= 1
        #_logger.debug('fetch datastore data. limit:%s. offset: %s'%(self.limit,self.journal_page_num * self.journal_page_size))
        #(results,count)=datastore.find({'limit':self.limit,'offset':self.journal_page_num * self.journal_page_size})
        """
        self.journal_model.clear()
        ds_list = []
        num_found = 0
        mime_list = [self._activity.MIME_TYPE, 'application/zip']

        #build 650 doesn't seem to understand correctly the dictionary with a list right hand side
        info = self._activity.util.sugar_version()
        if len(info) > 0:
            (major, minor, micro, release) = info
            _logger.debug(
                'sugar version major:%s minor:%s micro:%s release:%s' % info)
        else:
            _logger.debug('sugar version failure')
            # Unknown version: assume an old build and take the fallback path.
            minor = 70

        try:
            if minor > 80:
                (ds_list, num_found) = datastore.find({'mime_type': mime_list})
            else:
                # Old builds can't take a list of mime types: query each
                # type separately and merge the results.
                (results, count) = datastore.find(
                    {'mime_type': self._activity.MIME_TYPE})
                ds_list.extend(results)
                num_found += count
                # NOTE(review): the count from this second query is never
                # added to num_found -- confirm whether that is intended.
                (results,
                 count) = datastore.find({'mime_type': 'application/zip'})
                ds_list.extend(results)
        except Exception, e:
            _logger.error('datastore error %s' % e)
            return []
示例#22
0
 def get_length(self):
     """Return the total number of entries matching the query.

     Lazily runs the query once; the first page of results primes the
     entry cache as a side effect.
     """
     if self._total_count == -1:
         # First call: run the query and cache the initial window.
         jobjects, self._total_count = datastore.find(
             self._query,
             sorting=self._sorting,
             limit=ResultSet._CACHE_LIMIT,
             properties=PROPERTIES)
         self._cache.append_all(jobjects)
         self._offset = 0
     return self._total_count
示例#23
0
def get_most_recent_instance(bundle_id):
    """Return the journal entry for *bundle_id* with the latest launch time.

    Returns None when the journal has no entry for that activity.
    """
    dsobjects, nobjects = datastore.find({'activity': [bundle_id]})
    most_recent_time = -1
    most_recent_instance = None
    for activity in dsobjects:
        last_launch_time = get_last_launch_time(activity)
        if last_launch_time > most_recent_time:
            # Reuse the value already computed above instead of calling
            # get_last_launch_time a second time.
            most_recent_time = last_launch_time
            most_recent_instance = activity
    return most_recent_instance
示例#24
0
def get_most_recent_instance(bundle_id):
    """Return the journal entry for *bundle_id* with the latest launch time.

    Returns None when the journal has no entry for that activity.
    """
    dsobjects, nobjects = datastore.find({'activity': [bundle_id]})
    most_recent_time = -1
    most_recent_instance = None
    for activity in dsobjects:
        last_launch_time = get_last_launch_time(activity)
        if last_launch_time > most_recent_time:
            # Reuse the value computed above; avoids a redundant lookup.
            most_recent_time = last_launch_time
            most_recent_instance = activity
    return most_recent_instance
    def __init__(self, bundle, handle):
        """Initialise the handler

        bundle -- the ActivityBundle to launch
        activity_handle -- stores the values which are to
            be passed to the service to uniquely identify
            the activity to be created and the sharing
            service that may or may not be connected with it

            sugar3.activity.activityhandle.ActivityHandle instance

        calls the "create" method on the service for this
        particular activity type and registers the
        _reply_handler and _error_handler methods on that
        call's results.

        The specific service which creates new instances of this
        particular type of activity is created during the activity
        registration process in shell bundle registry which creates
        service definition files for each registered bundle type.

        If the file '/etc/olpc-security' exists, then activity launching
        will be delegated to the prototype 'Rainbow' security service.
        """
        GObject.GObject.__init__(self)

        self._bundle = bundle
        self._service_name = bundle.get_bundle_id()
        self._handle = handle

        # D-Bus proxy to the shell service that actually starts activities.
        bus = dbus.SessionBus()
        bus_object = bus.get_object(_SHELL_SERVICE, _SHELL_PATH)
        self._shell = dbus.Interface(bus_object, _SHELL_IFACE)

        # With an activity id but no object id, look asynchronously for an
        # existing journal entry to resume before launching fresh.
        if handle.activity_id is not None and handle.object_id is None:
            datastore.find({'activity_id': self._handle.activity_id},
                           reply_handler=self._find_object_reply_handler,
                           error_handler=self._find_object_error_handler)
        else:
            self._launch_activity()
    def __init__(self, bundle, handle):
        """Initialise the handler

        bundle -- the ActivityBundle to launch
        activity_handle -- stores the values which are to
            be passed to the service to uniquely identify
            the activity to be created and the sharing
            service that may or may not be connected with it

            sugar3.activity.activityhandle.ActivityHandle instance

        calls the "create" method on the service for this
        particular activity type and registers the
        _reply_handler and _error_handler methods on that
        call's results.

        The specific service which creates new instances of this
        particular type of activity is created during the activity
        registration process in shell bundle registry which creates
        service definition files for each registered bundle type.

        If the file '/etc/olpc-security' exists, then activity launching
        will be delegated to the prototype 'Rainbow' security service.
        """
        GObject.GObject.__init__(self)

        self._bundle = bundle
        self._service_name = bundle.get_bundle_id()
        self._handle = handle

        # D-Bus proxy to the shell service that actually starts activities.
        bus = dbus.SessionBus()
        bus_object = bus.get_object(_SHELL_SERVICE, _SHELL_PATH)
        self._shell = dbus.Interface(bus_object, _SHELL_IFACE)

        # With an activity id but no object id, look asynchronously for an
        # existing journal entry to resume before launching fresh.
        if handle.activity_id is not None and handle.object_id is None:
            datastore.find({'activity_id': self._handle.activity_id},
                           reply_handler=self._find_object_reply_handler,
                           error_handler=self._find_object_error_handler)
        else:
            self._launch_activity()
示例#27
0
    def _prepare_shared_items(self):
        """Collect metadata for the shared journal entries as JSON.

        Returns a JSON-encoded list of dicts holding title, description,
        comments, object id and sharing info for each shared entry.
        """
        results = []
        if not self._shared_items:
            return json.dumps(results)

        if self._shared_items == ['*']:
            # '*' means share every starred ("keep") entry.
            dsobjects, _nobjects = datastore.find({'keep': '1'})
        else:
            dsobjects = []
            for object_id in self._shared_items:
                dsobjects.append(datastore.get(object_id))

        for dsobj in dsobjects:
            title = ''
            desc = ''
            comment = []
            shared_by = {}
            downloaded_by = []
            object_id = dsobj.object_id
            if hasattr(dsobj, 'metadata'):
                if 'title' in dsobj.metadata:
                    title = dsobj.metadata['title']
                if 'description' in dsobj.metadata:
                    desc = dsobj.metadata['description']
                if 'comments' in dsobj.metadata:
                    try:
                        comment = json.loads(dsobj.metadata['comments'])
                    # Narrowed from a bare except: json.loads raises
                    # ValueError (JSONDecodeError) on bad data and
                    # TypeError on non-string input; keep best-effort
                    # fallback to an empty comment list.
                    except (ValueError, TypeError):
                        comment = []
                if 'shared_by' in dsobj.metadata:
                    shared_by = json.loads(dsobj.metadata['shared_by'])
                if 'downloaded_by' in dsobj.metadata:
                    downloaded_by = json.loads(dsobj.metadata['downloaded_by'])
            else:
                logging.debug('dsobj has no metadata')

            # Package the entry so peers can download it.
            utils.package_ds_object(dsobj, self._instance_path)

            results.append({
                'title': str(title),
                'desc': str(desc),
                'comment': comment,
                'id': str(object_id),
                'shared_by': shared_by,
                'downloaded_by': downloaded_by
            })
        logging.error(results)
        return json.dumps(results)
示例#28
0
    def _find_custom_paths(self, jobject):
        ''' Associate a Journal object with a card '''
        found_a_sequence = False
        if self.custom_paths[0] is None:
            # No custom deck loaded yet: check whether the chosen object's
            # title ends in a number (e.g. 'card 1').
            basename, suffix, i = _find_the_number_in_the_name(
                jobject.metadata['title'])
            ''' If this is the first card, try to find paths for other custom
            cards based on the name; else just load the card. '''
            if i >= 0:
                dsobjects, nobjects = datastore.find(
                    {'mime_type': [str(jobject.metadata['mime_type'])]})
                self.custom_paths = []
                if nobjects > 0:
                    # Collect objects whose titles continue the numbered
                    # sequence, in deck order.
                    for j in range(DECKSIZE):
                        for i in range(nobjects):
                            if dsobjects[i].metadata['title'] == \
                                    _construct_a_name(basename, j + 1, suffix):
                                self.custom_paths.append(dsobjects[i])
                                break

                # Pad short decks to 81 cards by repeating entries; the
                # 27/9/3 divisors presumably mirror the deck structure --
                # TODO confirm against DECKSIZE usage.
                if len(self.custom_paths) < 9:
                    for i in range(3, 81):
                        self.custom_paths.append(
                            self.custom_paths[int(i / 27)])
                elif len(self.custom_paths) < 27:
                    for i in range(9, 81):
                        self.custom_paths.append(
                            self.custom_paths[int(i / 9)])
                elif len(self.custom_paths) < 81:
                    for i in range(9, 81):
                        self.custom_paths.append(
                            self.custom_paths[int(i / 3)])
                found_a_sequence = True
                # Remember the seed object so the game can be resumed.
                self.activity.metadata['custom_object'] = jobject.object_id
                self.activity.metadata['custom_mime_type'] = \
                    jobject.metadata['mime_type']

        if not found_a_sequence:
            # Not a numbered sequence: attach the object to the card the
            # user is currently editing.
            grid_index = self.grid.spr_to_grid(self._edit_card.spr)
            self.custom_paths[grid_index] = jobject
            self.activity.metadata['custom_' + str(grid_index)] = \
                jobject.object_id

        self.card_type = 'custom'
        self.activity.button_custom.set_icon('new-custom-game')
        self.activity.button_custom.set_tooltip(_('New custom game'))
        return
示例#29
0
    def _search_for_audio_note(self, obj_id, target=None):
        ''' Look to see if there is already a sound recorded for this
        dsobject: the object id is stored in a tag in the audio file. '''
        dsobjects, nobjects = datastore.find({'mime_type': ['audio/ogg']})

        # Build the tag to look for when the caller didn't supply one.
        if target is None:
            if self._game.get_mode() == 'array':
                target = obj_id
            else:
                target = '%s-%d' % (obj_id, self._game.current_image)

        for dsobject in dsobjects:
            if 'tags' not in dsobject.metadata:
                continue
            if target in dsobject.metadata['tags']:
                _logger.debug('Found audio note')
                self.metadata['dirty'] = 'True'
                return dsobject
        return None
示例#30
0
    def _load_data(self, widget=None):
        """Rebuild the entry lists and the treeview from a full journal scan.

        widget is accepted so this can be used as a signal callback.
        """
        del self.treeview_list[:]
        del self.files_list[:]
        del self.old_list[:]
        del self.heatmap_list[:]

        # Empty query: fetch every journal entry.
        dsobjects, journal_entries = datastore.find({})
        for dsobject in dsobjects:
            new = []
            new.append(dsobject.metadata['title'])
            new.append(misc.get_icon_name(dsobject.metadata))
            new.append(dsobject.metadata['activity_id'])
            new.append(profile.get_color())
            new.append(dsobject.get_object_id())
            new.append(dsobject.metadata)
            new.append(misc.get_date(dsobject.metadata))
            new.append(dsobject.metadata['mtime'])
            dsobject.metadata.connect('updated', self._ds_updated)
            self.treeview_list.append(new)
            self.old_list.append(new)

            # determine if a file
            if dsobject.metadata['mime_type'] in self.mime_types:
                new2 = []
                new2.append(dsobject.metadata['title'])
                new2.append(misc.get_icon_name(dsobject.metadata))
                new2.append(dsobject.metadata['activity_id'])
                new2.append(profile.get_color())
                new2.append(dsobject.get_object_id())
                new2.append(dsobject.metadata)
                new2.append(misc.get_date(dsobject.metadata))
                new2.append(dsobject.metadata['mtime'])
                self.files_list.append(new2)

            # NOTE(review): everything below runs once per entry because it
            # sits inside the for loop -- the list is re-sorted and the
            # treeview and labels repopulated on every iteration.  Looks
            # like an indentation slip; confirm before changing.
            self.old_list = sorted(self.old_list, key=lambda x: x[7])
            self.journal_entries = journal_entries
            self._add_to_treeview(self.treeview_list)

            # get number of activities installed
            registry = bundleregistry.get_registry()

            self.label_total_activities.set_text(str(len(registry)))
            self.label_journal_entries.set_text(str(self.journal_entries))
            self.label_contribs.set_text(str(len(self.files_list)))
示例#31
0
    def load_journal_table(self):
        """Fill ls_journal with title/mime/size rows for every entry."""
        self.btn_save.props.sensitive = False
        self.btn_delete.props.sensitive = False
        query = {}
        # Fetch only the metadata fields the table displays.
        ds_objects, num_objects = datastore.find(query, properties=['uid', 
            'title',  'mime_type'])

        self.ls_journal.clear()
        for i in xrange (0, num_objects, 1):
            iter = self.ls_journal.append()
            title = ds_objects[i].metadata['title']
            self.ls_journal.set(iter, COLUMN_TITLE, title)
            mime = ds_objects[i].metadata['mime_type']
            self.ls_journal.set(iter, COLUMN_MIME, mime)
            self.ls_journal.set(iter, COLUMN_JOBJECT, ds_objects[i])
            # Size is displayed in KiB.
            size = self.get_size(ds_objects[i]) / 1024
            self.ls_journal.set(iter, COLUMN_SIZE, size)

        # Scroll back to the top of the refreshed list.
        v_adjustment = self.list_scroller_journal.get_vadjustment()
        v_adjustment.value = 0
示例#32
0
    def load_journal_table(self):
        """Fill ls_journal with title/mime/size rows for every entry."""
        self.btn_save.props.sensitive = False
        self.btn_delete.props.sensitive = False
        query = {}
        # Fetch only the metadata fields the table displays.
        ds_objects, num_objects = datastore.find(
            query, properties=['uid', 'title', 'mime_type'])

        self.ls_journal.clear()
        for i in xrange(0, num_objects, 1):
            iter = self.ls_journal.append()
            title = ds_objects[i].metadata['title']
            self.ls_journal.set(iter, COLUMN_TITLE, title)
            mime = ds_objects[i].metadata['mime_type']
            self.ls_journal.set(iter, COLUMN_MIME, mime)
            self.ls_journal.set(iter, COLUMN_JOBJECT, ds_objects[i])
            # Size is displayed in KiB.
            size = self.get_size(ds_objects[i]) / 1024
            self.ls_journal.set(iter, COLUMN_SIZE, size)

        # Scroll back to the top of the refreshed list.
        v_adjustment = self.list_scroller_journal.get_vadjustment()
        v_adjustment.value = 0
    def load_journal_table(self, col):
        """Populate column *col* with image/video journal entries.

        NOTE(review): mountpoint_id is hard-coded to None, so as written
        the if-body below never runs -- confirm the intended source of
        mountpoint_id (ds_mounts is fetched but unused).
        """

        ds_mounts = get_mounts()
        mountpoint_id = None

        query = {}
        if mountpoint_id is not None:
            query['mountpoints'] = [mountpoint_id]
            ds_objects, num_objects = datastore.find(query,
                                                     properties=[
                                                         'uid', 'timestamp',
                                                         'title', 'mime_type',
                                                         'description'
                                                     ],
                                                     sorting='-timestamp')

            self.ls_journal[col].clear()
            for i in xrange(0, num_objects, 1):
                mime = ds_objects[i].metadata['mime_type']

                # Only media entries are shown in this table.
                if mime.startswith('image/') or mime.startswith('video/'):
                    iter = self.ls_journal[col].append()
                    title = ds_objects[i].metadata['title']
                    self.ls_journal[col].set(iter, COLUMN_TITLE, title)
                    jobject_wrapper = JobjectWrapper()
                    jobject_wrapper.set_jobject(ds_objects[i])
                    jobject_wrapper.set_mime_type(mime)
                    mtime = ds_objects[i].metadata.get('timestamp')
                    jobject_wrapper.set_timestamp(mtime)
                    desc = ds_objects[i].metadata.get('description')
                    jobject_wrapper.set_description(desc)
                    # NOTE(review): the 'uid' value is stored as the
                    # wrapper title here -- looks suspicious; verify
                    # against JobjectWrapper's consumers.
                    title = ds_objects[i].metadata.get('uid')
                    jobject_wrapper.set_title(title)
                    self.ls_journal[col].set(iter, COLUMN_MTIME, mtime)
                    self.ls_journal[col].set(iter, COLUMN_JOBJECT,
                                             jobject_wrapper)
                    size = self.get_size(ds_objects[i]) / 1024
示例#34
0
def get_activity(bundle_id):
    """Return the datastore entries created by the given activity bundle."""
    query = {'activity': [bundle_id]}
    results, _count = datastore.find(query)
    return results
 def _get_image_list(self):
     """Query the datastore for every entry with a generic image MIME type.

     Stores the matching objects in self.image_list and their count in
     self.image_count.
     """
     generic = mime.get_generic_type(mime.GENERIC_TYPE_IMAGE)
     self.image_list, self.image_count = datastore.find(
         {'mime_type': generic.mime_types})
示例#36
0
    def read(self, max_count):
        """Return up to *max_count* result objects starting at the current
        position, refilling the internal sliding cache from the datastore
        as needed.

        The cache is a window of ``ResultSet._CACHE_LIMIT`` entries
        starting at ``self._offset``.  Depending on where ``self._position``
        falls relative to that window, this method either rebuilds the
        cache, appends a page at the end, prepends a page at the front, or
        serves the request straight from the cache.

        Raises RuntimeError if *max_count* is more than a fifth of the
        cache limit (the window could not hold the read plus look-ahead).
        """
        logging.debug('ResultSet.read position: %r' % self._position)

        if max_count * 5 > ResultSet._CACHE_LIMIT:
            raise RuntimeError(
                'max_count (%i) too big for ResultSet._CACHE_LIMIT'
                ' (%i).' % (max_count, ResultSet._CACHE_LIMIT))

        # A position of -1 means the set has never been positioned yet.
        if self._position == -1:
            self.seek(0)

        # Entries available in the cache at and after the current position.
        if self._position < self._offset:
            remaining_forward_entries = 0
        else:
            remaining_forward_entries = \
                self._offset + len(self._cache) - self._position

        # Entries available in the cache before the current position.
        # NOTE(review): this uses '>' rather than '>=' at the exact
        # end-of-cache boundary — confirm intended.
        if self._position > self._offset + len(self._cache):
            remaining_backwards_entries = 0
        else:
            remaining_backwards_entries = self._position - self._offset

        last_cached_entry = self._offset + len(self._cache)

        if (remaining_forward_entries <= 0 and
            remaining_backwards_entries <= 0) or \
                max_count > ResultSet._CACHE_LIMIT:

            # Total cache miss: remake it
            offset = max(0, self._position - max_count)
            logging.debug('remaking cache, offset: %r limit: %r' %
                          (offset, max_count * 2))
            jobjects, self._total_count = datastore.find(
                self._query,
                sorting=self._sorting,
                offset=offset,
                limit=ResultSet._CACHE_LIMIT,
                properties=PROPERTIES)

            self._cache.remove_all(self._cache)
            self._cache.append_all(jobjects)
            self._offset = offset

        elif remaining_forward_entries < 2 * max_count and \
                last_cached_entry < self._total_count:

            # Add one page to the end of cache
            logging.debug('appending one more page, offset: %r' %
                          last_cached_entry)
            jobjects, self._total_count = datastore.find(
                self._query,
                sorting=self._sorting,
                offset=last_cached_entry,
                limit=max_count,
                properties=PROPERTIES)

            # update cache
            self._cache.append_all(jobjects)

            # apply the cache limit
            objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
            if objects_excess > 0:
                # Drop the oldest entries from the front and advance the
                # window's offset accordingly.
                self._offset += objects_excess
                self._cache.remove_all(self._cache[:objects_excess])

        elif remaining_backwards_entries < 2 * max_count and self._offset > 0:

            # Add one page to the beginning of cache
            limit = min(self._offset, max_count)
            self._offset = max(0, self._offset - max_count)

            logging.debug('prepending one more page, offset: %r limit: %r' %
                          (self._offset, limit))
            jobjects, self._total_count = datastore.find(self._query,
                                                         sorting=self._sorting,
                                                         offset=self._offset,
                                                         limit=limit,
                                                         properties=PROPERTIES)

            # update cache
            self._cache.prepend_all(jobjects)

            # apply the cache limit
            objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
            if objects_excess > 0:
                # Drop the newest entries from the back; the offset is
                # unchanged since the front of the window stays put.
                self._cache.remove_all(self._cache[-objects_excess:])
        else:
            logging.debug('cache hit and no need to grow the cache')

        # Slice the requested page out of the (now valid) cache window.
        first_pos = self._position - self._offset
        last_pos = self._position - self._offset + max_count
        return self._cache[first_pos:last_pos]
示例#37
0
def get_audio():
    """Return the file paths of all audio/ogg entries in the datastore."""
    results, _count = datastore.find({'mime_type': ['audio/ogg']})
    return [entry.file_path for entry in results]
示例#38
0
def get_starred_count():
    """Return how many Journal entries are starred (keep == '1')."""
    _objects, count = datastore.find({'keep': '1'})
    return count
示例#39
0
 def _find_starred(self):
     ''' Find all the starred (keep == '1') entries in the Journal and
     append a reflection record for each one not already tracked.

     Side effects: sets self.dsobjects/self._nobjects, appends dicts to
     self.reflection_data, and copies image files (or rendered previews)
     into self.tmp_path for display.
     '''
     self.dsobjects, self._nobjects = datastore.find({'keep': '1'})
     for dsobj in self.dsobjects:
         if self._found_obj_id(dsobj.object_id):
             continue  # Already have this object -- TODO: update it
         self.reflection_data.append({
             'title': _('Untitled'), 'obj_id': dsobj.object_id})
         if hasattr(dsobj, 'metadata'):
             if 'creation_time' in dsobj.metadata:
                 self.reflection_data[-1]['creation_time'] = \
                     dsobj.metadata['creation_time']
             else:
                 # Fall back to "now" when the entry has no creation time.
                 self.reflection_data[-1]['creation_time'] = \
                     int(time.time())
             if 'timestamp' in dsobj.metadata:
                 self.reflection_data[-1]['modification_time'] = \
                     dsobj.metadata['timestamp']
             else:
                 self.reflection_data[-1]['modification_time'] = \
                     self.reflection_data[-1]['creation_time']
             if 'activity' in dsobj.metadata:
                 self.reflection_data[-1]['activities'] = \
                     [utils.bundle_id_to_icon(dsobj.metadata['activity'])]
             if 'title' in dsobj.metadata:
                 self.reflection_data[-1]['title'] = \
                     dsobj.metadata['title']
             if 'description' in dsobj.metadata:
                 self.reflection_data[-1]['content'] = \
                     [{'text': dsobj.metadata['description']}]
             else:
                 self.reflection_data[-1]['content'] = []
             if 'tags' in dsobj.metadata:
                 self.reflection_data[-1]['tags'] = []
                 tags = dsobj.metadata['tags'].split()
                 for tag in tags:
                     # Normalize tags so they all carry a leading '#'.
                     if tag[0] != '#':
                         self.reflection_data[-1]['tags'].append('#' + tag)
                     else:
                         self.reflection_data[-1]['tags'].append(tag)
             if 'comments' in dsobj.metadata:
                 try:
                     comments = json.loads(dsobj.metadata['comments'])
                 except (ValueError, TypeError):
                     # Was a bare `except:`; narrowed to JSON decode
                     # failures so unrelated errors are not swallowed.
                     comments = []
                 self.reflection_data[-1]['comments'] = []
                 for comment in comments:
                     try:
                         data = {'nick': comment['from'],
                                 'comment': comment['message']}
                         if 'icon-color' in comment:
                             colors = comment['icon-color'].split(',')
                             darker = 1 - utils.lighter_color(colors)
                             data['color'] = colors[darker]
                         else:
                             data['color'] = '#000000'
                         self.reflection_data[-1]['comments'].append(data)
                     except (KeyError, TypeError, IndexError):
                         # Was a bare `except:`; narrowed to malformed
                         # comment dicts (missing keys / wrong shape).
                         _logger.debug('could not parse comment %s'
                                       % comment)
             if 'mime_type' in dsobj.metadata and \
                dsobj.metadata['mime_type'][0:5] == 'image':
                 new_path = os.path.join(self.tmp_path,
                                         dsobj.object_id)
                 try:
                     shutil.copy(dsobj.file_path, new_path)
                 except Exception as e:
                     logging.error("Couldn't copy %s to %s: %s" %
                                   (dsobj.file_path, new_path, e))
                 self.reflection_data[-1]['content'].append(
                     {'image': new_path})
             elif 'preview' in dsobj.metadata:
                 # No original image file: render the stored preview to a
                 # PNG in tmp_path instead.
                 pixbuf = utils.get_pixbuf_from_journal(dsobj, 300, 225)
                 if pixbuf is not None:
                     path = os.path.join(self.tmp_path,
                                         dsobj.object_id + '.png')
                     utils.save_pixbuf_to_file(pixbuf, path)
                     self.reflection_data[-1]['content'].append(
                         {'image': path})
             self.reflection_data[-1]['stars'] = 0
示例#40
0
def get_activity(bundle_id):
    """Return the datastore entries whose activity matches *bundle_id*."""
    matches, _total = datastore.find({'activity': [bundle_id]})
    return matches
示例#41
0
def get_starred_count():
    """Return the number of starred (keep == '1') Journal entries."""
    _entries, total = datastore.find({'keep': '1'})
    return total
示例#42
0
 def _get_image_list(self):
     """Fetch all datastore entries with a generic image MIME type.

     Populates self.image_list with the objects and self.image_count
     with their total.
     """
     image_mimes = mime.get_generic_type(mime.GENERIC_TYPE_IMAGE).mime_types
     self.image_list, self.image_count = datastore.find(
         {'mime_type': image_mimes})
示例#43
0
def get_audio():
    """Return the file path of every audio/ogg entry in the datastore."""
    entries, _total = datastore.find({'mime_type': ['audio/ogg']})
    return [e.file_path for e in entries]