Example #1
 def _get_last_activity_async(self, bundle_id, properties):
     query = {'activity': bundle_id}
     datastore.find(query, sorting=['+timestamp'],
                    limit=self._MAX_RESUME_ENTRIES,
                    properties=properties,
                    reply_handler=self.__get_last_activity_reply_handler_cb,
                    error_handler=self.__get_last_activity_error_handler_cb)
Example #2
 def check_for_recent_images(self):
     """scans the journal for pictures that are not in database, records jobject_id if found in
     table groups with the journal id in category. Can be faster because we don't have to fetch file itself.
     """
     ds_list = []
     num_found = 0
     mime_list = ['image/jpg','image/png','image/jpeg','image/gif',]
     
     #build 650 doesn't seem to understand correctly the dictionary with a list right hand side
     info = xophotoactivity.sugar_version()
     if len(info)>0:
         (major,minor,micro,release) = info
         _logger.debug('sugar version major:%s minor:%s micro:%s release:%s'%info)
     else:
         _logger.debug('sugar version failure')
         minor = 70
     if minor > 80:
         (results,count) = datastore.find({'mime_type': ['image/jpeg','image/jpg', 'image/png','image/gif']})
     else:
         (results,count) = datastore.find({'mime_type': 'image/jpeg'})
         ds_list.extend(results)
         num_found += count            
         (results,count) = datastore.find({'mime_type': 'image/jpg'})
         ds_list.extend(results)
         num_found += count
         (results,count) = datastore.find({'mime_type': 'image/png'})
         ds_list.extend(results)
         num_found += count
         (results,count) = datastore.find({'mime_type': 'image/gif'})
     ds_list.extend(results)
     num_found += count
     
     _logger.debug('Journal/datastore entries found:%s'%num_found)
     added = 0
     a_row_found = False
     cursor = self.db.connection().cursor()
     journal_list = []
     for ds in ds_list:
         #at least for now assume that the newest images are returned first
         if not a_row_found:
             journal_list.append(ds.object_id)
             dict = ds.get_metadata().get_dictionary()
             if dict["mime_type"] in mime_list:
                 cursor.execute('select * from groups where category = ? and jobject_id = ?',\
                                (display.journal_id,str(ds.object_id),))
                 rows = cursor.fetchall()
                 if len(rows) == 0:
                     #may need to add date entered into ds (create date could be confusing)
                     self.db.put_ds_into_picture(ds.object_id)
                     self.db.add_image_to_album(display.journal_id,ds.object_id)
                     added += 1
                 else: #assume that pictures are returned in last in first out order
                     #no longer true since we are getting each mime_type separately (build 650 kludge)
                     #a_row_found = True
                     pass
         ds.destroy()
     #now go through albums and remove references that are no longer in datastore
     #cursor.execute('select * from groups')
     _logger.debug('scan found %s. Added %s datastore object ids from datastore to picture'%(count,added,))
     return (num_found,added,)
Example #3
def unique_title(name):
    if name == '':
        name = 'Untitled'
    objs, num_objects = datastore.find({'title': name, 'TimeLapse': 'yes'})
    if num_objects == 0:
        return name
    i = 1
    while num_objects != 0:
        i += 1
        objs, num_objects = datastore.find({
            'title': name + str(i),
            'TimeLapse': 'yes'
        })
    return name + str(i)
Example #4
def imgbrowse():
    global imgs
    #set up pageview to show images in image
    #imgs is a list of tuples: (fn, src))
    imgs = []
    #get paths to images in image folder
    d = IMAGEPATH
    print 'imgbrowse: ', len(d.files()), len(d.files('*.png'))
    for f in d.files('*.png'):
        imgs.append((f.name, f, 's'))
    for f in d.files('*.jpg'):
        imgs.append((f.name, f, 's'))
    #get objects from the local datastore
    print 'imgbrowse: get objects from the local datastore'
    ds_objects, num_objects = datastore.find(
        {'mime_type': ['image/png', 'image/jpeg', 'image/svg']})
    print 'imgbrowse: ', num_objects
    for f in ds_objects:
        obj = datastore.get(f.object_id)
        pth = path(obj.get_file_path())
        print 'imgbrowse: ', obj.get_file_path()
        imgs.append((pth.name, obj, 'd'))
        f.destroy()
    print 'imgbrowse: call pageview'
    vw.pageview(imgs, setnewimg, "images")
Example #5
def clipbrowse():
    global clips
    #set up pageview to show audio clips in sound (and later the journal)
    #clips is a list of tuples: (src, fn)
    clips = []
    #get paths to sound clips in sound folder
    d = SOUNDPATH
    print 'clipbrowse: ', len(d.files()), len(d.files('*.ogg'))
    for f in d.files('*.ogg'):
        clips.append((f.name, f, 's'))
        print 'clipbrowse: ', f.name
    #get objects from the local datastore
    print 'clipbrowse: get objects from the local datastore'
    ds_objects, num_objects = datastore.find(
        {'mime_type': ['application/ogg', 'audio/ogg', 'audio/mp3']})
    print 'clipbrowse: ', num_objects
    for f in ds_objects:
        obj = datastore.get(f.object_id)
        pth = path(obj.get_file_path())
        if len(pth) > 0:
            clips.append((pth.name, obj, 'd'))
            print 'clipbrowse:', pth.name
        f.destroy()
    print 'clipbrowse: call pageview'
    pv.pageview(clips, setnewclip, "audio clips")
Example #6
def change_name(old_name, new_name):
    dsobjects, num = datastore.find({'title': old_name, 'TimeLapse': 'yes'})
    for dsobject in dsobjects:
        dsobject.metadata['title'] = new_name
        dsobject.metadata['TimeLapsetitle'] = new_name
        datastore.write(dsobject)
        dsobject.destroy()
Example #7
 def set_store(self, mountpoint, pth):
     print 'set_store', mountpoint, pth
     store = gtk.ListStore(str, str, str, str)
     #get objects from the local datastore
     ds_objects, num_objects = datastore.find({
         'mountpoints': [mountpoint],
         'mime_type': ['image/jpg', 'image/png', 'image/svg', 'image/jpeg']
     })
     for f in ds_objects:
         try:
             object = f.object_id
         except:
             print 'set object_id failed'
         try:
             title = f.metadata['title']
         except:
             title = ""
         try:
             mime_type = f.metadata['mime_type']
         except:
             mime_type = 'unknown'
         try:
             t = f.metadata['timestamp']
             timestamp = datetime.fromtimestamp(t)
         except:
             timestamp = ""
         store.append([object, title, mime_type, timestamp])
         print 'store.append', object, title, mime_type, timestamp
         f.destroy()
     return store
Example #8
def clickOnImport():
    global uglyflag
    uglyflag = False
    #get list of available bundles from journal
    items = []
    print 'import'
    #get mount points
    ds_mounts = datastore.mounts()
    for i in range(0, len(ds_mounts), 1):
        if ds_mounts[i]['uri'].find('datastore') > 0:
            mountpoint = ds_mounts[i]['id']

    #get objects from the local datastore
    ds_objects, num_objects = datastore.find({
        'mountpoints': [mountpoint],
        'mime_type': ['application/x-imagequiz', 'application/zip']
    })
    print 'import: num_objects', num_objects
    for i in xrange(0, num_objects, 1):
        ds_objects[i].destroy()
        obj = ds_objects[i].object_id
        title = ds_objects[i].metadata['title']
        mime_type = ds_objects[i].metadata['mime_type']
        activity = ds_objects[i].metadata['activity']
        if mime_type == 'application/x-imagequiz':
            pth1 = path(ds_objects[i].get_file_path())
            pos = pth1.find('(')
            if pos >= 0:
                pos1 = pth1[pos:].find(')')
                pth = pth1[:pos] + pth1[pos + pos1 + 1:].replace('..', '.')
            else:
                pth = pth1
            print 'import:', pth1, pth
            items.append((title, pth))
    pv.pageview(items, getbundle, 'Quiz')
Example #9
 def set_store(self, mountpoint, pth):
     print 'set_store', mountpoint, pth
     store = gtk.ListStore(str, str, str, str)
     #get objects from the local datastore
     ds_objects, num_objects = datastore.find({'mountpoints':[mountpoint], 'mime_type':['image/jpg', 'image/png', 'image/svg', 'image/jpeg']})
     for f in ds_objects:
         try:
             object = f.object_id
         except:
             print 'set object_id failed'
         try:
             title = f.metadata['title']
         except:
             title = ""
         try:
             mime_type = f.metadata['mime_type']
         except:
             mime_type = 'unknown'
         try:
             t = f.metadata['timestamp']
             timestamp = datetime.fromtimestamp(t)
         except:
             timestamp = ""
         store.append([object, title, mime_type, timestamp])
         print 'store.append', object, title, mime_type, timestamp
         f.destroy()
     return store
Example #10
    def load_journal_table(self):
        self.btn_save.props.sensitive = False
        self.btn_delete.props.sensitive = False
        ds_mounts = datastore.mounts()
        mountpoint_id = None
        if len(ds_mounts) == 1 and ds_mounts[0]['id'] == 1:
            pass
        else:
            for mountpoint in ds_mounts:
                id = mountpoint['id'] 
                uri = mountpoint['uri']
                if uri.startswith('/home'):
                    mountpoint_id = id

        query = {}
        if mountpoint_id is not None:
            query['mountpoints'] = [ mountpoint_id ]
        ds_objects, num_objects = datastore.find(query, properties=['uid', 
            'title',  'mime_type'])

        self.ls_journal.clear()
        for i in xrange (0, num_objects, 1):
            iter = self.ls_journal.append()
            title = ds_objects[i].metadata['title']
            self.ls_journal.set(iter, COLUMN_TITLE, title)
            mime = ds_objects[i].metadata['mime_type']
            self.ls_journal.set(iter, COLUMN_MIME, mime)
            self.ls_journal.set(iter, COLUMN_JOBJECT, ds_objects[i])
            size = self.get_size(ds_objects[i]) / 1024
            self.ls_journal.set(iter, COLUMN_SIZE, size)

        self.ls_journal.set_sort_column_id(COLUMN_TITLE,  gtk.SORT_ASCENDING)
        v_adjustment = self.list_scroller_journal.get_vadjustment()
        v_adjustment.value = 0
Example #11
 def set_store(self, src):
     print "set_store", src
     store = gtk.ListStore(str, str, str)
     # get objects from the local datastore
     if src == "datastore":
         ds_objects, num_objects = datastore.find({"mime_type": ["application/x-classroompresenter"]})
         for f in ds_objects:
             try:
                 object = f.object_id
             except:
                 print "find object_id failed"
             try:
                 title = f.metadata["title"]
             except:
                 title = ""
             try:
                 t = f.metadata["timestamp"]
                 timestamp = datetime.fromtimestamp(t)
             except:
                 timestamp = ""
             store.append([object, title, timestamp])
             f.destroy()
     elif src == "activity":
         # source is activity bundle
         srcdir = path(activity.get_bundle_path()) / "resources" / "Presentations"
         for f in srcdir.files("*.cpxo"):
             store.append([f.name, "", f.getctime()])
     else:
         print "error in src", src
     print "return cpxo store"
     return store
Example #12
 def set_store(self, src):
     print 'set_store', src
     store = gtk.ListStore(str, str, str)
     #get objects from the local datastore
     if src == "datastore":
         ds_objects, num_objects = datastore.find(
             {'mime_type': ['application/x-classroompresenter']})
         for f in ds_objects:
             try:
                 object = f.object_id
             except:
                 print 'find object_id failed'
             try:
                 title = f.metadata['title']
             except:
                 title = ""
             try:
                 t = f.metadata['timestamp']
                 timestamp = datetime.fromtimestamp(t)
             except:
                 timestamp = ""
             store.append([object, title, timestamp])
             f.destroy()
     elif src == "activity":
         #source is activity bundle
         srcdir = path(
             activity.get_bundle_path()) / 'resources' / 'Presentations'
         for f in srcdir.files('*.cpxo'):
             store.append([f.name, "", f.getctime()])
     else:
         print 'error in src', src
     print 'return cpxo store'
     return store
Example #13
def load_dungeon_by_id(id):
    ds_objects, num_objects = datastore.find({'FortuneMaker_VERSION':FM_VERSION,'FM_UID':id})

    if num_objects == 0:
        return False

    return load_dungeon(ds_objects[0])
Example #15
    def loadJournalTable(self):
        self.ls_right = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING,
                                      gobject.TYPE_PYOBJECT)
        ds_objects, num_objects = datastore.find(
            {
                'mime_type':
                ['image/jpeg', 'image/gif', 'image/tiff', 'image/png']
            },
            properties=['uid', 'title', 'mime_type'])
        self.ls_right.clear()
        for i in xrange(0, num_objects, 1):
            iter = self.ls_right.append()
            title = ds_objects[i].metadata['title']
            mime_type = ds_objects[i].metadata['mime_type']
            if mime_type == 'image/jpeg' \
                and not title.endswith('.jpg') \
                and not title.endswith('.jpeg') \
                and not title.endswith('.JPG') \
                and not title.endswith('.JPEG') :
                title = title + '.jpg'
            if mime_type == 'image/png' \
                and not title.endswith('.png') \
                and not title.endswith('.PNG'):
                title = title + '.png'
            if mime_type == 'image/gif' \
                and not title.endswith('.gif')\
                and not title.endswith('.GIF'):
                title = title + '.gif'
            if mime_type == 'image/tiff' \
                and not title.endswith('.tiff')\
                and not title.endswith('.TIFF'):
                title = title + '.tiff'
            self.ls_right.set(iter, COLUMN_IMAGE, title)
            jobject_wrapper = JobjectWrapper()
            jobject_wrapper.setJobject(ds_objects[i])
            self.ls_right.set(iter, COLUMN_PATH, jobject_wrapper)

        valid_endings = ('.jpg', '.jpeg', '.JPEG', '.JPG', '.gif', '.GIF',
                         '.tiff', '.TIFF', '.png', '.PNG')
        ds_mounts = datastore.mounts()
        if len(ds_mounts) == 1 and ds_mounts[0]['id'] == 1:
            # datastore.mounts() is stubbed out,
            # we're running .84 or better
            for dirname, dirnames, filenames in os.walk('/media'):
                if '.olpc.store' in dirnames:
                    dirnames.remove('.olpc.store')
                    # don't visit .olpc.store directories
                for filename in filenames:
                    if filename.endswith(valid_endings):
                        iter = self.ls_right.append()
                        jobject_wrapper = JobjectWrapper()
                        jobject_wrapper.setFilePath(
                            os.path.join(dirname, filename))
                        self.ls_right.set(iter, COLUMN_IMAGE, filename)
                        self.ls_right.set(iter, COLUMN_PATH, jobject_wrapper)
        self.ls_right.set_sort_column_id(COLUMN_IMAGE, gtk.SORT_ASCENDING)

        print ds_objects
        print num_objects
Example #16
 def get_length(self):
     if self._total_count == -1:
         jobjects, self._total_count = datastore.find(
             self._query, sorting=self._sorting, limit=ResultSet._CACHE_LIMIT, properties=PROPERTIES
         )
         self._cache.append_all(jobjects)
         self._offset = 0
     return self._total_count
Example #17
def publish(activity, force=False):
    if not [i for i in book.custom.index if i['ready']]:
        activity.notify_alert(
            _('Nothing to publish'),
            _('Mark articles from "Custom" panel and try again.'))
        return

    title = activity.metadata['title']
    jobject = datastore.find({
        'activity_id': activity.get_id(),
        'activity': book.custom.uid
    })[0] or None

    logger.debug('publish: title=%s jobject=%s force=%s' \
            % (title, jobject and jobject[0].metadata['activity'], force))

    if jobject:
        if force:
            jobject = jobject[0]
        else:
            try:
                # check for 0.84 code
                from jarabe import config
            except:
                # 0.82 couldn't override .xol bundles
                activity.notify_alert(
                        _('Bundle exists'),
                        _('A bundle by "%s" name already exists. Please ' \
                        'click "Erase" in the Journal. You can click ' \
                        '"Publish" again afterwards.') % \
                        jobject[0].metadata['title'])
                return

            activity.confirmation_alert(
                    _('Overwrite existed bundle?'),
                    _('A bundle for current object was already created. ' \
                          'Click "OK" to overwrite it.'),
                    publish, activity, True)
            jobject[0].destroy()
            return
    else:
        jobject = datastore.create()
        jobject.metadata['activity_id'] = activity.get_id()
        jobject.metadata['activity'] = book.custom.uid
        jobject.metadata['mime_type'] = 'application/vnd.olpc-content'
        jobject.metadata['description'] = \
                'This is a bundle containing articles on %s.\n' \
                'To view these articles, open the \'Browse\' Activity.\n' \
                'Go to \'Books\', and select \'%s\'.' % (title, title)

    book.custom.sync_article()
    book.custom.revision += 1

    jobject.metadata['title'] = title
    _publish(title, jobject)
    jobject.destroy()

    book.custom.sync_index()
Example #18
 def get_length(self):
     if self._total_count == -1:
         jobjects, self._total_count = datastore.find(
             self._query,
             sorting=self._sorting,
             limit=ResultSet._CACHE_LIMIT,
             properties=PROPERTIES)
         self._cache.append_all(jobjects)
         self._offset = 0
     return self._total_count
Example #19
def list_fh_files():
    ds_objects, num_objects = datastore.find({'FortuneMaker_VERSION':FM_VERSION})
    file_list = []
    for i in xrange(0, num_objects, 1):
        if ds_objects[i].metadata.has_key('FM_UID'):
            file_list.append( ds_objects[i] )
        else:
            #TODO: Attempt to read uid from file?
            pass
    return file_list
Example #21
    def __init__(self):
        gtk.VBox.__init__(self, False, 0)

        # store mathogen prac activity when it is found
        mpa = None

        self.label = gtk.Label("")
        self.pack_start(self.label, True, True, 5)
        self.label.show()

        # testing challenge repository
        challreg = challrepo.get_global_repository()
        self.writeline(str(challreg._challenges))

        # retrieve the activity bundle info
        # make a label with the name of each

        registry = bundleregistry.get_registry()
        for info in registry:
            if info.get_bundle_id() == "org.davidmason.mathogen_prac":
                thing = ", ".join([info.get_name(), info.get_bundle_id()])
                self.writeline(thing)
                mpa = info

        if mpa is None:
            self.writeline("activity not found")
        else:
            self.writeline("activity found")
            query = {'activity': mpa.get_bundle_id()}
            properties = [
                'uid', 'title', 'icon-color', 'activity', 'activity_id',
                'mime_type', 'mountpoint'
            ]
            datastore.find(query,
                           sorting=['+timestamp'],
                           limit=5,
                           properties=properties,
                           reply_handler=self._query_reply_handler_cb,
                           error_handler=self._query_error_handler_cb)
            self.writeline("sent datastore find command")

        self.show()
Example #22
 def _search_for_audio_note(self, obj_id):
     ''' Look to see if there is already a sound recorded for this
     dsobject '''
     dsobjects, nobjects = datastore.find({'mime_type': ['audio/ogg']})
     # Look for tag that matches the target object id
     for dsobject in dsobjects:
         if 'tags' in dsobject.metadata and \
            obj_id in dsobject.metadata['tags']:
             _logger.debug('Found audio note')
             return dsobject
     return None
Example #23
    def openSaveAsDialog(self):
        dialog = None
        special_button = None
        if hacks['xo']:
            t = gui.Table()
            t.tr()
            if self.script.journal:
                t.td(gui.Image("games/broadway/images/dialog-folders.png"))
                t.tr()
                t.td(gui.Label(_("Folders")))
            else:
                t.td(gui.Image("games/broadway/images/dialog-journal.png"))
                t.tr()
                t.td(gui.Label(_("Journal")))
            special_button = gui.Button(t)

            def closeAndSaveAs():
                dialog.close()
                self.script.refreshTheater()
                self.script.filepath = None
                self.script.journal = not self.script.journal
                self.openSaveAsDialog()

            special_button.connect(gui.CLICK, closeAndSaveAs)
        if self.script.journal:
            if self.script.filepath:
                saveName = self.script.filepath.metadata['title']
            elif self.script.metadata.title:
                saveName = self.script.metadata.title
            else:
                saveName = ""
            dialog = gui.JournalDialog(_("Save a script"),
                                       saveName,
                                       True,
                                       special_button=special_button)
            dialog.loadJournalItems(
                datastore.find({"mime_type": "application/broadway"})[0])
        else:
            if self.script.filepath:
                saveName = os.path.basename(self.script.filepath)
            else:
                saveName = filenameStrip(
                    self.script.metadata.title) + information['filetype']
            dialog = gui.FileDialog(_("Save a script"),
                                    _("Okay"),
                                    path=directories['export-folder'],
                                    filter=[information['filetype']],
                                    default=saveName,
                                    favorites=defaults['favorites'],
                                    special_button=special_button)
        dialog.open()
        dialog.connect(gui.CLOSE, self.script.refreshTheater)
        dialog.connect(gui.CHANGE, self.saveFile)
Example #24
    def remove_badges(self):
        """
        Removes all of the user's badges from their Sash
        """

        # Find all of the activities that award badges
        ds_objects, num_objects = datastore.find(
            {'has_badges': 'True'})

        for x in range(num_objects):
            ds_objects[x].destroy()
            datastore.delete(ds_objects[x].object_id)
Example #25
    def __init__(self):
        gtk.VBox.__init__(self, False, 0)

        # store mathogen prac activity when it is found
        mpa = None

        self.label = gtk.Label("")
        self.pack_start(self.label, True, True, 5)
        self.label.show()


        # testing challenge repository
        challreg = challrepo.get_global_repository()
        self.writeline(str(challreg._challenges))
        

        # retrieve the activity bundle info
        # make a label with the name of each

        registry = bundleregistry.get_registry()
        for info in registry:
            if info.get_bundle_id() == "org.davidmason.mathogen_prac":
                thing = ", ".join([info.get_name(), info.get_bundle_id()])
                self.writeline(thing)
                mpa = info

        if mpa is None:
            self.writeline("activity not found")
        else:
            self.writeline("activity found")
            query = {'activity': mpa.get_bundle_id()}
            properties = ['uid', 'title', 'icon-color', 'activity', 'activity_id', 'mime_type', 'mountpoint']
            datastore.find(query, sorting=['+timestamp'],
                           limit=5,
                           properties=properties,
                           reply_handler=self._query_reply_handler_cb,
                           error_handler=self._query_error_handler_cb)
            self.writeline("sent datastore find command")

        self.show()
Example #26
    def __init__(self, bundle, handle):
        """Initialise the handler

        bundle -- the ActivityBundle to launch
        activity_handle -- stores the values which are to
            be passed to the service to uniquely identify
            the activity to be created and the sharing
            service that may or may not be connected with it

            sugar.activity.activityhandle.ActivityHandle instance

        calls the "create" method on the service for this
        particular activity type and registers the
        _reply_handler and _error_handler methods on that
        call's results.

        The specific service which creates new instances of this
        particular type of activity is created during the activity
        registration process in shell bundle registry which creates
        service definition files for each registered bundle type.

        If the file '/etc/olpc-security' exists, then activity launching
        will be delegated to the prototype 'Rainbow' security service.
        """
        gobject.GObject.__init__(self)

        self._bundle = bundle
        self._service_name = bundle.get_bundle_id()
        self._handle = handle

        bus = dbus.SessionBus()
        bus_object = bus.get_object(_SHELL_SERVICE, _SHELL_PATH)
        self._shell = dbus.Interface(bus_object, _SHELL_IFACE)

        if handle.activity_id is not None and handle.object_id is None:
            datastore.find({'activity_id': self._handle.activity_id},
                           reply_handler=self._find_object_reply_handler,
                           error_handler=self._find_object_error_handler)
        else:
            self._launch_activity()
Example #27
    def load_badges(self):
        """
        Loads all of the user's badges that they have achieved.

        This allows faster sorting and searching times by creating
        a badge image and adding it to a dictionary instead of creating
        a new badge image every time a redisplay is called.
        """

        # Find all of the activities that award badges
        ds_objects, num_objects = datastore.find(
            {'has_badges': 'True'})

        # Create a list of tuples of all the activites
        list_activites = [(ds_object.metadata['activity'],
                           json.loads(ds_object.metadata['badge_list']))
                          for ds_object in ds_objects
                          if 'has_badges' in ds_object.metadata]

        # Creates a dictionary of earned badges and populates it
        self.earned_badges = {}
        for activity, badges in list_activites:
            for badge in badges.keys():
                self.earned_badges[badge] = {'info': badges[badge]['criteria'],
                                             'time': badges[badge]['time'],
                                             'name': badge,
                                             'bundle_id':
                                             badges[badge]['bundle_id'],
                                             'activity': activity}

        # Path to the images of the badges
        path = os.path.expanduser('~/.local/share/badges')

        # Create a dictionary of all the badge images
        self.badge_images = {}

        # Loop through all of the earned badges
        for badge in self.earned_badges.values():

            # Create an image and tooltip for the badge and display it
            badge_image = Gtk.Image()
            badge_image.set_from_file(os.path.join(path, badge['bundle_id'],
                                      badge['name'] + '.png'))
            badge_image.set_tooltip_text("Name: " + badge['name'] +
                                         "\nDate Acquired: " +
                                         badge['time'] +
                                         "\nActivity: " +
                                         badge['activity'] +
                                         "\n\n" + badge['info'])

            # Adds that badge image to the dictionary
            self.badge_images[badge['name']] = badge_image
Example #28
 def on_play_clicked(self, widget):
     print 'playing audio'
     if self.selected_name != None:
         obj, num = datastore.find({
             'title': self.selected_name,
             'TimeLapse': 'yes',
             'mime_type': 'audio/ogg'
         })
         print 'num of play objects:', num
         soundfile = obj[0].get_file_path()
         if os.path.isfile(os.path.abspath(soundfile)):
             self.player.set_property(
                 "uri", "file://" + os.path.abspath(soundfile))
             self.player.set_state(gst.STATE_PLAYING)
Example #29
 def openSaveAsDialog(self):
     dialog = None
     special_button= None
     if hacks['xo']:
         t= gui.Table()
         t.tr()
         if self.script.journal:
             t.td(gui.Image("games/broadway/images/dialog-folders.png"))
             t.tr()
             t.td(gui.Label(_("Folders")))
         else:
             t.td(gui.Image("games/broadway/images/dialog-journal.png"))
             t.tr()
             t.td(gui.Label(_("Journal")))
         special_button = gui.Button(t)
         def closeAndSaveAs():
             dialog.close()
             self.script.refreshTheater()
             self.script.filepath = None
             self.script.journal = not self.script.journal
             self.openSaveAsDialog()
         special_button.connect(gui.CLICK, closeAndSaveAs)
     if self.script.journal:
         if self.script.filepath:
             saveName = self.script.filepath.metadata['title']
         elif self.script.metadata.title:
             saveName = self.script.metadata.title
         else:
             saveName = ""
         dialog = gui.JournalDialog(_("Save a script"),
                               saveName,
                               True,
                               special_button=special_button)
         dialog.loadJournalItems(datastore.find({"mime_type" : "application/broadway"})[0])
     else:
         if self.script.filepath:
             saveName = os.path.basename(self.script.filepath)
         else:
             saveName = filenameStrip(self.script.metadata.title) + information['filetype']
         dialog = gui.FileDialog(_("Save a script"),
                                 _("Okay"), 
                                 path=directories['export-folder'], 
                                 filter=[information['filetype']],
                                 default= saveName,
                                 favorites = defaults['favorites'],
                                 special_button = special_button)
     dialog.open()
     dialog.connect(gui.CLOSE, self.script.refreshTheater);
     dialog.connect(gui.CHANGE, self.saveFile);
Example #30
 def get_pixbuf(self, entry_title, main_image=False):
     print 'making new data entry'
     obj, num = datastore.find({
         'title': entry_title,
         'TimeLapse': 'yes',
         'mime_type': 'image/png'
     })
     if num != 0:
         filepath = obj[0].get_file_path()
     else:
         return None
     if not main_image:
         pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(filepath, 96, 96)
     else:
         pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(filepath, 640, 480)
     return pixbuf
Example #31
 def scan_images(self):
     """
     returns a list of journal object ids that have mime_type equal to one
     of the entries in mimetype table of xophoto database. 
     """
     rtn = 0
     mime_list = self.db.get_mime_list()
     (results,count) = datastore.find({})
     for f in results:
         dict = f.get_metadata().get_dictionary()
         if dict["mime_type"] in mime_list:
             #record the id, file size, file date, in_ds
             self.db.create_picture_record(f.object_id, f.get_file_path())
             rtn += 1
         f.destroy()
     self.db.commit()
     _logger.debug('%s entries found in journal. Number of pictures %s'%(count,rtn,))
     return rtn
Example #32
def export_textfile(activity, filename, dungeon_id, filetext=''):
    """
    Exports text to journal (in fortune map format)
    Requires activity instance, file name, dungeon id, and text

    @Returns: a DSObject representing the file in the datastore.
    """
    ds_objects, num_objects = datastore.find({
        'title': filename,
        'FortuneMaker_VERSION': FM_VERSION
    })

    if num_objects == 0:
        # Create a datastore object
        file_dsobject = datastore.create()

    else:
        file_dsobject = ds_objects[0]

    # Store unique id for easy search of journal
    file_dsobject.metadata['FM_UID'] = dungeon_id

    # Write any metadata (here we specifically set the title of the file and
    # specify that this is a plain text file).
    file_dsobject.metadata['title'] = filename
    file_dsobject.metadata['mime_type'] = FILE_MIME
    file_dsobject.metadata['FortuneMaker_VERSION'] = FM_VERSION

    #Write the actual file to the data directory of this activity's root.
    file_path = os.path.join(activity.get_activity_root(), 'instance',
                             filename)
    f = open(file_path, 'w')
    try:
        f.write(filetext)
    finally:
        f.close()

    #Set the file_path in the datastore.
    file_dsobject.set_file_path(file_path)

    datastore.write(file_dsobject)
    return file_dsobject
Example #33
 def get_last_game(self):
     """ Make a query in the datastore to load last game """
     
     metadata = None
     
     # This query returns the last activity instance that has a path
     ds_objects, num_objects = datastore.find({'activity': 'org.ceibaljam.Saludame'}, sorting='-mtime', properties=['uid', 'title', 'mtime'])
     for entry in ds_objects:
         filepath = entry.get_file_path()
         if filepath:
             metadata = entry.get_metadata()
             filepath = entry.get_file_path()
             print "Last game is ", metadata['title'], " ", metadata['mtime']
             self.read_file(filepath)
             break
             
     for entry in ds_objects:
         entry.destroy()
         
     return metadata
Example #34
 def scan_images(self):
     """
     returns a list of journal object ids that have mime_type equal to one
     of the entries in mimetype table of xophoto database. 
     """
     rtn = 0
     mime_list = self.db.get_mime_list()
     (results, count) = datastore.find({})
     for f in results:
         dict = f.get_metadata().get_dictionary()
         if dict["mime_type"] in mime_list:
             #record the id, file size, file date, in_ds
             self.db.create_picture_record(f.object_id, f.get_file_path())
             rtn += 1
         f.destroy()
     self.db.commit()
     _logger.debug('%s entries found in journal. Number of pictures %s' % (
         count,
         rtn,
     ))
     return rtn
Example #35
    def on_delete_clicked(self, widget):
        if self.selected_name != None:
            count = 0
            for obj in self.model:
                if obj[0] == self.selected_name:
                    iterator = self.model.iter_nth_child(None, count)
                    self.model.remove(iterator)
                count += 1
            ds_objects, num_objects = datastore.find({
                'title': self.selected_name,
                'TimeLapse': 'yes'
            })

            for i in xrange(0, num_objects, 1):
                print 'DELETING ' + ds_objects[i].metadata['title'] + '...'
                ds_objects[i].destroy()
                datastore.delete(ds_objects[i].object_id)
            self.MainImage.clear()
            self.get_widget('name_entry').set_text('')
            description_buffer = gtk.TextBuffer(table=None)
            description_buffer.set_text('')
            self.get_widget("description_entry").set_buffer(description_buffer)
Example #36
    def __init__(self, activity, bundle_id):

        self._id = bundle_id
        ds_objects, num_objects = datastore.find({'activity': activity})

        # Path for all badges
        badge_path = os.path.expanduser('~/.local/share/badges')

        # Creates a new directory for badges if one doesn't exist
        try:
            os.makedirs(badge_path)

        # Directory already exists
        except OSError:
            pass

        # Destination path for the activity's badges
        dest = os.path.join(badge_path, self._id)
        # Source path for the activity's local badges
        source = os.path.abspath('badges/')

        # Create a new directory for badges for this activity if none exist
        try:
            if not os.path.exists(dest):
                os.symlink(source, dest)

        # Directory already exists
        except OSError:
            pass

        # Create a datastore object for this activity if one doesn't exist
        if not ds_objects:
            self._list = datastore.create()
            self._list.metadata['activity'] = activity
            self._list.metadata['has_badges'] = 'True'
            self._list.metadata['badge_list'] = json.dumps({})
            datastore.write(self._list)
        else:
            self._list = ds_objects[0]
Example #37
def export_textfile(activity, filename, dungeon_id, filetext=''):
    """
    Exports text to journal (in fortune map format)
    Requires activity instance, file name, dungeon id, and text

    @Returns: a DSObject representing the file in the datastore.
    """
    ds_objects, num_objects = datastore.find({'title':filename,'FortuneMaker_VERSION':FM_VERSION})

    if num_objects == 0:
        # Create a datastore object
        file_dsobject = datastore.create()

    else:
        file_dsobject = ds_objects[0]

    # Store unique id for easy search of journal
    file_dsobject.metadata['FM_UID'] = dungeon_id

    # Write any metadata (here we specifically set the title of the file and
    # specify that this is a plain text file).
    file_dsobject.metadata['title'] = filename
    file_dsobject.metadata['mime_type'] = FILE_MIME
    file_dsobject.metadata['FortuneMaker_VERSION'] = FM_VERSION

    #Write the actual file to the data directory of this activity's root.
    file_path = os.path.join(activity.get_activity_root(), 'instance', filename)
    f = open(file_path, 'w')
    try:
        f.write(filetext)
    finally:
        f.close()

    #Set the file_path in the datastore.
    file_dsobject.set_file_path(file_path)

    datastore.write(file_dsobject)
    file_dsobject.destroy()
Example #38
    def check_for_recent_images(self):
        """scans the journal for pictures that are not in database, records jobject_id if found in
        table groups with the journal id in category. Can be faster because we don't have to fetch file itself.
        """
        ds_list = []
        num_found = 0
        mime_list = [
            'image/jpg',
            'image/png',
            'image/jpeg',
            'image/gif',
        ]

        #build 650 doesn't seem to understand correctly the dictionary with a list right hand side
        info = xophotoactivity.sugar_version()
        if len(info) > 0:
            (major, minor, micro, release) = info
            _logger.debug(
                'sugar version major:%s minor:%s micro:%s release:%s' % info)
        else:
            _logger.debug('sugar version failure')
            minor = 70
        if minor > 80:
            (results, count) = datastore.find({
                'mime_type':
                ['image/jpeg', 'image/jpg', 'image/png', 'image/gif']
            })
        else:
            (results, count) = datastore.find({'mime_type': 'image/jpeg'})
            ds_list.extend(results)
            num_found += count
            (results, count) = datastore.find({'mime_type': 'image/jpg'})
            ds_list.extend(results)
            num_found += count
            (results, count) = datastore.find({'mime_type': 'image/png'})
            ds_list.extend(results)
            num_found += count
            (results, count) = datastore.find({'mime_type': 'image/gif'})
        ds_list.extend(results)
        num_found += count

        _logger.debug('Journal/datastore entries found:%s' % num_found)
        added = 0
        a_row_found = False
        cursor = self.db.connection().cursor()
        journal_list = []
        for ds in ds_list:
            #at least for now assume that the newest images are returned first
            if not a_row_found:
                journal_list.append(ds.object_id)
                dict = ds.get_metadata().get_dictionary()
                if dict["mime_type"] in mime_list:
                    cursor.execute('select * from groups where category = ? and jobject_id = ?',\
                                   (display.journal_id,str(ds.object_id),))
                    rows = cursor.fetchall()
                    if len(rows) == 0:
                        #may need to add date entered into ds (create date could be confusing)
                        self.db.put_ds_into_picture(ds.object_id)
                        self.db.add_image_to_album(display.journal_id,
                                                   ds.object_id)
                        added += 1
                    else:  #assume that pictures are returned in last in first out order
                        #no longer true since we are getting each mime_type separately (build 650 kludge)
                        #a_row_found = True
                        pass
            ds.destroy()
        #now go through albums and remove references that are no longer in datastore
        #cursor.execute('select * from groups')
        _logger.debug(
            'scan found %s. Added %s datastore object ids from datastore to picture'
            % (
                count,
                added,
            ))
        return (
            num_found,
            added,
        )
Example #39
    def read(self, max_count):
        logging.debug('ResultSet.read position: %r' % self._position)

        if max_count * 5 > ResultSet._CACHE_LIMIT:
            raise RuntimeError(
                'max_count (%i) too big for ResultSet._CACHE_LIMIT'
                ' (%i).' % (max_count, ResultSet._CACHE_LIMIT))

        if self._position == -1:
            self.seek(0)

        if self._position < self._offset:
            remaining_forward_entries = 0
        else:
            remaining_forward_entries = \
                self._offset + len(self._cache) - self._position

        if self._position > self._offset + len(self._cache):
            remaining_backwards_entries = 0
        else:
            remaining_backwards_entries = self._position - self._offset

        last_cached_entry = self._offset + len(self._cache)

        if (remaining_forward_entries <= 0 and
            remaining_backwards_entries <= 0) or \
                max_count > ResultSet._CACHE_LIMIT:

            # Total cache miss: remake it
            offset = max(0, self._position - max_count)
            logging.debug('remaking cache, offset: %r limit: %r' %
                          (offset, max_count * 2))
            jobjects, self._total_count = datastore.find(
                self._query,
                sorting=self._sorting,
                offset=offset,
                limit=ResultSet._CACHE_LIMIT,
                properties=PROPERTIES)

            self._cache.remove_all(self._cache)
            self._cache.append_all(jobjects)
            self._offset = offset

        elif remaining_forward_entries < 2 * max_count and \
                last_cached_entry < self._total_count:

            # Add one page to the end of cache
            logging.debug('appending one more page, offset: %r' %
                          last_cached_entry)
            jobjects, self._total_count = datastore.find(
                self._query,
                sorting=self._sorting,
                offset=last_cached_entry,
                limit=max_count,
                properties=PROPERTIES)

            # update cache
            self._cache.append_all(jobjects)

            # apply the cache limit
            objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
            if objects_excess > 0:
                self._offset += objects_excess
                self._cache.remove_all(self._cache[:objects_excess])

        elif remaining_backwards_entries < 2 * max_count and self._offset > 0:

            # Add one page to the beginning of cache
            limit = min(self._offset, max_count)
            self._offset = max(0, self._offset - max_count)

            logging.debug('prepending one more page, offset: %r limit: %r' %
                          (self._offset, limit))
            jobjects, self._total_count = datastore.find(
                self._query,
                sorting=self._sorting,
                offset=self._offset,
                limit=limit,
                properties=PROPERTIES)

            # update cache
            self._cache.prepend_all(jobjects)

            # apply the cache limit
            objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
            if objects_excess > 0:
                self._cache.remove_all(self._cache[-objects_excess:])
        else:
            logging.debug('cache hit and no need to grow the cache')

        first_pos = self._position - self._offset
        last_pos = self._position - self._offset + max_count
        return self._cache[first_pos:last_pos]
Example #40
 def get_description_buffer(self, title):
     objects, num = datastore.find({'title': title, 'TimeLapse': 'yes'})
     description = objects[0].metadata['description']
     description_buffer = gtk.TextBuffer(table=None)
     description_buffer.set_text(description)
     return description_buffer
Example #41
 def _find_starred(self):
     ''' Find all the favorites in the Journal. '''
     self._dsobjects, self._nobjects = datastore.find({'keep': '1'})
     return
Example #42
 def _find_starred(self):
     ''' Find all the favorites in the Journal. '''
     self.dsobjects, nobjects = datastore.find({'keep': '1'})
     _logger.debug('found %d starred items', nobjects)
Example #43
 def exportActual(self, fancy, amount, location):
     special_button = None
     if hacks['xo']:
         t= gui.Table()
         t.tr()
         if self.script.journal:
             t.td(gui.Image("games/broadway/images/dialog-folders.png"))
             t.tr()
             t.td(gui.Label(_("Folders")))
         else:
             t.td(gui.Image("games/broadway/images/dialog-journal.png"))
             t.tr()
             t.td(gui.Label(_("Journal")))
         special_button = gui.Button(t)
         def closeAndExportAs():
             dialog.close()
             self.script.refreshTheater()
             self.script.journal = not self.script.journal
             self.exportActual(fancy, amount, location)
         special_button.connect(gui.CLICK, closeAndExportAs)
     self.script.refreshTheater()
     if location == "Here":
         if fancy == "Fancy":
             valueType = ['.html']
             mimeType = "text/html"
         else:
             valueType = ['.txt']
             mimeType = "text/plain"
         if self.script.journal:
             exportName = self.script.metadata.title
             dialog = gui.JournalDialog(_("Export a script"),
                                        exportName,
                                        True,
                                        special_button=special_button)
             dialog.loadJournalItems(datastore.find({'mime_type' : mimeType})[0])
         else:
             exportName = filenameStrip(self.script.metadata.title) + valueType[0]
             dialog = gui.FileDialog(_("Export as %(fanciness)s") % {"fanciness" : _(fancy)},
                                _("Okay"), 
                                path=directories['export-folder'], 
                                filter=valueType,
                                default = exportName,
                                favorites = defaults['favorites'],
                                special_button=special_button)
         dialog.open()
         dialog.connect(gui.CLOSE, self.script.refreshTheater);
         dialog.connect(gui.CHANGE, self.exportFile, fancy, amount, dialog);
     else:
         teachers = getTeacherList()
         if teachers:
             dialog = gui.TeacherDialog()
             dialog.loadTeachers(teachers)
             dialog.connect(gui.CLOSE, self.script.refreshTheater);
             dialog.connect(gui.CHANGE, self.upload, fancy, amount, dialog);
             dialog.open()
         else:
             dialog= gui.ConfirmDialog(_("Connection Error"), [_("You are not connected to the internet."),
                                                          _("Connect and then click Okay below.")])
             def tryAgain():
                 dialog.close()
                 self.exportActual(fancy, amount, location)
             dialog.connect(gui.CLOSE, self.script.refreshTheater);
             dialog.okayButton.connect(gui.CLICK, tryAgain)
             dialog.open();
Example #44
    def exportActual(self, fancy, amount, location):
        special_button = None
        if hacks['xo']:
            t = gui.Table()
            t.tr()
            if self.script.journal:
                t.td(gui.Image("games/broadway/images/dialog-folders.png"))
                t.tr()
                t.td(gui.Label(_("Folders")))
            else:
                t.td(gui.Image("games/broadway/images/dialog-journal.png"))
                t.tr()
                t.td(gui.Label(_("Journal")))
            special_button = gui.Button(t)

            def closeAndExportAs():
                dialog.close()
                self.script.refreshTheater()
                self.script.journal = not self.script.journal
                self.exportActual(fancy, amount, location)

            special_button.connect(gui.CLICK, closeAndExportAs)
        self.script.refreshTheater()
        if location == "Here":
            if fancy == "Fancy":
                valueType = ['.html']
                mimeType = "text/html"
            else:
                valueType = ['.txt']
                mimeType = "text/plain"
            if self.script.journal:
                exportName = self.script.metadata.title
                dialog = gui.JournalDialog(_("Export a script"),
                                           exportName,
                                           True,
                                           special_button=special_button)
                dialog.loadJournalItems(
                    datastore.find({'mime_type': mimeType})[0])
            else:
                exportName = filenameStrip(
                    self.script.metadata.title) + valueType[0]
                dialog = gui.FileDialog(_("Export as %(fanciness)s") %
                                        {"fanciness": _(fancy)},
                                        _("Okay"),
                                        path=directories['export-folder'],
                                        filter=valueType,
                                        default=exportName,
                                        favorites=defaults['favorites'],
                                        special_button=special_button)
            dialog.open()
            dialog.connect(gui.CLOSE, self.script.refreshTheater)
            dialog.connect(gui.CHANGE, self.exportFile, fancy, amount, dialog)
        else:
            teachers = getTeacherList()
            if teachers:
                dialog = gui.TeacherDialog()
                dialog.loadTeachers(teachers)
                dialog.connect(gui.CLOSE, self.script.refreshTheater)
                dialog.connect(gui.CHANGE, self.upload, fancy, amount, dialog)
                dialog.open()
            else:
                dialog = gui.ConfirmDialog(_("Connection Error"), [
                    _("You are not connected to the internet."),
                    _("Connect and then click Okay below.")
                ])

                def tryAgain():
                    dialog.close()
                    self.exportActual(fancy, amount, location)

                dialog.connect(gui.CLOSE, self.script.refreshTheater)
                dialog.okayButton.connect(gui.CLICK, tryAgain)
                dialog.open()
Example #45
    def read(self, max_count):
        logging.debug('ResultSet.read position: %r' % self._position)

        if max_count * 5 > ResultSet._CACHE_LIMIT:
            raise RuntimeError('max_count (%i) too big for ResultSet._CACHE_LIMIT'
                               ' (%i).' % (max_count, ResultSet._CACHE_LIMIT))

        if self._position == -1:
            self.seek(0)

        if self._position < self._offset:
            remaining_forward_entries = 0
        else:
            remaining_forward_entries = self._offset + len(self._cache) - \
                                        self._position

        if self._position > self._offset + len(self._cache):
            remaining_backwards_entries = 0
        else:
            remaining_backwards_entries = self._position - self._offset

        last_cached_entry = self._offset + len(self._cache)

        if (remaining_forward_entries <= 0 and remaining_backwards_entries <= 0) or \
           max_count > ResultSet._CACHE_LIMIT:

            # Total cache miss: remake it
            offset = max(0, self._position - max_count)
            logging.debug('remaking cache, offset: %r limit: %r' % (offset, max_count * 2))
            jobjects, self._total_count = datastore.find(self._query,
                    sorting=self._sorting,
                    offset=offset,
                    limit=ResultSet._CACHE_LIMIT,
                    properties=PROPERTIES)

            self._cache.remove_all(self._cache)
            self._cache.append_all(jobjects)
            self._offset = offset
            
        elif remaining_forward_entries < 2 * max_count and \
             last_cached_entry < self._total_count:

            # Add one page to the end of cache
            logging.debug('appending one more page, offset: %r' % last_cached_entry)
            jobjects, self._total_count = datastore.find(self._query,
                                                         sorting=self._sorting,
                                                         offset=last_cached_entry,
                                                         limit=max_count,
                                                         properties=PROPERTIES)
            # update cache
            self._cache.append_all(jobjects)

            # apply the cache limit
            objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
            if objects_excess > 0:
                self._offset += objects_excess
                self._cache.remove_all(self._cache[:objects_excess])

        elif remaining_backwards_entries < 2 * max_count and self._offset > 0:

            # Add one page to the beginning of cache
            limit = min(self._offset, max_count)
            self._offset = max(0, self._offset - max_count)

            logging.debug('prepending one more page, offset: %r limit: %r' % 
                          (self._offset, limit))
            jobjects, self._total_count = datastore.find(self._query,
                    sorting=self._sorting,
                    offset=self._offset,
                    limit=limit,
                    properties=PROPERTIES)

            # update cache
            self._cache.prepend_all(jobjects)

            # apply the cache limit
            objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
            if objects_excess > 0:
                self._cache.remove_all(self._cache[-objects_excess:])
        else:
            logging.debug('cache hit and no need to grow the cache')

        first_pos = self._position - self._offset
        last_pos = self._position - self._offset + max_count
        return self._cache[first_pos:last_pos]
Example #46
"""
Created on Apr 8, 2011

@author: cgueret
"""

import os
import dbus
from sugar.datastore import datastore

if os.path.exists("/tmp/olpc-session-bus"):
    os.environ["DBUS_SESSION_BUS_ADDRESS"] = "unix:path=/tmp/olpc-session-bus"

if __name__ == "__main__":
    try:
        entry = datastore.create()
        entry.metadata["title"] = "Terminal-test"
        print entry.metadata.get_dictionary().copy()
        datastore.write(entry)

        query = {}
        query["query"] = "*Terminal*"
        objects, count = datastore.find(query, limit=2, sorting="-mtime")
        print objects, count
    except dbus.DBusException:
        print "ERROR: Unable to connect to the datastore.\n"
    except Exception, e:
        print "ERROR: %s" % (e)