Example #1
0
    def __writeback_test(self, filename, mimetype, prop, expectedKey=None):
        """
        Set a value in @prop for the @filename. Then ask tracker-extractor
        for metadata and check in the results dictionary if the property is there.

        Note: given the special translation of some property-names in the dictionary
        with extracted metadata, there is an optional parameter @expectedKey
        to specify what property to check in the dictionary. If None, then
        the @prop is used.
        """

        path = self.prepare_test_image(self.datadir_path(filename))
        initial_mtime = path.stat().st_mtime

        TEST_VALUE = prop.replace(":", "") + "test"
        SPARQL_TMPL = """
           DELETE { ?u %s ?v } WHERE { ?u nie:url '%s' ; %s ?v }
           INSERT { ?u %s '%s' }
           WHERE  { ?u nie:url '%s' }
        """
        self.tracker.update(
            SPARQL_TMPL %
            (prop, path.as_uri(), prop, prop, TEST_VALUE, path.as_uri()))

        log("Waiting for change on %s" % path)
        self.wait_for_file_change(path, initial_mtime)
        log("Got the change")

        results = get_tracker_extract_jsonld_output(path, mimetype)
        keyDict = expectedKey or prop
        self.assertIn(TEST_VALUE, results[keyDict])
    def test_reextraction(self):
        """Tests whether known files are still re-extracted on user request."""
        miner_fs = self.system.miner_fs
        store = self.system.store

        # Insert a valid file and wait extraction of its metadata.
        file_path = os.path.join(self.datadir, os.path.basename(VALID_FILE))
        shutil.copy(VALID_FILE, file_path)
        file_id, file_urn = store.await_resource_inserted(
            VALID_FILE_CLASS, title=VALID_FILE_TITLE)

        # Remove a key piece of metadata.
        store.update('DELETE { <%s> nie:title ?title }'
                     ' WHERE { <%s> nie:title ?title }' % (file_urn, file_urn))
        store.await_property_changed(VALID_FILE_CLASS, file_id, 'nie:title')
        assert not store.ask('ASK { <%s> nie:title ?title }' % file_urn)

        log("Sending re-index request")
        # Request re-indexing (same as `tracker index --file ...`)
        miner_fs.index_file('file://' + file_path)

        # The extractor should reindex the file and re-add the metadata that we
        # deleted, so we should see the nie:title property change.
        store.await_property_changed(VALID_FILE_CLASS, file_id, 'nie:title')

        title_result = store.query('SELECT ?title { <%s> nie:title ?title }' %
                                   file_urn)
        assert len(title_result) == 1
        self.assertEqual(title_result[0][0], VALID_FILE_TITLE)
    def test_reextraction(self):
        """Tests whether known files are still re-extracted on user request."""
        miner_fs = self.system.miner_fs
        store = self.system.store

        # Insert a valid file and wait extraction of its metadata.
        file_path = os.path.join(MINER_TMP_DIR, os.path.basename(VALID_FILE))
        shutil.copy(VALID_FILE, file_path)
        file_id, file_urn = store.await_resource_inserted(
            VALID_FILE_CLASS, title=VALID_FILE_TITLE)

        # Remove a key piece of metadata.
        store.update(
            'DELETE { <%s> nie:title ?title }'
            ' WHERE { <%s> nie:title ?title }' % (file_urn, file_urn))
        store.await_property_changed(file_id, 'nie:title')
        assert not store.ask('ASK { <%s> nie:title ?title }' % file_urn)

        log("Sending re-index request")
        # Request re-indexing (same as `tracker index --file ...`)
        miner_fs.index_file(uri(file_path))

        # The extractor should reindex the file and re-add the metadata that we
        # deleted, so we should see the nie:title property change.
        store.await_property_changed(file_id, 'nie:title')

        title_result = store.query('SELECT ?title { <%s> nie:title ?title }' % file_urn)
        assert len(title_result) == 1
        self.assertEqual(title_result[0][0], VALID_FILE_TITLE)
Example #4
0
    def test_01_camera_video (self):
        """
        Camera video recording simulation:

        1. Create resource in the store for the new file
        2. Write the file
        3. Wait for miner-fs to index it
        4. Ensure no duplicates are found
        """

        fileurn = "tracker://test_camera_video_01/" + str(random.randint (0,100))
        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_video ())
        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_video ())
        dest_fileuri = "file://" + dest_filepath

        self.insert_video_resource_info(fileurn, dest_fileuri)

        # Copy the video to the dest path
        self.slowcopy_file (origin_filepath, dest_filepath)
        assert os.path.exists (dest_filepath)
        dest_id, dest_urn = self.system.store.await_resource_inserted ('nmm:Video', dest_fileuri)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest_filepath)
        self.system.store.await_resource_deleted (dest_id)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
Example #5
0
    def test_01_camera_video(self):
        """
        Camera video recording simulation:

        1. Create resource in the store for the new file
        2. Write the file
        3. Wait for miner-fs to index it
        4. Ensure no duplicates are found
        """

        fileurn = "tracker://test_camera_video_01/" + str(
            random.randint(0, 100))
        origin_filepath = os.path.join(self.get_data_dir(),
                                       self.get_test_video())
        dest_filepath = os.path.join(self.get_dest_dir(),
                                     self.get_test_video())
        dest_fileuri = "file://" + dest_filepath

        self.insert_video_resource_info(fileurn, dest_fileuri)

        # Copy the video to the dest path
        self.slowcopy_file(origin_filepath, dest_filepath)
        assert os.path.exists(dest_filepath)
        dest_id, dest_urn = self.system.store.await_resource_inserted(
            NMM_VIDEO, dest_fileuri)
        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log("Remove and wait")
        os.remove(dest_filepath)
        self.system.store.await_resource_deleted(NMM_VIDEO, dest_id)
        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
    def test_02_copy_from_unmonitored_to_monitored(self):
        """
        Copy a file from an unmonitored directory to a monitored directory
        and verify that the database is updated accordingly
        """
        source = os.path.join(self.workdir, "test-no-monitored", "file0.txt")
        dest = os.path.join(self.workdir, "test-monitored", "file0.txt")
        shutil.copyfile(source, dest)

        dest_id, dest_urn = self.system.store.await_resource_inserted(
            NFO_DOCUMENT, self.uri(dest))

        # verify if miner indexed this file.
        result = self.__get_text_documents()
        self.assertEqual(len(result), 4)
        unpacked_result = [r[0] for r in result]
        self.assertIn(self.uri("test-monitored/file1.txt"), unpacked_result)
        self.assertIn(self.uri("test-monitored/dir1/file2.txt"),
                      unpacked_result)
        self.assertIn(self.uri("test-monitored/dir1/dir2/file3.txt"),
                      unpacked_result)
        self.assertIn(self.uri("test-monitored/file0.txt"), unpacked_result)

        # Clean the new file so the test directory is as before
        log("Remove and wait")
        os.remove(dest)
        self.system.store.await_resource_deleted(NFO_DOCUMENT, dest_id)
Example #7
0
    def __writeback_test (self, filename, mimetype, prop, expectedKey=None):
        """
        Set a value in @prop for the @filename. Then ask tracker-extractor
        for metadata and check in the results dictionary if the property is there.

        Note: given the special translation of some property-names in the dictionary
        with extracted metadata, there is an optional parameter @expectedKey
        to specify what property to check in the dictionary. If None, then
        the @prop is used.
        """

        # FIXME: filename is actually a URI! :(
        filename_real = filename[len('file://'):]
        initial_mtime = os.stat(filename_real).st_mtime

        TEST_VALUE = prop.replace (":","") + "test"
        SPARQL_TMPL = """
           INSERT { ?u %s '%s' }
           WHERE  { ?u nie:url '%s' }
        """ 
        self.__clean_property (prop, filename)
        self.tracker.update (SPARQL_TMPL % (prop, TEST_VALUE, filename))

        log("Waiting for change on %s" % filename_real)
        self.wait_for_file_change(filename_real, initial_mtime)
        log("Got the change")

        results = get_tracker_extract_jsonld_output (filename, mimetype)
        keyDict = expectedKey or prop
        self.assertIn (TEST_VALUE, results[keyDict])
        self.__clean_property (prop, filename, False)
Example #8
0
    def set_up_environment(self, settings=None, ontodir=None):
        """
        Sets up the XDG_*_HOME variables and makes sure the directories exist

        Settings should be a dict mapping schema names to dicts that hold the
        settings that should be changed in those schemas. The contents dicts
        should map key->value, where key is a key name and value is a suitable
        GLib.Variant instance.
        """
        self._basedir = tempfile.mkdtemp()

        self._dirs = {
            "XDG_DATA_HOME": self.xdg_data_home(),
            "XDG_CACHE_HOME": self.xdg_cache_home()
        }

        for var, directory in self._dirs.items():
            os.makedirs(directory)
            os.makedirs(os.path.join(directory, 'tracker'))
            os.environ[var] = directory

        if ontodir:
            helpers.log("export %s=%s" %
                        ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
            os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir

        for var, value in TEST_ENV_VARS.items():
            helpers.log("export %s=%s" % (var, value))
            os.environ[var] = value

        # Previous loop should have set DCONF_PROFILE to the test location
        if settings is not None:
            self._apply_settings(settings)
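
For illustration only, a settings dict in the format this docstring describes might look like the sketch below; the schema and key names are hypothetical placeholders, not values taken from these tests.

    from gi.repository import GLib

    example_settings = {
        # schema name -> {key name: GLib.Variant value}
        "org.example.TestSchema": {
            "enable-monitors": GLib.Variant("b", False),
            "index-directories": GLib.Variant("as", ["/tmp/test-data"]),
        },
    }
    # Passed as: system.set_up_environment(settings=example_settings, ontodir=None)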
Example #9
0
    def test_01_sync_audio_nb219946 (self):
        """
        Sync simulation (after fix for NB#219946):

        1. Create resource in the store for the new file, using blank nodes
        2. Write the file
        3. Wait for miner-fs to index it
        4. Ensure no duplicates are found
        """

        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_music ())
        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_music ())
        dest_fileuri = "file://" + dest_filepath

        log ("Synchronizing audio file in '%s'..." % (dest_filepath))

        # Insert new resource in the store
        insert = """
        DELETE { ?file a rdfs:Resource }
        WHERE  { ?file nie:url '%s'}

        INSERT { _:x a                       nie:DataObject,
                                             nmm:MusicPiece,
                                             nfo:Media,
                                             nfo:Audio,
                                             nie:InformationElement ;
                     nie:url                 '%s' ;
                     nmm:musicAlbum          <urn:album:SinCos> ;
                     nfo:duration            '15' ;
                     nmm:performer           <urn:artist:AbBaby> ;
                     nmm:trackNumber         '13' ;
                     nfo:averageAudioBitrate '32000' ;
                     nfo:genre               'Pop' ;
                     nfo:isContentEncrypted  'false' ;
                     nie:title               'Simply Juvenile'
        }

        INSERT { <urn:album:SinCos> a              nmm:MusicAlbum;
                                    nmm:albumTitle 'SinCos'
        }

        INSERT { <urn:artist:AbBaby> a              nmm:Artist;
                                     nmm:artistName 'AbBaby'
        }
        """ % (dest_fileuri, dest_fileuri)
        self.tracker.update (insert)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Copy the audio file to the dest path
        self.slowcopy_file (origin_filepath, dest_filepath)
        assert os.path.exists (dest_filepath)
        self.system.tracker_miner_fs_wait_for_idle (MINER_FS_IDLE_TIMEOUT)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest_filepath)
        self.system.tracker_miner_fs_wait_for_idle (MINER_FS_IDLE_TIMEOUT)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
Example #10
0
def get_tracker_extract_jsonld_output(filename, mime_type=None):
    """
    Runs `tracker-extract --file` to extract metadata from a file.
    """

    tracker_extract = os.path.join(cfg.TRACKER_EXTRACT_PATH)
    command = [
        tracker_extract, '--verbosity=0', '--output-format=json-ld', '--file',
        str(filename)
    ]
    if mime_type is not None:
        command.extend(['--mime', mime_type])

    # We depend on parsing the output, so verbosity MUST be 0.
    env = os.environ.copy()
    env['TRACKER_VERBOSITY'] = '0'
    # Tell GStreamer not to fork to create the registry
    env['GST_REGISTRY_FORK'] = 'no'

    log('Running: %s' % ' '.join(command))
    try:
        p = subprocess.Popen(command,
                             env=env,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise RuntimeError(
                "Did not find tracker-extract binary. Is the 'extract' option disabled?"
            )
        else:
            raise RuntimeError("Error running tracker-extract: %s" % (e))
    stdout, stderr = p.communicate()

    if p.returncode != 0:
        raise RuntimeError(
            "tracker-extract returned non-zero exit code: %s\n"
            "Error output:\n%s\n" %
            (p.returncode, stderr.decode('unicode-escape').strip()))

    error_output = stderr.decode('unicode-escape').strip()
    if len(error_output) > 0:
        log("Error output from tracker-extract:\n%s" % error_output)

    try:
        output = stdout.decode('utf-8')

        if len(output.strip()) == 0:
            raise RuntimeError("tracker-extract didn't return any data.\n"
                               "Error output was: %s" % error_output)

        data = json.loads(output)
    except ValueError as e:
        raise RuntimeError(
            "tracker-extract did not return valid JSON data: %s\n"
            "Output was: %s" % (e, output))

    return data
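
As a usage sketch for the helper above: it takes a file path plus an optional MIME type and returns the parsed JSON-LD dictionary (the path below is a placeholder, not a file shipped with the suite).

    # Hypothetical call with a placeholder path.
    metadata = get_tracker_extract_jsonld_output('/tmp/sample.jpg', 'image/jpeg')
    print(metadata.get('nie:title'))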
Example #11
0
    def setUpClass (self):
        #print "Starting the daemon in test mode"
        self.__prepare_directories ()
        
        self.system = TrackerSystemAbstraction ()

        self.system.tracker_writeback_testing_start (CONF_OPTIONS)
        # Returns when ready
        log ("Ready to go!")
Example #12
0
    def setUpClass(self):
        #print "Starting the daemon in test mode"
        self.__prepare_directories()

        self.system = TrackerSystemAbstraction()

        self.system.tracker_writeback_testing_start(CONF_OPTIONS)
        # Returns when ready
        log("Ready to go!")
    def test_01_NB217627_content_created_date(self):
        """
        NB#217627 - Order of results is different when an image is marked as favorite.
        """
        jpeg_path = self.prepare_test_image(self.datadir_path('writeback-test-1.jpeg'))
        tif_path = self.prepare_test_image(self.datadir_path('writeback-test-2.tif'))
        png_path = self.prepare_test_image(self.datadir_path('writeback-test-4.png'))

        query_images = """
          SELECT nie:url(?u) ?contentCreated WHERE {
              ?u a nfo:Visual ;
              nfo:fileLastModified ?contentCreated
          } ORDER BY ?contentCreated
          """
        results = self.tracker.query(query_images)
        self.assertEqual(len(results), 3, results)

        log("Waiting 2 seconds to ensure there is a noticiable difference in the timestamp")
        time.sleep(2)

        initial_mtime = jpeg_path.stat().st_mtime

        # This triggers the writeback
        mark_as_favorite = """
         INSERT {
           ?u nao:hasTag nao:predefined-tag-favorite .
         } WHERE {
           ?u nie:url <%s> .
         }
        """ % jpeg_path.as_uri()
        self.tracker.update(mark_as_favorite)
        log("Setting favorite in <%s>" % jpeg_path.as_uri())

        self.wait_for_file_change(jpeg_path, initial_mtime)

        # Check the value is written in the file
        metadata = get_tracker_extract_jsonld_output(jpeg_path, "")

        tags = metadata.get('nao:hasTag', [])
        tag_names = [tag['nao:prefLabel'] for tag in tags]
        self.assertIn(self.favorite, tag_names,
                      "Tag hasn't been written in the file")

        # Now check the modification date of the files and it should be the same :)
        new_results = self.tracker.query(query_images)
        # for (uri, date) in new_results:
        ##     print "Checking dates of <%s>" % uri
        ##     previous_date = convenience_dict[uri]
        ##     print "Before: %s \nAfter : %s" % (previous_date, date)
        ##     self.assertEquals (date, previous_date, "File <%s> has change its contentCreated date!" % uri)

        # Indeed the order of the results should be the same
        for i in range(0, len(results)):
            self.assertEqual(results[i][0], new_results[i][0], "Order of the files is different")
            self.assertEqual(results[i][1], new_results[i][1], "Date has changed in file <%s>" % results[i][0])
    def test_01_NB217627_content_created_date(self):
        """
        NB#217627 - Order of results is different when an image is marked as favorite.
        """
        query_images = """
          SELECT nie:url(?u) ?contentCreated WHERE {
              ?u a nfo:Visual ;
              nfo:fileLastModified ?contentCreated
          } ORDER BY ?contentCreated
          """
        results = self.tracker.query(query_images)
        self.assertEquals(len(results), 3, results)

        log("Waiting 2 seconds to ensure there is a noticiable difference in the timestamp"
            )
        time.sleep(2)

        url = self.get_test_filename_jpeg()

        filename = url[len('file://'):]
        initial_mtime = os.stat(filename).st_mtime

        # This triggers the writeback
        mark_as_favorite = """
         INSERT {
           ?u nao:hasTag nao:predefined-tag-favorite .
         } WHERE {
           ?u nie:url <%s> .
         }
        """ % url
        self.tracker.update(mark_as_favorite)
        log("Setting favorite in <%s>" % url)

        self.wait_for_file_change(filename, initial_mtime)

        # Check the value is written in the file
        metadata = get_tracker_extract_jsonld_output(filename, "")
        self.assertIn(self.favorite, metadata["nao:hasTag"],
                      "Tag hasn't been written in the file")

        # Now check the modification date of the files and it should be the same :)
        new_results = self.tracker.query(query_images)
        ## for (uri, date) in new_results:
        ##     print "Checking dates of <%s>" % uri
        ##     previous_date = convenience_dict[uri]
        ##     print "Before: %s \nAfter : %s" % (previous_date, date)
        ##     self.assertEquals (date, previous_date, "File <%s> has change its contentCreated date!" % uri)

        # Indeed the order of the results should be the same
        for i in range(0, len(results)):
            self.assertEquals(results[i][0], new_results[i][0],
                              "Order of the files is different")
            self.assertEquals(results[i][1], new_results[i][1],
                              "Date has changed in file <%s>" % results[i][0])
Example #15
0
 def test_01_stopwords (self):
     stopwords = self.__get_some_stopwords ()
     TEXT = " ".join (["this a completely normal text automobile"] + stopwords)
     
     self.set_text (TEXT)
     results = self.search_word ("automobile")
     self.assertEquals (len (results), 1)
     log ("Stopwords: %s" % stopwords)
     for i in range (0, len (stopwords)):
         results = self.search_word (stopwords[i])
         self.assertEquals (len (results), 0)
Example #16
0
 def tracker_store_restart_with_new_ontologies(self, ontodir):
     self.store.stop()
     if ontodir:
         helpers.log("[Conf] Setting %s - %s" %
                     ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
         os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
     try:
         self.store.start()
     except GLib.Error as e:
         raise UnableToBootException("Unable to boot the store \n(" +
                                     str(e) + ")")
Example #17
0
 def test_01_stopwords (self):
     stopwords = self.__get_some_stopwords ()
     TEXT = " ".join (["this a completely normal text automobile"] + stopwords)
     
     self.set_text (TEXT)
     results = self.search_word ("automobile")
     self.assertEquals (len (results), 1)
     log ("Stopwords: %s" % stopwords)
     for i in range (0, len (stopwords)):
         results = self.search_word (stopwords[i])
         self.assertEquals (len (results), 0)
Example #18
0
 def slowcopy_file_fd (self, src, fdest, rate=SLOWCOPY_RATE):
     """
     @rate: bytes per 100ms
     """
     log ("Copying slowly\n '%s' to\n '%s'" % (src, fdest.name))
     fsrc = open (src, 'rb')
     buffer_ = fsrc.read (rate)
     while (buffer_ != ""):
         fdest.write (buffer_)
         time.sleep (0.1)
         buffer_ = fsrc.read (rate)
     fsrc.close ()
Example #19
0
    def test_01_camera_picture (self):
        """
        Camera simulation:

        1. Create resource in the store for the new file
        2. Write the file
        3. Wait for miner-fs to index it
        4. Ensure no duplicates are found
        """

        fileurn = "tracker://test_camera_picture_01/" + str(random.randint (0,100))
        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_image ())
        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_image ())
        dest_fileuri = "file://" + dest_filepath

        # Insert new resource in the store, including nie:mimeType and nie:url
        insert = """
        INSERT { <%s> a nie:InformationElement,
                        nie:DataObject,
                        nfo:Image,
                        nfo:Media,
                        nfo:Visual,
                        nmm:Photo
        }

        DELETE { <%s> nie:mimeType ?_1 }
        WHERE { <%s> nie:mimeType ?_1 }

        INSERT { <%s> a            rdfs:Resource ;
                      nie:mimeType \"image/jpeg\"
        }

        DELETE { <%s> nie:url ?_2 }
        WHERE { <%s> nie:url ?_2 }

        INSERT { <%s> a       rdfs:Resource ;
                      nie:url \"%s\"
        }
        """ % (fileurn, fileurn, fileurn, fileurn, fileurn, fileurn, fileurn, dest_fileuri)
        self.tracker.update (insert)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Copy the image to the dest path
        self.slowcopy_file (origin_filepath, dest_filepath)
        assert os.path.exists (dest_filepath)
        self.system.tracker_miner_fs_wait_for_idle (MINER_FS_IDLE_TIMEOUT)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest_filepath)
        self.system.tracker_miner_fs_wait_for_idle (MINER_FS_IDLE_TIMEOUT)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
Example #20
0
 def slowcopy_file_fd(self, src, fdest, rate=SLOWCOPY_RATE):
     """
     @rate: bytes per 100ms
     """
     log("Copying slowly\n '%s' to\n '%s'" % (src, fdest.name))
     fsrc = open(src, 'rb')
     buffer_ = fsrc.read(rate)
     while (buffer_ != b""):
         fdest.write(buffer_)
         time.sleep(0.1)
         buffer_ = fsrc.read(rate)
     fsrc.close()
Example #21
0
    def test_02_camera_video_geolocation(self):
        """
        Camera simulation:

        1. Create resource in the store for the new file
        2. Set slo:location
        3. Write the file
        4. Wait for miner-fs to index it
        5. Ensure no duplicates are found
        """

        fileurn = "tracker://test_camera_video_02/" + str(
            random.randint(0, 100))
        origin_filepath = os.path.join(self.get_data_dir(),
                                       self.get_test_video())
        dest_filepath = os.path.join(self.get_dest_dir(),
                                     self.get_test_video())
        dest_fileuri = "file://" + dest_filepath

        geolocationurn = "tracker://test_camera_video_02_geolocation/" + str(
            random.randint(0, 100))
        postaladdressurn = "tracker://test_camera_video_02_postaladdress/" + str(
            random.randint(0, 100))

        self.insert_video_resource_info(fileurn, dest_fileuri)

        # FIRST, open the file for writing, and just write some garbage, to simulate that
        # we already started recording the video...
        fdest = open(dest_filepath, 'wb')
        fdest.write("some garbage written here")
        fdest.write("to simulate we're recording something...")
        fdest.seek(0)

        # SECOND, set slo:location
        self.insert_dummy_location_info(fileurn, geolocationurn,
                                        postaladdressurn)

        # THIRD, start copying the video to the dest path
        self.slowcopy_file_fd(origin_filepath, fdest)
        fdest.close()
        assert os.path.exists(dest_filepath)

        # FOURTH, ensure we have only 1 resource
        dest_id, dest_urn = self.system.store.await_resource_inserted(
            NMM_VIDEO, dest_fileuri)
        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log("Remove and wait")
        os.remove(dest_filepath)
        self.system.store.await_resource_deleted(NMM_VIDEO, dest_id)
        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
Example #22
0
 def search_word(self, word):
     """
     Return list of URIs with the word in them
     """
     log("Search for: %s" % word)
     results = self.tracker.query("""
             SELECT ?url WHERE {
               ?u a nfo:TextDocument ;
                   nie:url ?url ;
                   fts:match '%s'.
              }
              """ % (word))
     return [r[0] for r in results]
Example #23
0
 def search_word (self, word):
     """
     Return list of URIs with the word in them
     """
     log ("Search for: %s" % word)
     results = self.tracker.query ("""
             SELECT ?url WHERE {
               ?u a nfo:TextDocument ;
                   nie:url ?url ;
                   fts:match '%s'.
              }
              """ % (word))
     return [r[0] for r in results]
Example #24
0
    def test_01_NB217627_content_created_date (self):
        """
        NB#217627 - Order of results is different when an image is marked as favorite.
        """
        query_images = """
          SELECT nie:url(?u) ?contentCreated WHERE {
              ?u a nfo:Visual ;
              nfo:fileLastModified ?contentCreated
          } ORDER BY ?contentCreated
          """
        results = self.tracker.query (query_images)
        self.assertEquals (len (results), 3, results)

        log ("Waiting 2 seconds to ensure there is a noticiable difference in the timestamp")
        time.sleep (2)

        url = self.get_test_filename_jpeg ()

        filename = url[len('file://'):]
        initial_mtime = os.stat(filename).st_mtime

        # This triggers the writeback
        mark_as_favorite = """
         INSERT {
           ?u nao:hasTag nao:predefined-tag-favorite .
         } WHERE {
           ?u nie:url <%s> .
         }
        """ % url
        self.tracker.update (mark_as_favorite)
        log ("Setting favorite in <%s>" % url)

        self.wait_for_file_change (filename, initial_mtime)

        # Check the value is written in the file
        metadata = get_tracker_extract_output (filename, "")
        self.assertIn (self.favorite, metadata ["nao:hasTag"],
                       "Tag hasn't been written in the file")
        
        # Now check the modification date of the files and it should be the same :)
        new_results = self.tracker.query (query_images)
        ## for (uri, date) in new_results:
        ##     print "Checking dates of <%s>" % uri
        ##     previous_date = convenience_dict[uri]
        ##     print "Before: %s \nAfter : %s" % (previous_date, date)
        ##     self.assertEquals (date, previous_date, "File <%s> has change its contentCreated date!" % uri)

        # Indeed the order of the results should be the same
        for i in range (0, len (results)):
            self.assertEquals (results[i][0], new_results[i][0], "Order of the files is different")
            self.assertEquals (results[i][1], new_results[i][1], "Date has changed in file <%s>" % results[i][0])
Example #25
0
def get_tracker_extract_output(filename):
    """
    Runs `tracker-extract --file` to extract metadata from a file.
    """

    tracker_extract = os.path.join (cfg.EXEC_PREFIX, 'tracker-extract')
    command = [tracker_extract, '--file', filename]

    try:
        log ('Running: %s' % ' '.join(command))
        output = subprocess.check_output (command)
    except subprocess.CalledProcessError as e:
        raise Exception("Error %i from tracker-extract, output: %s" %
                        (e.returncode, e.output))

    parser = ExtractorParser()
    return parser.parse_tracker_extract_output(output)
Example #26
0
def get_tracker_extract_output(filename):
    """
    Runs `tracker-extract --file` to extract metadata from a file.
    """

    tracker_extract = os.path.join(cfg.EXEC_PREFIX, 'tracker-extract')
    command = [tracker_extract, '--file', filename]

    try:
        log('Running: %s' % ' '.join(command))
        output = subprocess.check_output(command)
    except subprocess.CalledProcessError as e:
        raise Exception("Error %i from tracker-extract, output: %s" %
                        (e.returncode, e.output))

    parser = ExtractorParser()
    return parser.parse_tracker_extract_output(output)
    def await_resource_deleted (self, id, fail_message = None):
        """
        Block until we are notified of a resources deletion
        """
        assert (self.deletes_match_function == None)

        def match_cb (deletes_list, in_main_loop = True):
            matched = False
            filtered_list = []

            #print "Looking for %i in " % id, deletes_list, "\n"

            for delete in deletes_list:
                if delete[1] == id:
                    matched = True
                else:
                    filtered_list += [delete]

            if matched and in_main_loop:
                glib.source_remove (self.graph_updated_timeout_id)
                self.graph_updated_timeout_id = 0
                self.deletes_match_function = None

            self.store.loop.quit ()

            return (matched, filtered_list)

        log ("Await deletion of %i (%i existing)" % (id, len (self.deletes_list)))

        (existing_match, self.deletes_list) = match_cb (self.deletes_list, False)

        if not existing_match:
            self.graph_updated_timeout_id = glib.timeout_add_seconds (REASONABLE_TIMEOUT, self._timeout_cb)
            self.deletes_match_function = match_cb

            # Run the event loop until the correct notification arrives
            self.store.loop.run ()

        if self.match_timed_out:
            if fail_message is not None:
                self.fail (fail_message)
            else:
                self.fail ("Resource %i has not been deleted." % id)

        return
Example #28
0
    def test_02_camera_video_geolocation (self):
        """
        Camera simulation:

        1. Create resource in the store for the new file
        2. Set slo:location
        3. Write the file
        4. Wait for miner-fs to index it
        5. Ensure no duplicates are found
        """

        fileurn = "tracker://test_camera_video_02/" + str(random.randint (0,100))
        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_video ())
        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_video ())
        dest_fileuri = "file://" + dest_filepath

        geolocationurn = "tracker://test_camera_video_02_geolocation/" + str(random.randint (0,100))
        postaladdressurn = "tracker://test_camera_video_02_postaladdress/" + str(random.randint (0,100))

        self.insert_video_resource_info (fileurn, dest_fileuri)

        # FIRST, open the file for writing, and just write some garbage, to simulate that
        # we already started recording the video...
        fdest = open (dest_filepath, 'wb')
        fdest.write ("some garbage written here")
        fdest.write ("to simulate we're recording something...")
        fdest.seek (0)

        # SECOND, set slo:location
        self.insert_dummy_location_info (fileurn, geolocationurn, postaladdressurn)

        # THIRD, start copying the video to the dest path
        self.slowcopy_file_fd (origin_filepath, fdest)
        fdest.close ()
        assert os.path.exists (dest_filepath)

        # FOURTH, ensure we have only 1 resource
        dest_id, dest_urn = self.system.store.await_resource_inserted ('nmm:Video', dest_fileuri)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest_filepath)
        self.system.store.await_resource_deleted (dest_id)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
Example #29
0
    def __prepare_directories (self):
        if (os.path.exists (os.getcwd() + "/test-writeback-data")):
            # Use local directory if available
            datadir = os.getcwd() + "/test-writeback-data"
        else:
            datadir = os.path.join (cfg.DATADIR, "tracker-tests",
                                    "test-writeback-data")

        if not os.path.exists(WRITEBACK_TMP_DIR):
            os.makedirs(WRITEBACK_TMP_DIR)
        else:
            if not os.path.isdir(WRITEBACK_TMP_DIR):
                raise Exception("%s exists already and is not a directory" % WRITEBACK_TMP_DIR)

        for testfile in [TEST_FILE_JPEG, TEST_FILE_PNG,TEST_FILE_TIFF]:
            origin = os.path.join (datadir, testfile)
            log ("Copying %s -> %s" % (origin, WRITEBACK_TMP_DIR))
            shutil.copy (origin, WRITEBACK_TMP_DIR)
    def setUpClass (self):
        log ("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists (MINER_TMP_DIR):
            shutil.rmtree (MINER_TMP_DIR)
        os.makedirs (MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction ()
        self.system.set_up_environment (CONF_OPTIONS, None)
        self.store = StoreHelper ()
        self.store.start ()
        self.miner_fs = MinerFsHelper ()
        self.miner_fs.start ()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper ()
        self.extractor.start ()
Example #31
0
    def reset(self):
        """
        Remove all stored values, resetting configuration to the default.

        This can be done by removing the entire 'trackertest' configuration
        database.
        """

        self._check_using_correct_dconf_profile()

        # XDG_CONFIG_HOME is useless, so we use HOME. This code should not be
        # needed unless for some reason the test is not being run via the
        # 'test-runner.sh' script.
        dconf_db = os.path.join(os.environ["HOME"], ".config", "dconf",
                                "trackertest")
        if os.path.exists(dconf_db):
            log("[Conf] Removing dconf database: " + dconf_db)
            os.remove(dconf_db)
Example #32
0
    def setUpClass(self):
        log("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists(MINER_TMP_DIR):
            shutil.rmtree(MINER_TMP_DIR)
        os.makedirs(MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction()
        self.system.set_up_environment(CONF_OPTIONS, None)
        self.store = StoreHelper()
        self.store.start()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper()
        self.extractor.start()

        self.miner_fs = MinerFsHelper()
        self.miner_fs.start()
Example #33
0
def get_tracker_extract_jsonld_output(filename, mime_type=None):
    """
    Runs `tracker-extract --file` to extract metadata from a file.
    """

    tracker_extract = os.path.join(cfg.TRACKER_EXTRACT_PATH)
    command = [
        tracker_extract, '--verbosity=0', '--output-format=json-ld', '--file',
        filename
    ]
    if mime_type is not None:
        command.extend(['--mime', mime_type])

    # We depend on parsing the output, so verbosity MUST be 0.
    env = os.environ.copy()
    env['TRACKER_VERBOSITY'] = '0'

    log('Running: %s' % ' '.join(command))
    p = subprocess.Popen(command,
                         env=env,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()

    if p.returncode != 0:
        raise RuntimeError(
            "tracker-extract returned non-zero exit code: %s\n"
            "Error output:\n%s\n" %
            (p.returncode, stderr.decode('unicode-escape').strip()))

    if len(stderr) > 0:
        log("Error output from tracker-extract:\n%s" %
            stderr.decode('unicode-escape').strip())

    try:
        output = stdout.decode('utf-8')
        data = json.loads(output)
    except ValueError as e:
        raise RuntimeError(
            "tracker-extract did not return valid JSON data: %s\n"
            "Output was: %s" % (e, output))

    return data
Example #34
0
    def __prepare_directories(self):
        if (os.path.exists(os.getcwd() + "/test-writeback-data")):
            # Use local directory if available
            datadir = os.getcwd() + "/test-writeback-data"
        else:
            datadir = os.path.join(cfg.DATADIR, "tracker-tests",
                                   "test-writeback-data")

        if not os.path.exists(WRITEBACK_TMP_DIR):
            os.makedirs(WRITEBACK_TMP_DIR)
        else:
            if not os.path.isdir(WRITEBACK_TMP_DIR):
                raise Exception("%s exists already and is not a directory" %
                                WRITEBACK_TMP_DIR)

        for testfile in [TEST_FILE_JPEG, TEST_FILE_PNG, TEST_FILE_TIFF]:
            origin = os.path.join(datadir, testfile)
            log("Copying %s -> %s" % (origin, WRITEBACK_TMP_DIR))
            shutil.copy(origin, WRITEBACK_TMP_DIR)
Example #35
0
    def setUp(self):
        # Create temp directory to monitor
        if (os.path.exists(APPLICATIONS_TMP_DIR)):
            shutil.rmtree(APPLICATIONS_TMP_DIR)
        os.makedirs(APPLICATIONS_TMP_DIR)

        # Use local directory if available. Installation otherwise.
        if os.path.exists(os.path.join(os.getcwd(), "test-apps-data")):
            self.datadir = os.path.join(os.getcwd(), "test-apps-data")
        else:
            self.datadir = os.path.join(cfg.DATADIR, "tracker-tests",
                                        "test-apps-data")

        self.system = TrackerSystemAbstraction()
        self.system.tracker_all_testing_start(CONF_OPTIONS)

        # Returns when ready
        self.tracker = self.system.store

        log("Ready to go!")
Example #36
0
def create_test_flac(path, duration, timeout=10):
    """
    Create a .flac audio file for testing purposes.

    FLAC audio doesn't compress test data particularly efficiently, so
    committing an audio file more than a few seconds long to Git is not
    practical. This function creates a .flac file containing a test tone.
    The 'duration' parameter sets the length of the tone in seconds.

    The function is guaranteed to return or raise an exception within the
    number of seconds given in the 'timeout' parameter.
    """

    Gst.init([])

    num_buffers = math.ceil(duration * 44100 / 1024.0)

    pipeline_src = ' ! '.join([
        'audiotestsrc num-buffers=%s samplesperbuffer=1024' % num_buffers,
        'capsfilter caps="audio/x-raw,rate=44100"',
        'flacenc',
        'filesink location=%s' % path,
    ])

    log("Running pipeline: %s" % pipeline_src)
    pipeline = Gst.parse_launch(pipeline_src)
    ret = pipeline.set_state(Gst.State.PLAYING)

    msg = pipeline.get_bus().poll(Gst.MessageType.ERROR | Gst.MessageType.EOS,
                                  timeout * Gst.SECOND)
    if msg and msg.type == Gst.MessageType.EOS:
        pass
    elif msg and msg.type == Gst.MessageType.ERROR:
        raise RuntimeError(msg.parse_error())
    elif msg:
        raise RuntimeError("Got unexpected GStreamer message %s" % msg.type)
    else:
        raise RuntimeError(
            "Timeout generating test audio file after %i seconds" % timeout)

    pipeline.set_state(Gst.State.NULL)
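
A minimal usage sketch for the helper above (the output path is a placeholder):

    # Generate a five-second test tone; raises RuntimeError if GStreamer
    # reports an error or exceeds the default 10-second timeout.
    create_test_flac('/tmp/test-tone.flac', duration=5)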
Example #37
0
    def setUpClass (self):
        #print "Starting the daemon in test mode"
        self.__prepare_directories ()
        
        self.system = TrackerSystemAbstraction ()

        self.system.tracker_writeback_testing_start (CONF_OPTIONS)

        def await_resource_extraction(url):
            # Make sure a resource has been crawled by the FS miner and by
            # tracker-extract. The extractor adds nie:contentCreated for
            # image resources, so once this property is set we know the
            # extraction is complete.
            self.system.store.await_resource_inserted('nfo:Image', url=url, required_property='nfo:width')

        await_resource_extraction (self.get_test_filename_jpeg())
        await_resource_extraction (self.get_test_filename_tiff())
        await_resource_extraction (self.get_test_filename_png())

        # Returns when ready
        log ("Ready to go!")
Example #38
0
    def set_up_environment(self, settings=None, ontodir=None):
        """
        Sets up the XDG_*_HOME variables and makes sure the directories exist

        Settings should be a dict mapping schema names to dicts that hold the
        settings that should be changed in those schemas. The contents dicts
        should map key->value, where key is a key name and value is a suitable
        GLib.Variant instance.
        """

        for var, directory in TEST_ENV_DIRS.iteritems():
            helpers.log("export %s=%s" % (var, directory))
            self.__recreate_directory(directory)
            os.environ[var] = directory

        for directory in EXTRA_DIRS:
            self.__recreate_directory(directory)

        if ontodir:
            helpers.log("export %s=%s" %
                        ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
            os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir

        for var, value in TEST_ENV_VARS.iteritems():
            helpers.log("export %s=%s" % (var, value))
            os.environ[var] = value

        # Previous loop should have set DCONF_PROFILE to the test location
        if settings is not None:
            self._apply_settings(settings)
Example #39
0
    def setUpClass(self):
        #print "Starting the daemon in test mode"
        self.__prepare_directories()

        self.system = TrackerSystemAbstraction()

        self.system.tracker_writeback_testing_start(CONF_OPTIONS)

        def await_resource_extraction(url):
            # Make sure a resource has been crawled by the FS miner and by
            # tracker-extract. The extractor adds nie:contentCreated for
            # image resources, so once this property is set we know the
            # extraction is complete.
            self.system.store.await_resource_inserted(
                NFO_IMAGE, url=url, required_property='nfo:width')

        await_resource_extraction(self.get_test_filename_jpeg())
        await_resource_extraction(self.get_test_filename_tiff())
        await_resource_extraction(self.get_test_filename_png())

        # Returns when ready
        log("Ready to go!")
Example #40
0
    def prepare_test_file(self, path, expect_mime_type, expect_property):
        """Copies a file into the test working directory.

        The function waits until the file has been seen by the Tracker
        miner before returning.

        """
        log("Copying %s -> %s" % (path, self.workdir))
        shutil.copy(path, self.workdir)

        output_path = pathlib.Path(
            os.path.join(self.workdir, os.path.basename(path)))

        # Make sure a resource has been crawled by the FS miner and by
        # tracker-extract. The extractor adds nie:contentCreated for
        # image resources, so once this property is set we know the
        # extraction is complete.
        self.system.store.await_resource_inserted(
            expect_mime_type,
            url=output_path.as_uri(),
            required_property=expect_property)
        return output_path
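
A hypothetical call to the helper above, from inside a test case; the class and property names mirror ones used elsewhere in these tests, and the source path is a placeholder.

    # Copy a JPEG (placeholder path) into the working directory and block
    # until tracker-extract has set nfo:width on the nfo:Image resource.
    photo_path = self.prepare_test_file('/path/to/test-image.jpg',
                                        'nfo:Image', 'nfo:width')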
Example #41
0
    def __prepare_directories(self):
        #
        #     ~/test-writeback-monitored/
        #

        for d in ["test-writeback-monitored"]:
            directory = os.path.join(WRITEBACK_TMP_DIR, d)
            if (os.path.exists(directory)):
                shutil.rmtree(directory)
            os.makedirs(directory)

        if (os.path.exists(os.getcwd() + "/test-writeback-data")):
            # Use local directory if available
            datadir = os.getcwd() + "/test-writeback-data"
        else:
            datadir = os.path.join(cfg.DATADIR, "tracker-tests",
                                   "test-writeback-data")

        for testfile in [TEST_FILE_JPEG, TEST_FILE_PNG, TEST_FILE_TIFF]:
            origin = os.path.join(datadir, testfile)
            log("Copying %s -> %s" % (origin, WRITEBACK_TMP_DIR))
            shutil.copy(origin, WRITEBACK_TMP_DIR)
            time.sleep(2)
Example #42
0
    def test_02_copy_from_unmonitored_to_monitored (self):
        """
        Copy a file from an unmonitored directory to a monitored directory
        and verify that the database is updated accordingly
        """
        source = os.path.join (MINER_TMP_DIR, "test-no-monitored", "file0.txt")
        dest = os.path.join (MINER_TMP_DIR, "test-monitored", "file0.txt")
        shutil.copyfile (source, dest)
        self.system.tracker_miner_fs_wait_for_idle ()

        # verify if miner indexed this file.
        result = self.__get_text_documents ()
        self.assertEquals (len (result), 4)
        unpacked_result = [ r[0] for r in result]
        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
        self.assertIn ( uri ("test-monitored/file0.txt"), unpacked_result)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest)
        self.system.tracker_miner_fs_wait_for_idle ()
Example #43
0
    def setUp (self):
        # Create temp directory to monitor
        if (os.path.exists (APPLICATIONS_TMP_DIR)):
            shutil.rmtree (APPLICATIONS_TMP_DIR)
        os.makedirs (APPLICATIONS_TMP_DIR)

        # Use local directory if available. Installation otherwise.
        if os.path.exists (os.path.join (os.getcwd (),
                                         "test-apps-data")):
            self.datadir = os.path.join (os.getcwd (),
                                         "test-apps-data")
        else:
            self.datadir = os.path.join (cfg.DATADIR,
                                         "tracker-tests",
                                         "test-apps-data")


        self.system = TrackerSystemAbstraction ()
        self.system.tracker_all_testing_start (CONF_OPTIONS)

        # Returns when ready
        self.tracker = self.system.store

        log ("Ready to go!")
Example #44
0
    def __prepare_directories (self):
        #
        #     ~/test-writeback-monitored/
        #
        
        for d in ["test-writeback-monitored"]:
            directory = os.path.join (WRITEBACK_TMP_DIR, d)
            if (os.path.exists (directory)):
                shutil.rmtree (directory)
            os.makedirs (directory)


        if (os.path.exists (os.getcwd() + "/test-writeback-data")):
            # Use local directory if available
            datadir = os.getcwd() + "/test-writeback-data"
        else:
            datadir = os.path.join (cfg.DATADIR, "tracker-tests",
                                    "test-writeback-data")

        for testfile in [TEST_FILE_JPEG, TEST_FILE_PNG,TEST_FILE_TIFF]:
            origin = os.path.join (datadir, testfile)
            log ("Copying %s -> %s" % (origin, WRITEBACK_TMP_DIR))
            shutil.copy (origin, WRITEBACK_TMP_DIR)
            time.sleep (2)
Example #45
0
 def tearDownClass(self):
     #print "Stopping the daemon in test mode (Doing nothing now)"
     self.system.tracker_writeback_testing_stop()
     log("Test finished")
Example #46
0
    def test_02_camera_picture_geolocation (self):
        """
        Camera simulation:

        1. Create resource in the store for the new file
        2. Set slo:location
        3. Write the file
        4. Wait for miner-fs to index it
        5. Ensure no duplicates are found
        """

        fileurn = "tracker://test_camera_picture_02/" + str(random.randint (0,100))
        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_image ())
        dest_fileuri = "file://" + dest_filepath

        geolocationurn = "tracker://test_camera_picture_02_geolocation/" + str(random.randint (0,100))
        postaladdressurn = "tracker://test_camera_picture_02_postaladdress/" + str(random.randint (0,100))

        # Insert new resource in the store, including nie:mimeType and nie:url
        insert = """
        INSERT { <%s> a nie:InformationElement,
                        nie:DataObject,
                        nfo:Image,
                        nfo:Media,
                        nfo:Visual,
                        nmm:Photo
        }

        DELETE { <%s> nie:mimeType ?_1 }
        WHERE { <%s> nie:mimeType ?_1 }

        INSERT { <%s> a            rdfs:Resource ;
                      nie:mimeType \"image/jpeg\"
        }

        DELETE { <%s> nie:url ?_2 }
        WHERE { <%s> nie:url ?_2 }

        INSERT { <%s> a       rdfs:Resource ;
                      nie:url \"%s\"
        }
        """ % (fileurn, fileurn, fileurn, fileurn, fileurn, fileurn, fileurn, dest_fileuri)
        self.tracker.update (insert)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # FIRST, open the file for writing, and just write some garbage, to simulate that
        # we already started recording the video...
        fdest = open (dest_filepath, 'wb')
        fdest.write ("some garbage written here")
        fdest.write ("to simulate we're recording something...")
        fdest.seek (0)

        # SECOND, set slo:location
        location_insert = """
        INSERT { <%s> a             nco:PostalAddress ;
                      nco:country  \"SPAIN\" ;
                      nco:locality \"Tres Cantos\"
        }

        INSERT { <%s> a                 slo:GeoLocation ;
                      slo:postalAddress <%s>
        }

        INSERT { <%s> a            rdfs:Resource ;
                      slo:location <%s>
        }
        """ % (postaladdressurn, geolocationurn, postaladdressurn, fileurn, geolocationurn)
        self.tracker.update (location_insert)

        #THIRD, start copying the image to the dest path
        original_file = os.path.join (self.get_data_dir (),self.get_test_image ())
        self.slowcopy_file_fd (original_file, fdest)
        fdest.close ()
        assert os.path.exists (dest_filepath)

        # FOURTH, ensure we have only 1 resource
        self.system.tracker_miner_fs_wait_for_idle (MINER_FS_IDLE_TIMEOUT)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest_filepath)
        self.system.tracker_miner_fs_wait_for_idle (MINER_FS_IDLE_TIMEOUT)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
Example #47
0
 def remove_test_data(self):
     try:
         shutil.rmtree(os.path.join(self.workdir, 'test-monitored'))
         shutil.rmtree(os.path.join(self.workdir, 'test-no-monitored'))
     except Exception as e:
         log("Failed to remove temporary data dir: %s" % e)
    def await_resource_inserted (self, rdf_class, url = None, title = None):
        """
        Block until a resource matching the parameters becomes available
        """
        assert (self.inserts_match_function == None)

        def match_cb (inserts_list, in_main_loop = True):
            matched = False
            filtered_list = []
            known_subjects = set ()

            #print "Got inserts: ", inserts_list, "\n"

            # FIXME: this could be done in an easier way: build one query that filters
            # based on every subject id in inserts_list, and returns the id of the one
            # that matched :)
            for insert in inserts_list:
                id = insert[1]

                if not matched and id not in known_subjects:
                    known_subjects.add (id)

                    where = "  ?urn a %s " % rdf_class

                    if url is not None:
                        where += "; nie:url \"%s\"" % url

                    if title is not None:
                        where += "; nie:title \"%s\"" % title

                    query = "SELECT ?urn WHERE { %s FILTER (tracker:id(?urn) = %s)}" % (where, insert[1])
                    #print "%s\n" % query
                    result_set = self.store.query (query)
                    #print result_set, "\n\n"

                    if len (result_set) > 0:
                        matched = True
                        self.matched_resource_urn = result_set[0][0]
                        self.matched_resource_id = insert[1]

                if not matched or id != self.matched_resource_id:
                    filtered_list += [insert]

            if matched and in_main_loop:
                glib.source_remove (self.graph_updated_timeout_id)
                self.graph_updated_timeout_id = 0
                self.inserts_match_function = None
                self.store.loop.quit ()

            return (matched, filtered_list)


        self.matched_resource_urn = None
        self.matched_resource_id = None

        log ("Await new %s (%i existing inserts)" % (rdf_class, len (self.inserts_list)))

        # Check the list of previously received events for matches
        (existing_match, self.inserts_list) = match_cb (self.inserts_list, False)

        if not existing_match:
            self.graph_updated_timeout_id = glib.timeout_add_seconds (REASONABLE_TIMEOUT, self._timeout_cb)
            self.inserts_match_function = match_cb

            # Run the event loop until the correct notification arrives
            self.store.loop.run ()

        if self.match_timed_out:
            self.fail ("Timeout waiting for resource: class %s, URL %s, title %s" % (rdf_class, url, title))

        return (self.matched_resource_id, self.matched_resource_urn)
Example #49
0
    def test_01_sync_audio_nb219946 (self):
        """
        Sync simulation (after fix for NB#219946):

        1. Create resource in the store for the new file, using blank nodes
        2. Write the file
        3. Wait for miner-fs to index it
        4. Ensure no duplicates are found

        During stage 3 you should see the following error from the FS miner, if
        viewing its logs:

            (tracker-miner-fs:16008): Tracker-CRITICAL **:   (Sparql buffer)
                Error in task 0 of the array-update: UNIQUE constraint failed:
                nie:DataObject.nie:url (strerror of errno ...)

            (tracker-miner-fs:16008): Tracker-CRITICAL **: Could not execute
                sparql: UNIQUE constraint failed: nie:DataObject.nie:url
                (strerror of errno ...)

        This is because the test already inserted the resource in the store.
        """

        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_music ())
        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_music ())
        dest_fileuri = "file://" + dest_filepath

        log ("Synchronizing audio file in '%s'..." % (dest_filepath))

        # Insert new resource in the store
        insert = """
        DELETE { ?file a rdfs:Resource }
        WHERE  { ?file nie:url '%s'}

        INSERT { _:x a                       nie:DataObject,
                                             nmm:MusicPiece,
                                             nfo:Media,
                                             nfo:Audio,
                                             nie:InformationElement ;
                     nie:url                 '%s' ;
                     nmm:musicAlbum          <urn:album:SinCos> ;
                     nfo:duration            '15' ;
                     nmm:performer           <urn:artist:AbBaby> ;
                     nmm:trackNumber         '13' ;
                     nfo:averageAudioBitrate '32000' ;
                     nfo:genre               'Pop' ;
                     nfo:isContentEncrypted  'false' ;
                     nie:title               'Simply Juvenile' ;
                     nie:isStoredAs          _:x
        }

        INSERT { <urn:album:SinCos> a              nmm:MusicAlbum;
                                    nmm:albumTitle 'SinCos'
        }

        INSERT { <urn:artist:AbBaby> a              nmm:Artist;
                                     nmm:artistName 'AbBaby'
        }
        """ % (dest_fileuri, dest_fileuri)
        self.tracker.update (insert)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        resource_id = self.tracker.get_resource_id(dest_fileuri)

        # Copy the audio file to the dest path
        self.slowcopy_file (origin_filepath, dest_filepath)
        assert os.path.exists (dest_filepath)
        self.tracker.await_resource_inserted ('nmm:MusicPiece', url=dest_fileuri)

        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)

        # Clean the new file so the test directory is as before
        log ("Remove and wait")
        os.remove (dest_filepath)
        self.tracker.await_resource_deleted (resource_id)
        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)