Example No. 1
    def setUpClass(self):
        log("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists(MINER_TMP_DIR):
            shutil.rmtree(MINER_TMP_DIR)
        os.makedirs(MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction()
        self.system.set_up_environment(CONF_OPTIONS, None)
        self.store = StoreHelper()
        self.store.start()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper()
        self.extractor.start()

        self.miner_fs = MinerFsHelper()
        self.miner_fs.start()
Example No. 2
    def setUpClass (self):
        log ("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists (MINER_TMP_DIR):
            shutil.rmtree (MINER_TMP_DIR)
        os.makedirs (MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction ()
        self.system.set_up_environment (CONF_OPTIONS, None)
        self.store = StoreHelper ()
        self.store.start ()
        self.miner_fs = MinerFsHelper ()
        self.miner_fs.start ()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper ()
        self.extractor.start ()
Example No. 3
class MinerResourceRemovalTest (ut.TestCase):

    # Use the same instances of store and miner-fs for the whole test suite,
    # because they take so long to do first-time init.
    @classmethod
    def setUpClass (self):
        log ("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists (MINER_TMP_DIR):
            shutil.rmtree (MINER_TMP_DIR)
        os.makedirs (MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction ()
        self.system.set_up_environment (CONF_OPTIONS, None)
        self.store = StoreHelper ()
        self.store.start ()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper ()
        self.extractor.start ()

        self.miner_fs = MinerFsHelper ()
        self.miner_fs.start ()

    @classmethod
    def tearDownClass (self):
        self.miner_fs.stop ()
        self.extractor.stop ()
        self.store.stop ()

    def setUp (self):
        self.store.reset_graph_updates_tracking ()

    def tearDown (self):
        self.system.unset_up_environment ()

    def create_test_content (self, file_urn, title):
        sparql = "INSERT { \
                    _:ie a nmm:MusicPiece ; \
                         nie:title \"%s\" ; \
                         nie:isStoredAs <%s> \
                  } " % (title, file_urn)

        self.store.update (sparql)

        return self.store.await_resource_inserted (rdf_class = 'nmm:MusicPiece',
                                                   title = title)

    def create_test_file (self, file_name):
        file_path = get_test_path (file_name)

        file = open (file_path, 'w')
        file.write ("Test")
        file.close ()

        return self.store.await_resource_inserted (rdf_class = 'nfo:Document',
                                                   url = get_test_uri (file_name))

    def assertResourceExists (self, urn):
        if self.store.ask ("ASK { <%s> a rdfs:Resource }" % urn) == False:
            self.fail ("Resource <%s> does not exist" % urn)

    def assertResourceMissing (self, urn):
        if self.store.ask ("ASK { <%s> a rdfs:Resource }" % urn) == True:
            self.fail ("Resource <%s> should not exist" % urn)


    def test_01_file_deletion (self):
        """
        Ensure every logical resource (nie:InformationElement) contained
        within a file is deleted when the file is deleted.
        """

        (file_1_id, file_1_urn) = self.create_test_file ("test_1.txt")
        (file_2_id, file_2_urn) = self.create_test_file ("test_2.txt")
        (ie_1_id, ie_1_urn) = self.create_test_content (file_1_urn, "Test resource 1")
        (ie_2_id, ie_2_urn) = self.create_test_content (file_2_urn, "Test resource 2")

        os.unlink (get_test_path ("test_1.txt"))

        self.store.await_resource_deleted (file_1_id)
        self.store.await_resource_deleted (ie_1_id,
                                           "Associated logical resource failed to be deleted " \
                                           "when its containing file was removed.")

        self.assertResourceMissing (file_1_urn)
        self.assertResourceMissing (ie_1_urn)
        self.assertResourceExists (file_2_urn)
        self.assertResourceExists (ie_2_urn)

    def test_02_removable_device_data (self):
        """
Example No. 4
 def setUp(self):
     self.extractor = ExtractorHelper()
     self.extractor.start()
Example No. 5
class ExtractionTestCase(ut.TestCase):
    """
    Checks whether the tracker extractor is able to retrieve metadata
    """
    def __init__(self, methodName='runTest', descfile=None):
        """
        descfile is the description file, given as a relative path
        """
        ut.TestCase.__init__(self, methodName)

        # Load the description file
        assert descfile
        self.rel_description = descfile
        self.configParser = self.__load_description_file(self.rel_description)

        # Add a method to the class called after the description file
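        # (For illustration: a hypothetical "audio/Test Song.expected" would
        #  become the test method name "audio/test_song".)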
        methodName = self.rel_description.lower()[:-len(".expected")].replace(
            " ", "_")[-60:]

        if (self.__is_expected_failure()):
            setattr(self, methodName, self.expected_failure_test_extraction)
        else:
            setattr(self, methodName, self.generic_test_extraction)

        # The unittest framework will run the test called "self._testMethodName",
        # so we set that variable to our new name.
        self._testMethodName = methodName

    def runTest(self):
        """
        Placeholder that should NEVER be called; it only exists because unittest requires this method to be present.
        """
        assert False

    def __load_description_file(self, descfile):
        configParser = ConfigParser.RawConfigParser()
        # Make it case sensitive:
        configParser.optionxform = str

        abs_description = os.path.abspath(descfile)
        loaded_files = configParser.read(abs_description)
        if not abs_description in loaded_files:
            raise Exception("Unable to load %s" % (abs_description))

        return configParser

    def __is_expected_failure(self):
        assert self.configParser
        return self.configParser.has_option("TestFile", "ExpectedFailure")

    def __get_bugnumber(self):
        assert self.configParser
        if self.configParser.has_option("TestFile", "Bugzilla"):
            return "'" + self.configParser.get("TestFile", "Bugzilla") + "'"
        else:
            return None

    def setUp(self):
        self.extractor = ExtractorHelper()
        self.extractor.start()

    def tearDown(self):
        self.extractor.stop()

    def expected_failure_test_extraction(self):
        try:
            self.generic_test_extraction()
        except Exception:
            raise ut.case._ExpectedFailure(sys.exc_info())

        if self.__get_bugnumber():
            raise Exception("Unexpected success. Maybe bug: " +
                            self.__get_bugnumber() + " has been fixed?")
        else:
            raise Exception("Unexpected success. Check " +
                            self.rel_description)

    def generic_test_extraction(self):
        abs_description = os.path.abspath(self.rel_description)

        # Filename contains the file to extract, as a path relative to the description file
        desc_root, desc_file = os.path.split(abs_description)
        self.file_to_extract = ""
        try:
            self.file_to_extract = os.path.join(
                desc_root, self.configParser.get("TestFile", "Filename"))
        except Exception as e:
            self.fail("%s in %s" % (e, abs_description))

        try:
            result = self.extractor.get_metadata(
                "file://" + self.file_to_extract, "")

            self.__assert_extraction_ok(result)
        except NoMetadataException:
            self.fail(
                "Probably a missing gstreamer plugin (or crash in the extractor?)"
            )
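
A minimal usage sketch (not from the original snippets): one plausible way to drive ExtractionTestCase, assuming a hypothetical EXPECTED_DIR directory that holds the ".expected" description files; the class and its helpers above are taken as given.

import os
import unittest as ut

EXPECTED_DIR = "test-extraction-data"  # hypothetical location of *.expected files

def load_extraction_suite():
    # Every ".expected" description file becomes one dynamically named test case.
    suite = ut.TestSuite()
    for root, dirs, files in os.walk(EXPECTED_DIR):
        for name in files:
            if name.endswith(".expected"):
                suite.addTest(ExtractionTestCase(descfile=os.path.join(root, name)))
    return suite

if __name__ == "__main__":
    ut.TextTestRunner(verbosity=1).run(load_extraction_suite())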
Example No. 6
 def setUp (self):
     self.extractor = ExtractorHelper ()
     self.extractor.start ()
Example No. 7
class ExtractionTestCase (ut.TestCase):
    """
    Checks whether the tracker extractor is able to retrieve metadata
    """
    def __init__ (self, methodName='runTest', descfile=None):
        """
        descfile is the description file, given as a relative path
        """
        ut.TestCase.__init__ (self, methodName)

        # Load the description file
        assert descfile
        self.rel_description = descfile
        self.configParser = self.__load_description_file (self.rel_description)

        # Add a method to the class called after the description file
        methodName = self.rel_description.lower()[:-len(".expected")].replace (" ", "_")[-60:]

        if (self.__is_expected_failure ()):
            setattr (self,
                     methodName,
                     self.expected_failure_test_extraction)
        else:
            setattr (self,
                     methodName,
                     self.generic_test_extraction)

        # The unittest framework will run the test called "self._testMethodName",
        # so we set that variable to our new name.
        self._testMethodName = methodName

    def runTest (self):
        """
        Placeholder that should NEVER be called; it only exists because unittest requires this method to be present.
        """
        assert False

    def __load_description_file (self, descfile):
        configParser = ConfigParser.RawConfigParser ()
        # Make it case sensitive:
        configParser.optionxform = str

        abs_description = os.path.abspath (descfile)
        loaded_files = configParser.read (abs_description)
        if not abs_description in loaded_files:
            raise Exception("Unable to load %s" % (abs_description))

        return configParser

    def __is_expected_failure (self):
        assert self.configParser
        return self.configParser.has_option ("TestFile", "ExpectedFailure")

    def __get_bugnumber (self):
        assert self.configParser
        if self.configParser.has_option ("TestFile", "Bugzilla"):
            return "'" + self.configParser.get ("TestFile", "Bugzilla") + "'"
        else:
            return None



    def setUp (self):
        self.extractor = ExtractorHelper ()
        self.extractor.start ()

    def tearDown (self):
        self.extractor.stop ()

    def expected_failure_test_extraction (self):
        try:
            self.generic_test_extraction ()
        except Exception:
            raise ut.case._ExpectedFailure(sys.exc_info())

        if self.__get_bugnumber ():
            raise Exception ("Unexpected success. Maybe bug: " + self.__get_bugnumber () + " has been fixed?")
        else:
            raise Exception ("Unexpected success. Check " + self.rel_description)

    def generic_test_extraction (self):
        abs_description = os.path.abspath (self.rel_description)

        # Filename contains the file to extract, as a path relative to the description file
        desc_root, desc_file = os.path.split (abs_description)
        self.file_to_extract = ""
        try:
            self.file_to_extract = os.path.join (desc_root, self.configParser.get ("TestFile", "Filename"))
        except Exception as e:
            self.fail ("%s in %s"
                       % (e, abs_description))

        try:
            result = self.extractor.get_metadata ("file://" + self.file_to_extract, "")

            self.__assert_extraction_ok (result)
        except NoMetadataException:
            self.fail ("Probably a missing gstreamer plugin (or crash in the extractor?)")
Example No. 8
class MinerResourceRemovalTest(ut.TestCase):

    # Use the same instances of store and miner-fs for the whole test suite,
    # because they take so long to do first-time init.
    @classmethod
    def setUpClass(self):
        log("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists(MINER_TMP_DIR):
            shutil.rmtree(MINER_TMP_DIR)
        os.makedirs(MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction()
        self.system.set_up_environment(CONF_OPTIONS, None)
        self.store = StoreHelper()
        self.store.start()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper()
        self.extractor.start()

        self.miner_fs = MinerFsHelper()
        self.miner_fs.start()

    @classmethod
    def tearDownClass(self):
        self.miner_fs.stop()
        self.extractor.stop()
        self.store.stop()

    def setUp(self):
        self.store.reset_graph_updates_tracking()

    def tearDown(self):
        self.system.unset_up_environment()

    def create_test_content(self, file_urn, title):
        sparql = "INSERT { \
                    _:ie a nmm:MusicPiece ; \
                         nie:title \"%s\" ; \
                         nie:isStoredAs <%s> \
                  } " % (title, file_urn)

        self.store.update(sparql)

        return self.store.await_resource_inserted(rdf_class='nmm:MusicPiece',
                                                  title=title)

    def create_test_file(self, file_name):
        file_path = get_test_path(file_name)

        file = open(file_path, 'w')
        file.write("Test")
        file.close()

        return self.store.await_resource_inserted(rdf_class='nfo:Document',
                                                  url=get_test_uri(file_name))

    def assertResourceExists(self, urn):
        if self.store.ask("ASK { <%s> a rdfs:Resource }" % urn) == False:
            self.fail("Resource <%s> does not exist" % urn)

    def assertResourceMissing(self, urn):
        if self.store.ask("ASK { <%s> a rdfs:Resource }" % urn) == True:
            self.fail("Resource <%s> should not exist" % urn)

    def test_01_file_deletion(self):
        """
        Ensure every logical resource (nie:InformationElement) contained
        within a file is deleted when the file is deleted.
        """

        (file_1_id, file_1_urn) = self.create_test_file("test_1.txt")
        (file_2_id, file_2_urn) = self.create_test_file("test_2.txt")
        (ie_1_id, ie_1_urn) = self.create_test_content(file_1_urn,
                                                       "Test resource 1")
        (ie_2_id, ie_2_urn) = self.create_test_content(file_2_urn,
                                                       "Test resource 2")

        os.unlink(get_test_path("test_1.txt"))

        self.store.await_resource_deleted(file_1_id)
        self.store.await_resource_deleted (ie_1_id,
                                           "Associated logical resource failed to be deleted " \
                                           "when its containing file was removed.")

        self.assertResourceMissing(file_1_urn)
        self.assertResourceMissing(ie_1_urn)
        self.assertResourceExists(file_2_urn)
        self.assertResourceExists(ie_2_urn)

    def test_02_removable_device_data(self):
        """
Example No. 9
class MinerResourceRemovalTest (ut.TestCase):
    graph_updated_handler_id = 0

    # Use the same instances of store and miner-fs for the whole test suite,
    # because they take so long to do first-time init.
    @classmethod
    def setUpClass (self):
        log ("Using %s as temp dir\n" % MINER_TMP_DIR)
        if os.path.exists (MINER_TMP_DIR):
            shutil.rmtree (MINER_TMP_DIR)
        os.makedirs (MINER_TMP_DIR)

        self.system = TrackerSystemAbstraction ()
        self.system.set_up_environment (CONF_OPTIONS, None)
        self.store = StoreHelper ()
        self.store.start ()
        self.miner_fs = MinerFsHelper ()
        self.miner_fs.start ()

        # GraphUpdated seems to not be emitted if the extractor isn't running
        # even though the file resource still gets inserted - maybe because
        # INSERT SILENT is used in the FS miner?
        self.extractor = ExtractorHelper ()
        self.extractor.start ()

    @classmethod
    def tearDownClass (self):
        self.store.bus._clean_up_signal_match (self.graph_updated_handler_id)
        self.extractor.stop ()
        self.miner_fs.stop ()
        self.store.stop ()

    def setUp (self):
        self.inserts_list = []
        self.deletes_list = []
        self.inserts_match_function = None
        self.deletes_match_function = None
        self.match_timed_out = False

        self.graph_updated_handler_id = self.store.bus.add_signal_receiver (self._graph_updated_cb,
                                                                            signal_name = "GraphUpdated",
                                                                            path = "/org/freedesktop/Tracker1/Resources",
                                                                            dbus_interface = "org.freedesktop.Tracker1.Resources")

    def tearDown (self):
        self.system.unset_up_environment ()

    # A system to follow GraphUpdated and make sure all changes are tracked.
    # This code saves every change notification received, and exposes methods
    # to await the insertion or deletion of a certain resource; these methods
    # first check the list of events already received and wait for more if
    # the event has not yet happened.
    #
    # FIXME: put this stuff in StoreHelper
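    #
    # (Hypothetical usage sketch, not part of the original code: with these
    #  helpers a test typically does something like
    #      (file_id, file_urn) = self.await_resource_inserted ('nfo:Document',
    #                                                          url = file_uri)
    #      os.unlink (file_path)
    #      self.await_resource_deleted (file_id)
    #  blocking on the store's GMainLoop until the matching GraphUpdated
    #  notification arrives. 'file_uri' and 'file_path' are placeholders.)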
    def _timeout_cb (self):
        self.match_timed_out = True
        self.store.loop.quit ()
        # Don't fail here, exceptions don't get propagated correctly
        # from the GMainLoop

    def _graph_updated_cb (self, class_name, deletes_list, inserts_list):
        """
        Process notifications from tracker-store on resource changes.
        """
        matched = False
        if inserts_list is not None:
            if self.inserts_match_function is not None:
                # The match function will remove matched entries from the list
                (matched, inserts_list) = self.inserts_match_function (inserts_list)
            self.inserts_list += inserts_list

        if deletes_list is not None:
            if self.deletes_match_function is not None:
                (matched, deletes_list) = self.deletes_match_function (deletes_list)
            self.deletes_list += deletes_list

    def await_resource_inserted (self, rdf_class, url = None, title = None):
        """
        Block until a resource matching the parameters becomes available
        """
        assert (self.inserts_match_function == None)

        def match_cb (inserts_list, in_main_loop = True):
            matched = False
            filtered_list = []
            known_subjects = set ()

            #print "Got inserts: ", inserts_list, "\n"

            # FIXME: this could be done in an easier way: build one query that filters
            # based on every subject id in inserts_list, and returns the id of the one
            # that matched :)
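            # (Illustrative sketch of that single-query idea, untested; it assumes
            #  the same tracker:id() function plus SPARQL 1.1 IN/AS support, with
            #  'where' hoisted out of the loop below:
            #      ids = ', '.join (str (i[1]) for i in inserts_list)
            #      query = "SELECT ?urn (tracker:id(?urn) AS ?id) WHERE { %s "\
            #              "  FILTER (tracker:id(?urn) IN (%s)) }" % (where, ids)
            #  A non-empty result row would give both the matched id and its URN.)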
            for insert in inserts_list:
                id = insert[1]

                if not matched and id not in known_subjects:
                    known_subjects.add (id)

                    where = "  ?urn a %s " % rdf_class

                    if url is not None:
                        where += "; nie:url \"%s\"" % url

                    if title is not None:
                        where += "; nie:title \"%s\"" % title

                    query = "SELECT ?urn WHERE { %s FILTER (tracker:id(?urn) = %s)}" % (where, insert[1])
                    #print "%s\n" % query
                    result_set = self.store.query (query)
                    #print result_set, "\n\n"

                    if len (result_set) > 0:
                        matched = True
                        self.matched_resource_urn = result_set[0][0]
                        self.matched_resource_id = insert[1]

                if not matched or id != self.matched_resource_id:
                    filtered_list += [insert]

            if matched and in_main_loop:
                glib.source_remove (self.graph_updated_timeout_id)
                self.graph_updated_timeout_id = 0
                self.inserts_match_function = None
                self.store.loop.quit ()

            return (matched, filtered_list)


        self.matched_resource_urn = None
        self.matched_resource_id = None

        log ("Await new %s (%i existing inserts)" % (rdf_class, len (self.inserts_list)))

        # Check the list of previously received events for matches
        (existing_match, self.inserts_list) = match_cb (self.inserts_list, False)

        if not existing_match:
            self.graph_updated_timeout_id = glib.timeout_add_seconds (REASONABLE_TIMEOUT, self._timeout_cb)
            self.inserts_match_function = match_cb

            # Run the event loop until the correct notification arrives
            self.store.loop.run ()

        if self.match_timed_out:
            self.fail ("Timeout waiting for resource: class %s, URL %s, title %s" % (rdf_class, url, title))

        return (self.matched_resource_id, self.matched_resource_urn)


    def await_resource_deleted (self, id, fail_message = None):
        """
        Block until we are notified of a resource's deletion
        """
        assert (self.deletes_match_function == None)

        def match_cb (deletes_list, in_main_loop = True):
            matched = False
            filtered_list = []

            #print "Looking for %i in " % id, deletes_list, "\n"

            for delete in deletes_list:
                if delete[1] == id:
                    matched = True
                else:
                    filtered_list += [delete]

            if matched and in_main_loop:
                glib.source_remove (self.graph_updated_timeout_id)
                self.graph_updated_timeout_id = 0
                self.deletes_match_function = None

            self.store.loop.quit ()

            return (matched, filtered_list)

        log ("Await deletion of %i (%i existing)" % (id, len (self.deletes_list)))

        (existing_match, self.deletes_list) = match_cb (self.deletes_list, False)

        if not existing_match:
            self.graph_updated_timeout_id = glib.timeout_add_seconds (REASONABLE_TIMEOUT, self._timeout_cb)
            self.deletes_match_function = match_cb

            # Run the event loop until the correct notification arrives
            self.store.loop.run ()

        if self.match_timed_out:
            if fail_message is not None:
                self.fail (fail_message)
            else:
                self.fail ("Resource %i has not been deleted." % id)

        return


    def create_test_content (self, file_urn, title):
        sparql = "INSERT { \
                    _:ie a nmm:MusicPiece ; \
                         nie:title \"%s\" ; \
                         nie:isStoredAs <%s> \
                  } " % (title, file_urn)

        self.store.update (sparql)

        return self.await_resource_inserted (rdf_class = 'nmm:MusicPiece',
                                             title = title)

    def create_test_file (self, file_name):
        file_path = get_test_path (file_name)

        file = open (file_path, 'w')
        file.write ("Test")
        file.close ()

        return self.await_resource_inserted (rdf_class = 'nfo:Document',
                                             url = get_test_uri (file_name))

    def assertResourceExists (self, urn):
        if self.store.ask ("ASK { <%s> a rdfs:Resource }" % urn) == False:
            self.fail ("Resource <%s> does not exist" % urn)

    def assertResourceMissing (self, urn):
        if self.store.ask ("ASK { <%s> a rdfs:Resource }" % urn) == True:
            self.fail ("Resource <%s> should not exist" % urn)


    def test_01_file_deletion (self):
        """
        Ensure every logical resource (nie:InformationElement) contained
        within a file is deleted when the file is deleted.
        """

        (file_1_id, file_1_urn) = self.create_test_file ("test_1.txt")
        (file_2_id, file_2_urn) = self.create_test_file ("test_2.txt")
        (ie_1_id, ie_1_urn) = self.create_test_content (file_1_urn, "Test resource 1")
        (ie_2_id, ie_2_urn) = self.create_test_content (file_2_urn, "Test resource 2")

        os.unlink (get_test_path ("test_1.txt"))

        self.await_resource_deleted (file_1_id)
        self.await_resource_deleted (ie_1_id,
                                     "Associated logical resource failed to be deleted " \
                                     "when its containing file was removed.")

        self.assertResourceMissing (file_1_urn)
        self.assertResourceMissing (ie_1_urn)
        self.assertResourceExists (file_2_urn)
        self.assertResourceExists (ie_2_urn)

    def test_02_removable_device_data (self):
        """