Example 1
0
class TestFileCacher(TestService):
    """Service that performs automatically some tests for the
    FileCacher service.

    """

    def __init__(self, shard):
        logger.initialize(ServiceCoord("TestFileCacher", shard))
        TestService.__init__(self, shard, custom_logger=logger)

        # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/"
        self.cache_base_path = os.path.join(config.cache_dir,
                                            "fs-cache-TestFileCacher-0")
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None
        self.file_cacher = FileCacher(self)
        #self.file_cacher = FileCacher(self, path="fs-storage")

    def prepare(self):
        """Initialization for the test code - make sure that the cache
        is empty before testing.

        """
        logger.info("Please delete directory %s before." %
                    self.cache_base_path)

### TEST 000 ###

    def test_000(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        """
        self.size = 100
        self.content = "".join(chr(random.randint(0, 255))
                               for unused_i in xrange(self.size))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                       u"Test #000")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data), "rb").read() != \
                self.content:
            self.test_end(False, "Local cache's content differ "
                          "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 001 ###

    def test_001(self):
        """Retrieve the file.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.test_end(False,
                              "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data object received correctly.")

### TEST 002 ###

    def test_002(self):
        """Check the size of the file.

        """
        logger.info("  I am checking the size of the ~100B binary file")
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if size == self.size:
            self.test_end(True, "The size is correct.")
        else:
            self.test_end(False, "The size is wrong: %d instead of %d" %
                          (size, self.size))

### TEST 003 ###

    def test_003(self):
        """Get file from FileCacher.

        """
        logger.info("  I am retrieving the file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.test_end(False, "Content differ.")
        elif not os.path.exists(self.cache_path):
            self.test_end(False, "File not stored in local cache.")
        elif open(self.cache_path, "rb").read() != self.content:
            self.test_end(False, "Local cache's content differs "
                          "from the original file.")
        else:
            self.test_end(True, "Content object received " +
                          "and cached correctly.")

### TEST 004 ###

    def test_004(self):
        """Delete the file through FS and tries to get it again through FC.

        """
        logger.info("  I am deleting the file from FileCacher.")
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %s." % error)
            return

        else:
            logger.info("  File deleted correctly.")
            logger.info("  I am getting the file from FileCacher.")
            try:
                self.file_cacher.get_file(self.digest)
            except Exception as error:
                self.test_end(True, "Correctly received an error: %r." % error)
            else:
                self.test_end(False, "Did not receive error.")

### TEST 005 ###

    def test_005(self):
        """Get unexisting file from FileCacher.

        """
        logger.info("  I am retrieving an unexisting file from FileCacher.")
        try:
            self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(True, "Correctly received an error: %r." % error)
        else:
            self.test_end(False, "Did not receive error.")

### TEST 006 ###

    def test_006(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        """
        self.content = "".join(chr(random.randint(0, 255))
                               for unused_i in xrange(100))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data),
                  "rb").read() != self.content:
            self.test_end(False, "Local cache's content differ "
                          "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 007 ###

    def test_007(self):
        """Retrieve the file as a string.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher "
                    "using get_file_to_string()")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.test_end(False,
                              "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data received correctly.")

### TEST 008 ###

    def test_008(self):
        """Put a ~100MB file into the storage (using a specially
        crafted file-like object).

        """
        logger.info("  I am sending the ~100MB binary file to FileCacher")
        rand_file = RandomFile(100000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.test_end(False, "The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif my_digest != data:
            self.test_end(False, "File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 009 ###

    def test_009(self):
        """Get the ~100MB file from FileCacher.

        """
        logger.info("  I am retrieving the ~100MB file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.test_end(False, "Content differs.")
            elif not os.path.exists(self.cache_path):
                self.test_end(False, "File not stored in local cache.")
            else:
                self.test_end(True, "Content object received " +
                              "and cached correctly.")
        finally:
            self.file_cacher.delete(self.digest)
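
The large-file tests above (008 and 009) rely on RandomFile and HashingFile helpers that are not part of this listing. The sketch below shows what such file-like objects might look like, assuming the storage digests files with SHA-1 (as the comparison against the digest returned by FileCacher suggests); the names mirror the usage above, but the implementation itself is an assumption, not the real helper code.

import hashlib
import os


class RandomFile(object):
    """Read-only file-like object that yields `size` random bytes and
    keeps a running digest of everything it has produced (assumed
    helper, sketched for illustration only)."""

    def __init__(self, size):
        self.dim = size  # bytes still to be produced; 0 when fully read
        self._hasher = hashlib.sha1()

    def read(self, byte_num):
        # Hand out at most byte_num bytes; an empty string signals EOF.
        byte_num = min(byte_num, self.dim)
        if byte_num == 0:
            return b""
        data = os.urandom(byte_num)
        self.dim -= byte_num
        self._hasher.update(data)
        return data

    @property
    def digest(self):
        return self._hasher.hexdigest()

    def close(self):
        pass


class HashingFile(object):
    """Write-only file-like object that digests whatever is written to
    it (assumed helper, sketched for illustration only)."""

    def __init__(self):
        self._hasher = hashlib.sha1()

    def write(self, data):
        self._hasher.update(data)
        return len(data)

    @property
    def digest(self):
        return self._hasher.hexdigest()

    def close(self):
        pass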
Example 2
0
def test_testcases(base_dir, soluzione, language, assume=None):
    global task, file_cacher

    # Use a disabled FileCacher with an FSBackend in order to avoid
    # filling the database with junk and to save space.
    if file_cacher is None:
        file_cacher = FileCacher(path=os.path.join(config.cache_dir,
                                                   'cmsMake'),
                                 enabled=False)

    # Load the task
    if task is None:
        loader = YamlLoader(os.path.realpath(os.path.join(base_dir, "..")),
                            file_cacher)
        # Normally we should import the contest beforehand, but YamlLoader
        # accepts get_task() even without a previous get_contest() call
        task = loader.get_task(os.path.split(os.path.realpath(base_dir))[1])

    # Prepare the EvaluationJob
    dataset = task.active_dataset
    if dataset.task_type != "OutputOnly":
        digest = file_cacher.put_file_from_path(
            os.path.join(base_dir, soluzione),
            "Solution %s for task %s" % (soluzione, task.name))
        executables = {
            task.name: Executable(filename=task.name, digest=digest)
        }
        jobs = [(t,
                 EvaluationJob(language=language,
                               task_type=dataset.task_type,
                               task_type_parameters=json.loads(
                                   dataset.task_type_parameters),
                               managers=dict(dataset.managers),
                               executables=executables,
                               input=dataset.testcases[t].input,
                               output=dataset.testcases[t].output,
                               time_limit=dataset.time_limit,
                               memory_limit=dataset.memory_limit))
                for t in dataset.testcases]
        tasktype = get_task_type(dataset=dataset)
    else:
        print("Generating outputs...", end='')
        files = {}
        for t in sorted(dataset.testcases.keys()):
            with file_cacher.get_file(dataset.testcases[t].input) as fin:
                with TemporaryFile() as fout:
                    print(str(t), end='')
                    call(soluzione, stdin=fin, stdout=fout, cwd=base_dir)
                    fout.seek(0)
                    digest = file_cacher.put_file_from_fobj(fout)
                    outname = "output_%s.txt" % t
                    files[outname] = File(filename=outname, digest=digest)
        jobs = [(t,
                 EvaluationJob(task_type=dataset.task_type,
                               task_type_parameters=json.loads(
                                   dataset.task_type_parameters),
                               managers=dict(dataset.managers),
                               files=files,
                               input=dataset.testcases[t].input,
                               output=dataset.testcases[t].output,
                               time_limit=dataset.time_limit,
                               memory_limit=dataset.memory_limit))
                for t in dataset.testcases]
        for k, job in jobs:
            job._key = k
        tasktype = get_task_type(dataset=dataset)
        print()

    ask_again = True
    last_status = "ok"
    status = "ok"
    stop = False
    info = []
    points = []
    comments = []
    tcnames = []
    for jobinfo in sorted(jobs):
        print(jobinfo[0], end='')
        sys.stdout.flush()
        job = jobinfo[1]
        # Skip the testcase if we have decided to consider everything
        # a timeout
        if stop:
            info.append("Time limit exceeded")
            points.append(0.0)
            comments.append("Timeout.")
            continue

        # Evaluate testcase
        last_status = status
        tasktype.evaluate(job, file_cacher)
        if dataset.task_type != "OutputOnly":
            status = job.plus["exit_status"]
            info.append("Time: %5.3f   Wall: %5.3f   Memory: %s" %
                        (job.plus["execution_time"],
                         job.plus["execution_wall_clock_time"],
                         mem_human(job.plus["execution_memory"])))
        else:
            status = "ok"
            info.append("N/A")
        points.append(float(job.outcome))
        comments.append(format_status_text(job.text))
        tcnames.append(jobinfo[0])

        # If we saw two consecutive timeouts, ask whether we want to
        # consider everything a timeout
        if ask_again and status == "timeout" and last_status == "timeout":
            print()
            print("Want to stop and consider everything to timeout? [y/N]",
                  end='')
            if assume is not None:
                print(assume)
                tmp = assume
            else:
                tmp = raw_input().lower()
            if tmp in ['y', 'yes']:
                stop = True
            else:
                ask_again = False

    # Result pretty printing
    print()
    clen = max(len(c) for c in comments)
    ilen = max(len(i) for i in info)
    for (i, p, c, b) in zip(tcnames, points, comments, info):
        print("%s) %5.2lf --- %s [%s]" % (i, p, c.ljust(clen), b.center(ilen)))

    return zip(points, comments, info)
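
The "Memory" column above is produced by a mem_human helper that does not appear in this listing. A minimal sketch of one plausible implementation follows; the unit handling is an assumption, not the actual function used by this script.

def mem_human(size):
    """Format a byte count with a binary-prefix unit (assumed sketch)."""
    for unit in ("B", "KiB", "MiB", "GiB"):
        if size < 1024:
            return "%.0f %s" % (size, unit)
        size /= 1024.0
    return "%.0f %s" % (size, "TiB")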
Example 3
0
class TestFileCacher(unittest.TestCase):
    """Service that performs automatically some tests for the
    FileCacher service.

    """
    def setUp(self):
        self.file_cacher = FileCacher()
        #self.file_cacher = FileCacher(self, path="fs-storage")
        self.cache_base_path = self.file_cacher.file_dir
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None

    def tearDown(self):
        shutil.rmtree(self.cache_base_path, ignore_errors=True)

    def test_file_life(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        Then retrieve it.

        Then check its size.

        Then get it back.

        Then delete it.

        """
        self.size = 100
        self.content = b"".join(
            chr(random.randint(0, 255)) for unused_i in xrange(self.size))

        data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                   u"Test #000")

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ " "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

        # Check the size of the file.
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if size != self.size:
            self.fail("The size is wrong: %d instead of %d" %
                      (size, self.size))

        # Get file from FileCacher.
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.fail("Content differ.")
        elif not os.path.exists(self.cache_path):
            self.fail("File not stored in local cache.")
        elif io.open(self.cache_path, "rb").read() != self.content:
            self.fail("Local cache's content differ " + "from original file.")

        # Delete the file through FS and try to get it again through
        # FC.
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.fail("Error received: %s." % error)
            return

        else:
            with self.assertRaises(Exception):
                self.file_cacher.get_file(self.digest)

    def test_fetch_missing_file(self):
        """Get unexisting file from FileCacher.

        """
        with self.assertRaises(Exception):
            self.file_cacher.get_file(self.digest)

    def test_file_as_content(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        Then retrieve it as a string.

        """
        self.content = b"".join(
            chr(random.randint(0, 255)) for unused_i in xrange(100))

        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ " "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file as a string.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

    def test_big_file(self):
        """Put a ~10MB file into the storage (using a specially
        crafted file-like object).

        Then get it back.

        """
        rand_file = RandomFile(10000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.fail("The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif my_digest != data:
            self.fail("File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Get the ~10MB file from FileCacher.
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.fail("Content differs.")
            elif not os.path.exists(self.cache_path):
                self.fail("File not stored in local cache.")
        finally:
            self.file_cacher.delete(self.digest)
Example 4
0
def test_testcases(base_dir, soluzione, language, assume=None):
    global task, file_cacher

    # Use a FileCacher with a NullBackend in order to avoid filling
    # the database with junk
    if file_cacher is None:
        file_cacher = FileCacher(null=True)

    # Load the task
    # TODO - This implies copying a lot of data to the FileCacher,
    # which is annoying if you have to do it continuously; it would be
    # better to use a persistent cache (although local, possibly
    # filesystem-based instead of database-based) and somehow detect
    # when the task has already been loaded
    if task is None:
        loader = YamlLoader(
            os.path.realpath(os.path.join(base_dir, "..")),
            file_cacher)
        # Normally we should import the contest beforehand, but YamlLoader
        # accepts get_task() even without a previous get_contest() call
        task = loader.get_task(os.path.split(os.path.realpath(base_dir))[1])

    # Prepare the EvaluationJob
    dataset = task.active_dataset
    if dataset.task_type != "OutputOnly":
        digest = file_cacher.put_file_from_path(
            os.path.join(base_dir, soluzione),
            "Solution %s for task %s" % (soluzione, task.name))
        executables = {task.name: Executable(filename=task.name,
                                             digest=digest)}
        jobs = [(t, EvaluationJob(
            language=language,
            task_type=dataset.task_type,
            task_type_parameters=json.loads(dataset.task_type_parameters),
            managers=dict(dataset.managers),
            executables=executables,
            input=dataset.testcases[t].input,
            output=dataset.testcases[t].output,
            time_limit=dataset.time_limit,
            memory_limit=dataset.memory_limit)) for t in dataset.testcases]
        tasktype = get_task_type(dataset=dataset)
    else:
        print("Generating outputs...", end='')
        files = {}
        for t in sorted(dataset.testcases.keys()):
            with file_cacher.get_file(dataset.testcases[t].input) as fin:
                with TemporaryFile() as fout:
                    print("%s" % t, end='')
                    call(soluzione, stdin=fin, stdout=fout, cwd=base_dir)
                    fout.seek(0)
                    digest = file_cacher.put_file_from_fobj(fout)
                    outname = "output_%s.txt" % t
                    files[outname] = File(filename=outname, digest=digest)
        jobs = [(t, EvaluationJob(
            task_type=dataset.task_type,
            task_type_parameters=json.loads(dataset.task_type_parameters),
            managers=dict(dataset.managers),
            files=files,
            input=dataset.testcases[t].input,
            output=dataset.testcases[t].output,
            time_limit=dataset.time_limit,
            memory_limit=dataset.memory_limit)) for t in dataset.testcases]
        for k, job in jobs:
            job._key = k
        tasktype = get_task_type(dataset=dataset)
        print()

    ask_again = True
    last_status = "ok"
    status = "ok"
    stop = False
    info = []
    points = []
    comments = []
    tcnames = []
    for jobinfo in sorted(jobs):
        print(jobinfo[0], end='')
        sys.stdout.flush()
        job = jobinfo[1]
        # Skip the testcase if we have decided to consider everything
        # a timeout
        if stop:
            info.append("Time limit exceeded")
            points.append(0.0)
            comments.append("Timeout.")
            continue

        # Evaluate testcase
        last_status = status
        tasktype.evaluate(job, file_cacher)
        if dataset.task_type != "OutputOnly":
            status = job.plus["exit_status"]
            info.append("Time: %5.3f   Wall: %5.3f   Memory: %s" %
                       (job.plus["execution_time"],
                        job.plus["execution_wall_clock_time"],
                        mem_human(job.plus["execution_memory"])))
        else:
            status = "ok"
            info.append("N/A")
        points.append(float(job.outcome))
        comments.append(format_status_text(job.text))
        tcnames.append(jobinfo[0])

        # If we saw two consecutive timeouts, ask whether we want to
        # consider everything a timeout
        if ask_again and status == "timeout" and last_status == "timeout":
            print()
            print("Want to stop and consider everything to timeout? [y/N]",
                  end='')
            if assume is not None:
                print(assume)
                tmp = assume
            else:
                tmp = raw_input().lower()
            if tmp in ['y', 'yes']:
                stop = True
            else:
                ask_again = False

    # Result pretty printing
    print()
    clen = max(len(c) for c in comments)
    ilen = max(len(i) for i in info)
    for (i, p, c, b) in zip(tcnames, points, comments, info):
        print("%s) %5.2lf --- %s [%s]" % (i, p, c.ljust(clen), b.center(ilen)))

    return zip(points, comments, info)
Example 5
0
class TestFileCacher(unittest.TestCase):
    """Service that performs automatically some tests for the
    FileCacher service.

    """

    def setUp(self):
        self.file_cacher = FileCacher()
        #self.file_cacher = FileCacher(self, path="fs-storage")
        self.cache_base_path = self.file_cacher.file_dir
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None

    def tearDown(self):
        shutil.rmtree(self.cache_base_path, ignore_errors=True)

    def test_file_life(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        Then retrieve it.

        Then check its size.

        Then get it back.

        Then delete it.

        """
        self.size = 100
        self.content = b"".join(chr(random.randint(0, 255))
                                for unused_i in xrange(self.size))

        data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                   u"Test #000")

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ "
                      "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

        # Check the size of the file.
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if size != self.size:
            self.fail("The size is wrong: %d instead of %d" %
                      (size, self.size))

        # Get file from FileCacher.
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.fail("Content differ.")
        elif not os.path.exists(self.cache_path):
            self.fail("File not stored in local cache.")
        elif io.open(self.cache_path, "rb").read() != self.content:
            self.fail("Local cache's content differ " +
                      "from original file.")

        # Delete the file through FS and try to get it again through
        # FC.
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.fail("Error received: %s." % error)
            return

        else:
            with self.assertRaises(Exception):
                self.file_cacher.get_file(self.digest)

    def test_fetch_missing_file(self):
        """Get unexisting file from FileCacher.

        """
        with self.assertRaises(Exception):
            self.file_cacher.get_file(self.digest)

    def test_file_as_content(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        Then retrieve it as a string.

        """
        self.content = b"".join(chr(random.randint(0, 255))
                                for unused_i in xrange(100))

        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ "
                      "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file as a string.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

    def test_big_file(self):
        """Put a ~10MB file into the storage (using a specially
        crafted file-like object).

        Then get it back.

        """
        rand_file = RandomFile(10000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.fail("The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif my_digest != data:
            self.fail("File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Get the ~10MB file from FileCacher.
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.fail("Content differs.")
            elif not os.path.exists(self.cache_path):
                self.fail("File not stored in local cache.")
        finally:
            self.file_cacher.delete(self.digest)
Example 6
0
class TestFileCacher(TestService):
    """Service that performs automatically some tests for the
    FileCacher service.

    """
    def __init__(self, shard):
        logger.initialize(ServiceCoord("TestFileCacher", shard))
        TestService.__init__(self, shard, custom_logger=logger)

        # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/"
        self.cache_base_path = os.path.join(config.cache_dir,
                                            "fs-cache-TestFileCacher-0")
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None
        self.file_cacher = FileCacher(self)
        #self.file_cacher = FileCacher(self, path="fs-storage")

    def prepare(self):
        """Initialization for the test code - make sure that the cache
        is empty before testing.

        """
        logger.info("Please delete directory %s before." %
                    self.cache_base_path)

### TEST 000 ###

    def test_000(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        """
        self.size = 100
        self.content = "".join(
            chr(random.randint(0, 255)) for unused_i in xrange(self.size))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                       u"Test #000")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data), "rb").read() != \
                self.content:
            self.test_end(
                False, "Local cache's content differs "
                "from the original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 001 ###

    def test_001(self):
        """Retrieve the file.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.test_end(False, "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data object received correctly.")

### TEST 002 ###

    def test_002(self):
        """Check the size of the file.

        """
        logger.info("  I am checking the size of the ~100B binary file")
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if size == self.size:
            self.test_end(True, "The size is correct.")
        else:
            self.test_end(
                False,
                "The size is wrong: %d instead of %d" % (size, self.size))

### TEST 003 ###

    def test_003(self):
        """Get file from FileCacher.

        """
        logger.info("  I am retrieving the file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.test_end(False, "Content differ.")
        elif not os.path.exists(self.cache_path):
            self.test_end(False, "File not stored in local cache.")
        elif open(self.cache_path, "rb").read() != self.content:
            self.test_end(
                False, "Local cache's content differs "
                "from the original file.")
        else:
            self.test_end(True,
                          "Content object received " + "and cached correctly.")

### TEST 004 ###

    def test_004(self):
        """Delete the file through FS and tries to get it again through FC.

        """
        logger.info("  I am deleting the file from FileCacher.")
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %s." % error)
            return

        else:
            logger.info("  File deleted correctly.")
            logger.info("  I am getting the file from FileCacher.")
            try:
                self.file_cacher.get_file(self.digest)
            except Exception as error:
                self.test_end(True, "Correctly received an error: %r." % error)
            else:
                self.test_end(False, "Did not receive error.")

### TEST 005 ###

    def test_005(self):
        """Get unexisting file from FileCacher.

        """
        logger.info("  I am retrieving an unexisting file from FileCacher.")
        try:
            self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(True, "Correctly received an error: %r." % error)
        else:
            self.test_end(False, "Did not receive error.")

### TEST 006 ###

    def test_006(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        """
        self.content = "".join(
            chr(random.randint(0, 255)) for unused_i in xrange(100))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data),
                  "rb").read() != self.content:
            self.test_end(
                False, "Local cache's content differs "
                "from the original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 007 ###

    def test_007(self):
        """Retrieve the file as a string.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher "
                    "using get_file_to_string()")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.test_end(False, "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data received correctly.")

### TEST 008 ###

    def test_008(self):
        """Put a ~100MB file into the storage (using a specially
        crafted file-like object).

        """
        logger.info("  I am sending the ~100MB binary file to FileCacher")
        rand_file = RandomFile(100000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.test_end(False, "The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif my_digest != data:
            self.test_end(False, "File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 009 ###

    def test_009(self):
        """Get the ~100MB file from FileCacher.

        """
        logger.info("  I am retrieving the ~100MB file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.test_end(False, "Content differs.")
            elif not os.path.exists(self.cache_path):
                self.test_end(False, "File not stored in local cache.")
            else:
                self.test_end(
                    True, "Content object received " + "and cached correctly.")
        finally:
            self.file_cacher.delete(self.digest)