Example #1
 def test_rsync_agent5(self):
     """ rsync should be able to resolve symlinks """
     self.agent.src_path = os.path.join(self.rootdir, "folder0")
     os.symlink(self.testfile, os.path.join(self.agent.src_path, "link1"))
     self.agent.cmdopts = {'-a': None, '--copy-links': None}
     self.assertTrue(self.agent.transfer(),
                     "transfer a folder containing a symlink failed")
     self.assertEqual(
         misc.hashfile(self.testfile, hasher='sha1'),
         misc.hashfile(os.path.join(self.destdir, "folder0", "link1"),
                       hasher='sha1'),
         "symlink was not properly transferred")
Example #2
 def test_rsync_agent5(self):
     """ rsync should be able to resolve symlinks """
     self.agent.src_path = os.path.join(self.rootdir, "folder0")
     os.symlink(self.testfile, os.path.join(self.agent.src_path, "link1"))
     self.agent.cmdopts = {'-a': None, '--copy-links': None}
     self.assertTrue(
         self.agent.transfer(),
         "transfer a folder containing a symlink failed")
     self.assertEqual(
         misc.hashfile(self.testfile, hasher='sha1'),
         misc.hashfile(
             os.path.join(self.destdir, "folder0", "link1"),
             hasher='sha1'),
         "symlink was not properly transferred")
Example #3
 def test_deliver_sample1(self):
     """ transfer a sample using rsync
     """
     # create some content to transfer
     digestfile = self.deliverer.staging_digestfile()
     filelist = self.deliverer.staging_filelist()
     basedir = os.path.dirname(digestfile)
     create_folder(basedir)
     expected = []
     with open(digestfile, 'w') as dh, open(filelist, 'w') as fh:
         curdir = basedir
         for d in range(4):
             if d > 0:
                 curdir = os.path.join(curdir, "folder{}".format(d))
                 create_folder(curdir)
             for n in range(5):
                 fpath = os.path.join(curdir, "file{}".format(n))
                 open(fpath, 'w').close()
                 rpath = os.path.relpath(fpath, basedir)
                 digest = hashfile(fpath, hasher=self.deliverer.hash_algorithm)
                 if n < 3:
                     expected.append(rpath)
                     fh.write(u"{}\n".format(rpath))
                     dh.write(u"{}  {}\n".format(digest, rpath))
         rpath = os.path.basename(digestfile)
         expected.append(rpath)
         fh.write(u"{}\n".format(rpath))
     # transfer the listed content
     destination = self.deliverer.expand_path(self.deliverer.deliverypath)
     create_folder(os.path.dirname(destination))
     self.assertTrue(self.deliverer.do_delivery(), "failed to deliver sample")
     # list the transferred files relative to the destination
     observed = [os.path.relpath(os.path.join(d, f), destination)
                 for d, _, files in os.walk(destination) for f in files]
     self.assertEqual(sorted(observed), sorted(expected))
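For reference, the digestfile written by the loop above follows the sha1sum/shasum output format: a digest, two spaces, and a path relative to the digestfile. The test only creates empty files, so, assuming hash_algorithm is sha1, every digest is that of the empty string and the first entries would read:

 da39a3ee5e6b4b0d3255bfef95601890afd80709  file0
 da39a3ee5e6b4b0d3255bfef95601890afd80709  file1
 da39a3ee5e6b4b0d3255bfef95601890afd80709  file2
 da39a3ee5e6b4b0d3255bfef95601890afd80709  folder1/file0

The filelist holds the same relative paths, plus the digestfile's own basename as its last entry.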
Example #4
 def validate_transfer(self):
     """ Validate the transferred files by computing checksums and comparing 
         to the pre-computed checksums, supplied in the digestfile attribute
         of this Agent instance. The hash algorithm is inferred from the file
         extension of the digestfile. The paths of the files to check are
         assumed to be relative to the location of the digestfile.
         
         Currently not implemented for remote transfers.
         
         :returns: False if any checksum does not match, or if a file does
             not exist. True otherwise.
         :raises transfer.RsyncValidationError: if the digestfile was not 
             supplied
     """
     if self.remote_host is not None:
         raise NotImplementedError("Validation on remote host not implemented")
     try:
         with open(self.digestfile) as fh:
             hasher = self.digestfile.split('.')[-1]
             dpath = os.path.dirname(self.digestfile)
             for line in fh:
                 digest, fpath = line.split()
                 tfile = os.path.join(dpath, fpath)
                 if not os.path.exists(tfile) or digest != hashfile(
                         tfile, hasher=hasher):
                     return False
     except TypeError as e:
         raise RsyncValidationError(
             "no digest file specified",
             self.src_path,
             self.dest_path)
     return True
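The digestfile consumed here is the one produced during staging: the hash algorithm is taken from the file extension (a digestfile named checksums.sha1 gives hasher == 'sha1', a valid hashlib name), and each line splits into a digest and a path relative to the digestfile's directory. Below is a standalone sketch of the same check using only hashlib; verify_digestfile is illustrative, not part of the transfer module:

 import hashlib
 import os

 def verify_digestfile(digestfile):
     # Infer the hash algorithm from the digestfile extension, e.g. ".sha1".
     hasher = digestfile.split('.')[-1]
     dpath = os.path.dirname(digestfile)
     with open(digestfile) as fh:
         for line in fh:
             digest, fpath = line.split()
             tfile = os.path.join(dpath, fpath)
             if not os.path.exists(tfile):
                 return False
             h = hashlib.new(hasher)
             with open(tfile, 'rb') as tf:
                 for chunk in iter(lambda: tf.read(65536), b''):
                     h.update(chunk)
             if h.hexdigest() != digest:
                 return False
     return True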
Example #5
 def test_deliver_sample1(self):
     """ transfer a sample using rsync
     """
     # create some content to transfer
     digestfile = self.deliverer.staging_digestfile()
     filelist = self.deliverer.staging_filelist()
     basedir = os.path.dirname(digestfile)
     create_folder(basedir)
     expected = []
     with open(digestfile, 'w') as dh, open(filelist, 'w') as fh:
         curdir = basedir
         for d in xrange(4):
             if d > 0:
                 curdir = os.path.join(curdir, "folder{}".format(d))
                 create_folder(curdir)
             for n in xrange(5):
                 fpath = os.path.join(curdir, "file{}".format(n))
                 open(fpath, 'w').close()
                 rpath = os.path.relpath(fpath, basedir)
                 digest = hashfile(fpath, hasher=self.deliverer.hash_algorithm)
                 if n < 3:
                     expected.append(rpath)
                     fh.write("{}\n".format(rpath))
                     dh.write("{}  {}\n".format(digest, rpath))
         rpath = os.path.basename(digestfile)
         expected.append(rpath)
         fh.write("{}\n".format(rpath))
     # transfer the listed content
     destination = self.deliverer.expand_path(self.deliverer.deliverypath)
     create_folder(os.path.dirname(destination))
     self.assertTrue(self.deliverer.do_delivery(), "failed to deliver sample")
     # list the transferred files relative to the destination
     observed = [os.path.relpath(os.path.join(d, f), destination)
                 for d, _, files in os.walk(destination) for f in files]
     self.assertItemsEqual(observed, expected)
Example #6
    def validate_transfer(self):
        """Validate the transferred files by computing checksums and comparing
            to the pre-computed checksums, supplied in the digestfile attribute
            of this Agent instance. The hash algorithm is inferred from the file
            extension of the digestfile. The paths of the files to check are
            assumed to be relative to the location of the digestfile.

            Currently not implemented for remote transfers.

            :returns: False if any checksum does not match, or if a file does
                not exist. True otherwise.
            :raises transfer.RsyncValidationError: if the digestfile was not
                supplied
        """
        if self.remote_host is not None:
            raise NotImplementedError(
                'Validation on remote host not implemented')
        try:
            with open(self.digestfile) as fh:
                hasher = self.digestfile.split('.')[-1]
                dpath = os.path.dirname(self.digestfile)
                for line in fh:
                    digest, fpath = line.split()
                    tfile = os.path.join(dpath, fpath)
                    if not os.path.exists(tfile) or digest != hashfile(
                            tfile, hasher=hasher):
                        return False
        except TypeError as e:
            raise RsyncValidationError('no digest file specified',
                                       self.src_path, self.dest_path)
        return True
Example #7
 def _get_digest(sourcepath, destpath, no_digest_cache=False, no_digest=False):
     digest = None
     # skip the digest if either the global or the per-file setting is to skip
     if not any([no_checksum, no_digest]):
         checksumpath = "{}.{}".format(sourcepath, hash_algorithm)
         try:
             with open(checksumpath, 'r') as fh:
                 digest = next(fh)
         except IOError:
             digest = hashfile(sourcepath, hasher=hash_algorithm)
             if not no_digest_cache:
                 try:
                     with open(checksumpath, 'w') as fh:
                         fh.write(digest)
                 except IOError as we:
                     logger.warning(
                         "could not write checksum {} to file {}: {}".format(
                             digest, checksumpath, we))
     return sourcepath, destpath, digest
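_get_digest caches each computed checksum in a sidecar file named <sourcepath>.<hash_algorithm>; no_checksum, hash_algorithm and logger are module-level names from the surrounding code rather than parameters. A self-contained sketch of the same sidecar-cache pattern, assuming sha1 and a hypothetical cached_sha1 helper:

 import hashlib

 def cached_sha1(path):
     # Reuse "<path>.sha1" if it exists, otherwise compute the digest
     # and try to cache it; caching is best effort, as above.
     sidecar = "{}.sha1".format(path)
     try:
         with open(sidecar) as fh:
             return next(fh).strip()
     except IOError:
         pass
     h = hashlib.sha1()
     with open(path, 'rb') as fh:
         for chunk in iter(lambda: fh.read(65536), b''):
             h.update(chunk)
     digest = h.hexdigest()
     try:
         with open(sidecar, 'w') as fh:
             fh.write(digest)
     except IOError:
         pass
     return digest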
Example #8
 def check_hash(self, alg, exp):
     assert misc.hashfile(self.hashfile, hasher=alg) == exp
Example #9
 def validate_files(self, src, dst):
     return os.path.exists(src) and \
         os.path.isfile(src) and \
         os.path.exists(dst) and \
         os.path.isfile(dst) and \
         misc.hashfile(src) == misc.hashfile(dst)
Example #10
 def test_multiple_hashfile_calls(self):
     """ Ensure that the hasher object is cleared between subsequent calls
     """
     assert misc.hashfile(self.hashfile, hasher='sha1') == \
         misc.hashfile(self.hashfile, 'sha1')
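The check matters because hashlib objects accumulate every update() fed to them: if the same hasher instance were reused across calls, the second digest would cover the file content twice. A quick illustration with hashlib directly:

 import hashlib

 data = b"some file content"
 fresh = hashlib.sha1(data).hexdigest()

 shared = hashlib.sha1()
 shared.update(data)
 shared.update(data)                   # a reused hasher keeps the first update
 assert shared.hexdigest() != fresh    # digest now covers the data twice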
Example #11
 def _write_digest(rootdir, fhandle, fpath):
     fhandle.write("{}  {}\n".format(
         misc.hashfile(fpath), os.path.relpath(fpath, rootdir)))
Example #12
 def check_hash(self, alg, exp):
     assert misc.hashfile(self.hashfile, hasher=alg) == exp
Example #13
 def validate_files(self, src, dst):
     return os.path.exists(src) and \
         os.path.isfile(src) and \
         os.path.exists(dst) and \
         os.path.isfile(dst) and \
         misc.hashfile(src) == misc.hashfile(dst)
Example #14
 def test_multiple_hashfile_calls(self):
     """ Ensure that the hasher object is cleared between subsequent calls
     """
     assert misc.hashfile(self.hashfile, hasher='sha1') == misc.hashfile(
         self.hashfile, 'sha1')
Example #15
 def test_hashfile_dir(self):
     """Hash digest for a directory should be None"""
     assert misc.hashfile(self.rootdir) is None
Example #16
 def test_hashfile_dir(self):
     """Hash digest for a directory should be None"""   
     assert misc.hashfile(self.rootdir) is None
Example #17
 def test_hashfile(self):
     """Test different hashing algorithms."""
     for alg, obj in self.hashfile_digests.items():
         self.assertEqual(misc.hashfile(self.hashfile, hasher=alg), obj)
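Taken together, these tests pin down the behaviour expected of misc.hashfile: it accepts a hashlib algorithm name (keyword or positional), returns None for directories, and uses a fresh hasher on every call. A minimal sketch consistent with those tests, not the actual implementation in misc (the 'md5' default and the chunk size are assumptions):

 import hashlib
 import os

 def hashfile(path, hasher='md5', blocksize=65536):
     # Directories have no digest.
     if os.path.isdir(path):
         return None
     # A fresh hasher per call, so repeated calls give identical digests.
     h = hashlib.new(hasher)
     with open(path, 'rb') as fh:
         for chunk in iter(lambda: fh.read(blocksize), b''):
             h.update(chunk)
     return h.hexdigest()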