Example #1
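A unit test that refreshes a SeedfileSet, captures its state with __getstate__, deletes one seed file from both the origin and local directories, and then verifies that __setstate__ drops the missing entry while faithfully restoring the rest.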
    def test_setstate_with_changed_files(self):
        # refresh the sfs
        self.sfs.__enter__()

        # get the original state
        state_before = self.sfs.__getstate__()
        self.assertEqual(len(state_before['things']), self.file_count)

        # delete one of the files
        file_to_remove = self.files.pop()
        localfile_md5 = hashlib.md5(open(file_to_remove,
                                         'rb').read()).hexdigest()
        localfilename = "sf_%s" % localfile_md5

        # remove it from origin
        os.remove(file_to_remove)
        self.assertFalse(file_to_remove in self.files)
        self.assertFalse(os.path.exists(file_to_remove))
        #        print "removed %s" % file_to_remove

        #        # remove it from localdir
        localfile_to_remove = os.path.join(self.localdir, localfilename)
        os.remove(localfile_to_remove)
        self.assertFalse(os.path.exists(localfile_to_remove))

        # create a new sfs
        new_sfs = SeedfileSet()
        new_sfs.__setstate__(state_before)

        self.assertEqual(len(new_sfs.things), (self.file_count - 1))

        #        print "Newthings: %s" % new_sfs.things.keys()
        for k, thing in state_before['things'].iteritems():
            #            print "k: %s" % k
            if k == localfile_md5:
                self.assertFalse(k in new_sfs.things)
                continue
            else:
                # is there a corresponding thing in sfs?
                self.assertTrue(k in new_sfs.things)

            for x, y in thing.iteritems():
                # was it set correctly?
                sfsthing = new_sfs.things[k].__dict__[x]
                if hasattr(sfsthing, '__dict__'):
                    # some things are complex objects themselves
                    # so we have to compare their __dict__ versions
                    self._same_dict(y, sfsthing.__dict__)
                else:
                    # others are just simple objects and we can
                    # compare them directly
                    self.assertEqual(y, sfsthing)

        self.assertEqual(self.file_count - 1, new_sfs.sfcount)
Example #2
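The same __setstate__ test as Example #1; only the line wrapping and comment indentation differ.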
    def test_setstate_with_changed_files(self):
        # refresh the sfs
        self.sfs.__enter__()

        # get the original state
        state_before = self.sfs.__getstate__()
        self.assertEqual(len(state_before['things']), self.file_count)

        # delete one of the files
        file_to_remove = self.files.pop()
        localfile_md5 = hashlib.md5(open(file_to_remove, 'rb').read()).hexdigest()
        localfilename = "sf_%s" % localfile_md5

        # remove it from origin
        os.remove(file_to_remove)
        self.assertFalse(file_to_remove in self.files)
        self.assertFalse(os.path.exists(file_to_remove))
#        print "removed %s" % file_to_remove

#        # remove it from localdir
        localfile_to_remove = os.path.join(self.localdir, localfilename)
        os.remove(localfile_to_remove)
        self.assertFalse(os.path.exists(localfile_to_remove))

        # create a new sfs
        new_sfs = SeedfileSet()
        new_sfs.__setstate__(state_before)

        self.assertEqual(len(new_sfs.things), (self.file_count - 1))

#        print "Newthings: %s" % new_sfs.things.keys()
        for k, thing in state_before['things'].iteritems():
#            print "k: %s" % k
            if k == localfile_md5:
                self.assertFalse(k in new_sfs.things)
                continue
            else:
                # is there a corresponding thing in sfs?
                self.assertTrue(k in new_sfs.things)

            for x, y in thing.iteritems():
                # was it set correctly?
                sfsthing = new_sfs.things[k].__dict__[x]
                if hasattr(sfsthing, '__dict__'):
                    # some things are complex objects themselves
                    # so we have to compare their __dict__ versions
                    self._same_dict(y, sfsthing.__dict__)
                else:
                    # others are just simple objects and we can
                    # compare them directly
                    self.assertEqual(y, sfsthing)

        self.assertEqual(self.file_count - 1, new_sfs.sfcount)
Example #3
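Lazily builds a SeedfileSet for a campaign, entering the set as a context manager so it is populated on entry.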
    def _create_seedfile_set(self):
        if self.seedfile_set is not None:
            return

        logger.info('Building seedfile set')
        with SeedfileSet(campaign_id=self.campaign_id,
                         originpath=self.seed_dir_in,
                         localpath=self.seed_dir_local,
                         outputpath=self.sf_set_out) as sfset:
            self.seedfile_set = sfset
Example #4
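A test fixture that creates origin, local, and output temp directories, writes five small seed files, and constructs a SeedfileSet over them.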
    def setUp(self):
        campaign_id = 'testcampaign'

        self.origindir = tempfile.mkdtemp()
        self.localdir = tempfile.mkdtemp()
        self.outputdir = tempfile.mkdtemp()

        # create some files
        self.file_count = 5
        self.files = []
        for i in range(self.file_count):
            (fd, f) = tempfile.mkstemp(dir=self.origindir)

            os.write(fd, 'abacab%d' % i)
            os.close(fd)
            self.files.append(f)

        # create a set
        self.sfs = SeedfileSet(campaign_id, self.origindir, self.localdir,
                               self.outputdir)
Example #5
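Populates a campaign's seedfile_set by writing ten temporary seed files and registering them all with a single add_file(*files) call.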
    def _populate_sf_set(self):
        self.campaign.seedfile_set = SeedfileSet()

        files = []
        for x in xrange(10):
            _fd, _fname = tempfile.mkstemp(prefix='seedfile_', dir=self.tmpdir)
            os.write(_fd, str(x))
            os.close(_fd)
            files.append(_fname)

        self.campaign.seedfile_set.add_file(*files)
Example #6
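The same setUp fixture as Example #4, with the SeedfileSet constructor call on a single line.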
    def setUp(self):
        campaign_id = 'testcampaign'

        self.origindir = tempfile.mkdtemp()
        self.localdir = tempfile.mkdtemp()
        self.outputdir = tempfile.mkdtemp()

        # create some files
        self.file_count = 5
        self.files = []
        for i in range(self.file_count):
            (fd, f) = tempfile.mkstemp(dir=self.origindir)

            os.write(fd, 'abacab%d' % i)
            os.close(fd)
            self.files.append(f)

        # create a set
        self.sfs = SeedfileSet(campaign_id, self.origindir, self.localdir, self.outputdir)
Example #7
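A fuller test case covering the SeedfileSet fixture plus pickling, directory setup, the copy-to-localdir helper, add_file, and __getstate__ pickle-friendliness.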
class Test(unittest.TestCase):
    def setUp(self):
        campaign_id = 'testcampaign'

        self.origindir = tempfile.mkdtemp()
        self.localdir = tempfile.mkdtemp()
        self.outputdir = tempfile.mkdtemp()

        # create some files
        self.file_count = 5
        self.files = []
        for i in range(self.file_count):
            (fd, f) = tempfile.mkstemp(dir=self.origindir)

            os.write(fd, 'abacab%d' % i)
            os.close(fd)
            self.files.append(f)

        # create a set
        self.sfs = SeedfileSet(campaign_id, self.origindir, self.localdir,
                               self.outputdir)

    def tearDown(self):
        for f in self.files:
            os.remove(f)
            self.assertFalse(os.path.exists(f))
        for d in (self.origindir, self.localdir, self.outputdir):
            shutil.rmtree(d)
            self.assertFalse(os.path.exists(d))

    def test_pickle(self):
        import pickle
        self.assertTrue(hasattr(self.sfs, 'things'))
        # no files added yet
        self.assertEqual(0, len(self.sfs.things))
        # add the files
        self.sfs._setup()
        # confirm that the files are there
        self.assertEqual(self.file_count, len(self.sfs.things))
        unpickled = pickle.loads(pickle.dumps(self.sfs))
        pprint(unpickled.__dict__)

    def test_set_directories(self):
        self.assertEqual(self.sfs.originpath, self.origindir)
        self.assertEqual(self.sfs.localpath, self.localdir)
        self.assertEqual(self.sfs.outputpath, self.outputdir)
        self.assertEqual(None, self.sfs.origindir)
        self.assertEqual(None, self.sfs.localdir)
        self.assertEqual(None, self.sfs.outputdir)

        self.sfs._set_directories()

        self.assertEqual(Directory, self.sfs.origindir.__class__)
        self.assertEqual(Directory, self.sfs.localdir.__class__)
        self.assertEqual(Directory, self.sfs.outputdir.__class__)

        # make sure the file(s) we created in setUp are in origindir
        self.assertEqual(self.file_count, len(self.sfs.origindir.files))

    def test_copy_files_to_localdir(self):
        # mock the things
        self.sfs.origindir = [1, 2, 3, 4, 5]
        copied = []
        self.sfs.copy_file_from_origin = lambda x: copied.append(x)
        # do the test
        self.sfs._copy_files_to_localdir()
        self.assertEqual(self.sfs.origindir, copied)

    def test_copy_file_from_origin(self):
        pass

    def test_add_local_files_to_set(self):
        pass

    def test_add_file(self):
        self.assertNotEqual(0, len(self.files))
        self.assertEqual(0, len(self.sfs.things))
        self.sfs.add_file(*self.files)
        self.assertEqual(5, len(self.sfs.things))
        for thing in self.sfs.things.itervalues():
            self.assertEqual(SeedFile, thing.__class__)

    def test_init(self):
        self.assertEqual(self.outputdir, self.sfs.seedfile_output_base_dir)
        self.assertEqual(0, len(self.sfs.things))

    def test_getstate_is_pickle_friendly(self):
        # getstate should return a pickleable object
        import pickle
        state = self.sfs.__getstate__()
        try:
            pickle.dumps(state)
        except Exception, e:
            self.fail('Failed to pickle state: %s' % e)
Example #8
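A duplicate of the test case in Example #7, differing only in whitespace.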
class Test(unittest.TestCase):

    def setUp(self):
        campaign_id = 'testcampaign'

        self.origindir = tempfile.mkdtemp()
        self.localdir = tempfile.mkdtemp()
        self.outputdir = tempfile.mkdtemp()

        # create some files
        self.file_count = 5
        self.files = []
        for i in range(self.file_count):
            (fd, f) = tempfile.mkstemp(dir=self.origindir)

            os.write(fd, 'abacab%d' % i)
            os.close(fd)
            self.files.append(f)

        # create a set
        self.sfs = SeedfileSet(campaign_id, self.origindir, self.localdir, self.outputdir)

    def tearDown(self):
        for f in self.files:
            os.remove(f)
            self.assertFalse(os.path.exists(f))
        for d in (self.origindir, self.localdir, self.outputdir):
            shutil.rmtree(d)
            self.assertFalse(os.path.exists(d))

    def test_pickle(self):
        import pickle
        self.assertTrue(hasattr(self.sfs, 'things'))
        # no files added yet
        self.assertEqual(0, len(self.sfs.things))
        # add the files
        self.sfs._setup()
        # confirm that the files are there
        self.assertEqual(self.file_count, len(self.sfs.things))
        unpickled = pickle.loads(pickle.dumps(self.sfs))
        pprint(unpickled.__dict__)

    def test_set_directories(self):
        self.assertEqual(self.sfs.originpath, self.origindir)
        self.assertEqual(self.sfs.localpath, self.localdir)
        self.assertEqual(self.sfs.outputpath, self.outputdir)
        self.assertEqual(None, self.sfs.origindir)
        self.assertEqual(None, self.sfs.localdir)
        self.assertEqual(None, self.sfs.outputdir)

        self.sfs._set_directories()

        self.assertEqual(Directory, self.sfs.origindir.__class__)
        self.assertEqual(Directory, self.sfs.localdir.__class__)
        self.assertEqual(Directory, self.sfs.outputdir.__class__)

        # make sure the file(s) we created in setUp are in origindir
        self.assertEqual(self.file_count, len(self.sfs.origindir.files))

    def test_copy_files_to_localdir(self):
        # mock the things
        self.sfs.origindir = [1, 2, 3, 4, 5]
        copied = []
        self.sfs.copy_file_from_origin = lambda x: copied.append(x)
        # do the test
        self.sfs._copy_files_to_localdir()
        self.assertEqual(self.sfs.origindir, copied)

    def test_copy_file_from_origin(self):
        pass

    def test_add_local_files_to_set(self):
        pass

    def test_add_file(self):
        self.assertNotEqual(0, len(self.files))
        self.assertEqual(0, len(self.sfs.things))
        self.sfs.add_file(*self.files)
        self.assertEqual(5, len(self.sfs.things))
        for thing in self.sfs.things.itervalues():
            self.assertEqual(SeedFile, thing.__class__)

    def test_init(self):
        self.assertEqual(self.outputdir, self.sfs.seedfile_output_base_dir)
        self.assertEqual(0, len(self.sfs.things))

    def test_getstate_is_pickle_friendly(self):
        # getstate should return a pickleable object
        import pickle
        state = self.sfs.__getstate__()
        try:
            pickle.dumps(state)
        except Exception, e:
            self.fail('Failed to pickle state: %s' % e)
Example #9
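The BFF main loop: it builds (or restores from cache) a SeedfileSet, then repeatedly picks a seed file with next_item(), fuzzes it with zzuf, records tries, verifies crashers, and re-caches the seed range, seed file, and seedfile set between iterations.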
def main():
    global START_SEED
    hashes = []

    # give up if we don't have a debugger
    debuggers.verify_supported_platform()

    setup_logging_to_console(logger, logging.INFO)
    logger.info("Welcome to BFF!")

    scriptpath = os.path.dirname(sys.argv[0])
    logger.info('Scriptpath is %s', scriptpath)

    # parse command line options
    logger.info('Parsing command line options')
    parser = OptionParser()
    parser.add_option('',
                      '--debug',
                      dest='debug',
                      help='Turn on debugging output',
                      action='store_true')
    parser.add_option('-c',
                      '--config',
                      dest='cfg',
                      help='Config file location')
    (options, args) = parser.parse_args()  #@UnusedVariable

    # Get the cfg file name
    if options.cfg:
        remote_cfg_file = options.cfg
    else:
        remote_cfg_file = get_config_file(scriptpath)

    # die unless the remote config is present
    assert os.path.exists(
        remote_cfg_file
    ), 'Cannot find remote config file: %s, Please create it or use --config option to specify a different location.' % remote_cfg_file

    # copy remote config to local:
    local_cfg_file = os.path.expanduser('~/bff.cfg')
    filetools.copy_file(remote_cfg_file, local_cfg_file)

    # Read the cfg file
    logger.info('Reading config from %s', local_cfg_file)
    cfg = cfg_helper.read_config_options(local_cfg_file)

    # set up local logging
    setup_logfile(cfg.local_dir,
                  log_basename='bff.log',
                  level=logging.DEBUG,
                  max_bytes=1e8,
                  backup_count=3)

    # set up remote logging
    setup_logfile(cfg.output_dir,
                  log_basename='bff.log',
                  level=logging.INFO,
                  max_bytes=1e7,
                  backup_count=5)

    try:
        check_for_script(cfg)
    except:
        logger.warning("Please configure BFF to fuzz a binary.  Exiting...")
        sys.exit()

    z.setup_dirs_and_files(local_cfg_file, cfg)

    # make sure we cache it for the next run
    #    cache_state(cfg.campaign_id, 'cfg', cfg, cfg.cached_config_file)

    sr = get_cached_state('seedrange', cfg.campaign_id,
                          cfg.cached_seedrange_file)
    if not sr:
        sr = SeedRange(cfg.start_seed, cfg.seed_interval, cfg.max_seed)

    # set START_SEED global for timestamping
    START_SEED = sr.s1

    start_process_killer(scriptpath, cfg)

    z.set_unbuffered_stdout()

    # set up the seedfile set so we can pick seedfiles for everything else...
    seedfile_set = get_cached_state('seedfile_set', cfg.campaign_id,
                                    cfg.cached_seedfile_set)
    if not seedfile_set:
        logger.info('Building seedfile set')
        sfs_logfile = os.path.join(cfg.seedfile_output_dir, 'seedfile_set.log')
        with SeedfileSet(
                campaign_id=cfg.campaign_id,
                originpath=cfg.seedfile_origin_dir,
                localpath=cfg.seedfile_local_dir,
                outputpath=cfg.seedfile_output_dir,
                logfile=sfs_logfile,
        ) as sfset:
            seedfile_set = sfset

    # set up the watchdog timeout within the VM and restart the daemon
    if cfg.watchdogtimeout:
        watchdog = WatchDog(cfg.watchdogfile, cfg.watchdogtimeout)
        touch_watchdog_file(cfg)
        watchdog.go()

    cache_state(cfg.campaign_id, 'seedfile_set', seedfile_set,
                cfg.cached_seedfile_set)

    sf = seedfile_set.next_item()

    # Run the program once to cache it into memory
    z.cache_program_once(cfg, sf.path)

    # Give target time to die
    time.sleep(1)

    # flag to indicate whether this is a fresh script start up or not
    first_chunk = True

    # remember our parent process id so we can tell if it changes later
    _last_ppid = os.getppid()

    # campaign.go
    while sr.in_max_range():

        # wipe the tmp dir clean to try to avoid filling the VM disk
        TmpReaper().clean_tmp()

        sf = seedfile_set.next_item()

        r = sf.rangefinder.next_item()
        sr.set_s2()

        while sr.in_range():
            # interval.go
            logger.debug('Starting interval %d-%d', sr.s1, sr.s2)

            # Prevent watchdog from rebooting VM.  If /tmp/fuzzing exists and is stale, the machine will reboot
            touch_watchdog_file(cfg)

            # check parent process id
            _ppid_now = os.getppid()
            if not _ppid_now == _last_ppid:
                logger.warning('Parent process ID changed from %d to %d',
                               _last_ppid, _ppid_now)
                _last_ppid = _ppid_now

            # do the fuzz
            cmdline = cfg.get_command(sf.path)

            if first_chunk:
                # disable the --quiet option in zzuf
                # on the first chunk only
                quiet_flag = False
                first_chunk = False
            else:
                quiet_flag = True

            zzuf = Zzuf(
                cfg.local_dir,
                sr.s1,
                sr.s2,
                cmdline,
                sf.path,
                cfg.zzuf_log_file,
                cfg.copymode,
                r.min,
                r.max,
                cfg.progtimeout,
                quiet_flag,
            )
            saw_crash = zzuf.go()

            if not saw_crash:
                # we must have made it through this chunk without a crash
                # so go to next chunk
                try_count = sr.s1_s2_delta()
                sf.record_tries(tries=try_count)
                r.record_tries(tries=try_count)

                # emit a log entry
                crashcount = z.get_crashcount(cfg.crashers_dir)
                rate = get_rate(sr.s1)
                seed_str = "seeds=%d-%d" % (sr.s1, sr.s2)
                range_str = "range=%.6f-%.6f" % (r.min, r.max)
                rate_str = "Rate=(%.2f/s %.1f/m %.0f/h %.0f/d)" % (
                    rate, rate * 60, rate * 3600, rate * 86400)
                expected_density = seedfile_set.expected_crash_density
                xd_str = "expected=%.9f" % expected_density
                xr_str = 'expected_rate=%.6f uniq/day' % (expected_density *
                                                          rate * 86400)
                logger.info('Fuzzing %s %s %s %s %s %s crash_count=%d',
                            sf.path, seed_str, range_str, rate_str, xd_str,
                            xr_str, crashcount)

                # set s1 to s2 so that as soon as we continue we'll break out of the sr.in_range() loop
                sr.set_s1_to_s2()
                continue

            # we must have seen a crash

            # get the results
            zzuf_log = ZzufLog(cfg.zzuf_log_file,
                               cfg.zzuf_log_out(sf.output_dir))

            # Don't generate cases for killed process or out-of-memory
            # In the default mode, zzuf will report a signal. In copy (and exit code) mode, zzuf will
            # report the exit code in its output log.  The exit code is 128 + the signal number.
            crash_status = zzuf_log.crash_logged(cfg.copymode)
            sr.bookmark_s1()
            sr.s1 = zzuf_log.seed

            # record the fact that we've made it this far
            try_count = sr.s1_delta()
            sf.record_tries(tries=try_count)
            r.record_tries(tries=try_count)

            new_uniq_crash = False
            if crash_status:
                logger.info('Generating testcase for %s', zzuf_log.line)
                # a true crash
                zzuf_range = zzuf_log.range
                # create the temp dir for the results
                cfg.create_tmpdir()
                outfile = cfg.get_testcase_outfile(sf.path, sr.s1)
                logger.debug('Output file is %s', outfile)
                testcase = zzuf.generate_test_case(sf.path, sr.s1, zzuf_range,
                                                   outfile)

                # Do internal verification using GDB / Valgrind / Stderr
                fuzzedfile = file_handlers.basicfile.BasicFile(outfile)

                with BffCrash(cfg, sf, fuzzedfile, cfg.program,
                              cfg.debugger_timeout, cfg.killprocname,
                              cfg.backtracelevels, cfg.crashers_dir, sr.s1,
                              r) as c:
                    if c.is_crash:
                        new_uniq_crash = verify_crasher(
                            c, hashes, cfg, seedfile_set)

                    # record the zzuf log line for this crash
                    if not c.logger:
                        c.get_logger()
                    c.logger.debug("zzuflog: %s", zzuf_log.line)
                    c.logger.info('Command: %s', testcase.cmdline)

                cfg.clean_tmpdir()

            sr.increment_seed()

            # cache objects in case of reboot
            cache_state(cfg.campaign_id, 'seedrange', sr,
                        cfg.cached_seedrange_file)
            pickled_seedfile_file = os.path.join(cfg.cached_objects_dir,
                                                 sf.pkl_file())
            cache_state(cfg.campaign_id, sf.cache_key(), sf,
                        pickled_seedfile_file)
            cache_state(cfg.campaign_id, 'seedfile_set', seedfile_set,
                        cfg.cached_seedfile_set)

            if new_uniq_crash:
                # we had a hit, so break the inner while() loop
                # so we can pick a new range. This is to avoid
                # having a crash-rich range run away with the
                # probability before other ranges have been tried
                break
Example #10
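A test case similar to Example #7, but with the pickle round-trip test commented out and a _same_dict helper for comparing restored state dictionaries.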
class Test(unittest.TestCase):
    def setUp(self):
        campaign_id = 'testcampaign'

        self.origindir = tempfile.mkdtemp()
        self.localdir = tempfile.mkdtemp()
        self.outputdir = tempfile.mkdtemp()

        # create some files
        self.file_count = 5
        self.files = []
        for i in range(self.file_count):
            (fd, f) = tempfile.mkstemp(dir=self.origindir)

            os.write(fd, 'abacab%d' % i)
            os.close(fd)
            self.files.append(f)

        # create a set
        self.sfs = SeedfileSet(campaign_id, self.origindir, self.localdir,
                               self.outputdir)

    def tearDown(self):
        for f in self.files:
            os.remove(f)
            self.assertFalse(os.path.exists(f))
        for d in (self.origindir, self.localdir, self.outputdir):
            shutil.rmtree(d)
            self.assertFalse(os.path.exists(d))


#    def test_pickle(self):
#        import pickle
#        self.assertTrue(hasattr(self.sfs, 'things'))
#        # no files added yet
#        self.assertEqual(0, len(self.sfs.things))
#        # add the files
#        self.sfs._setup()
#        # confirm that the files are there
#        self.assertEqual(self.file_count, len(self.sfs.things))
#        unpickled = pickle.loads(pickle.dumps(self.sfs))
#
#        self.assertTrue(hasattr(unpickled, 'things'))
#        self.assertEqual(self.file_count, len(unpickled.things))

    def test_set_directories(self):
        self.assertEqual(self.sfs.originpath, self.origindir)
        self.assertEqual(self.sfs.localpath, self.localdir)
        self.assertEqual(self.sfs.outputpath, self.outputdir)
        self.assertEqual(None, self.sfs.origindir)
        self.assertEqual(None, self.sfs.localdir)
        self.assertEqual(None, self.sfs.outputdir)

        self.sfs._set_directories()

        self.assertEqual(Directory, self.sfs.origindir.__class__)
        self.assertEqual(Directory, self.sfs.localdir.__class__)
        self.assertEqual(Directory, self.sfs.outputdir.__class__)

        # make sure the file(s) we created in setUp are in origindir
        self.assertEqual(self.file_count, len(self.sfs.origindir.files))

    def test_copy_files_to_localdir(self):
        # mock the things
        self.sfs.origindir = [1, 2, 3, 4, 5]
        copied = []
        self.sfs.copy_file_from_origin = lambda x: copied.append(x)
        # do the test
        self.sfs._copy_files_to_localdir()
        self.assertEqual(self.sfs.origindir, copied)

    def test_copy_file_from_origin(self):
        pass

    def test_add_local_files_to_set(self):
        pass

    def test_add_file(self):
        self.assertNotEqual(0, len(self.files))
        self.assertEqual(0, len(self.sfs.things))
        self.sfs.add_file(*self.files)
        self.assertEqual(5, len(self.sfs.things))
        for thing in self.sfs.things.itervalues():
            self.assertEqual(SeedFile, thing.__class__)

    def test_init(self):
        self.assertEqual(self.outputdir, self.sfs.seedfile_output_base_dir)
        self.assertEqual(0, len(self.sfs.things))

    def _same_dict(self, d1, d2):
        for k, v in d1.iteritems():
            #            print k
            self.assertTrue(k in d2)
            self.assertEqual(v, d2[k])
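
Taken together, the examples show a consistent SeedfileSet lifecycle: construct the set with a campaign id plus origin, local, and output paths, populate it with add_file() (or by entering it as a context manager), and pickle its state between runs. Below is a minimal sketch of that flow; the import path and the throwaway seed files are assumptions for illustration and are not part of the examples above.

    import os
    import pickle
    import tempfile

    # NOTE: the import path below is an assumption based on the certfuzz/BFF
    # layout these examples come from; adjust it to wherever SeedfileSet
    # lives in your tree.
    from certfuzz.file_handlers.seedfile_set import SeedfileSet

    # build throwaway origin/local/output directories, mirroring setUp() above
    origindir = tempfile.mkdtemp()
    localdir = tempfile.mkdtemp()
    outputdir = tempfile.mkdtemp()

    # drop a few small seed files into the origin directory
    files = []
    for i in range(3):
        fd, fname = tempfile.mkstemp(dir=origindir)
        os.write(fd, 'seed%d' % i)
        os.close(fd)
        files.append(fname)

    # construct the set and register the seed files explicitly
    sfs = SeedfileSet('democampaign', origindir, localdir, outputdir)
    sfs.add_file(*files)
    print len(sfs.things)       # one SeedFile entry per input file

    # round-trip the set through pickle, as test_pickle and the
    # __getstate__/__setstate__ tests above do
    restored = pickle.loads(pickle.dumps(sfs))
    print len(restored.things)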