def setUp(self):
        """Point the archive/index paths at the test tree and reset state.

        Every path read from cfg is asserted against a known testdata
        location before anything is deleted, so a mis-pointed config
        cannot wipe real data.
        """
        self.dbindex = cfg.getPath("archiveindex")
        self.assertEqual(self.dbindex, "testdata/archive/index")  # guard against config goof
        shutil.rmtree(self.dbindex, True)  # each test begins with an empty index

        self.dbdoc = cfg.getPath("archive")
        self.assertEqual(self.dbdoc, "testdata/archive")

        self.logdir = cfg.getPath("logs")
        self.assertEqual(self.logdir, "testlogs")

        # Sanity check: the live counter should sit far below the range
        # reserved for test output.
        new_id = docarchive.idCounter.getNewId()
        self.assert_(int(new_id) < 999000, new_id)

        # Force subsequent ids to begin at 999000 so test-generated
        # documents are easy to recognize.
        docarchive.idCounter._endId = 999000

        self._cleanup()
    def setUp(self):

        messagelog.mlog = messagelog.MsgLogger()

        # prepare some configuration for this test
        self.interval0 = cfg.get("indexing", "interval")
        self.numDoc0 = cfg.get("indexing", "numDoc")
        self.max_interval0 = cfg.get("indexing", "max_interval")
        cfg.set("indexing", "interval", "3")
        cfg.set("indexing", "numDoc", "5")
        cfg.set("indexing", "max_interval", "360")

        # make dummy queued msg 0.tmp
        self.logdir = cfg.getPath("logs")
        self.path = os.path.join(self.logdir, "0.tmp")
        file(self.path, "wb").close()

        dt0 = datetime.datetime(2000, 1, 1, 10, 00, 0)
        mtime = time.mktime(dt0.timetuple())  # first queued: 2000-1-1 10:00 localtime
        os.utime(self.path, (mtime, mtime))
"""
"""

import os, os.path
import traceback
import unittest

from config_help import cfg
from minds import messagelog
from minds import proxyhandler
from minds.util.multiblockfile import MbReader

# Normalize the test-document path to end with the platform separator:
# joining with '.' appends (sep + '.'), and [:-1] strips the trailing '.'.
testdir = os.path.join(cfg.getPath('testDoc'),'.')[:-1]


class TestProxyHandler(unittest.TestCase):

    # note that we uses proxyhandler.testHandleMlog() to run through most
    # of ProxyHandler. However network element like select is not exercised.


    def setUp(self):
        # Initialize the proxy with the test configuration and start from a
        # clean log directory. The import is deferred to call time —
        # presumably to avoid an import cycle at module load; TODO confirm.
        from minds import proxy
        proxy.init('')                              # use test config
        self.cleanup()


    def cleanup(self):
        # remove existing log files
        # hardcode the 'testlogs' directory. Avoid config goof and deleting real data.
        # Select only files matching the logger's filename pattern.
        # NOTE(review): `files` is never used in the visible lines — the
        # deletion step appears to be missing/truncated in this chunk.
        files = filter(messagelog.mlog.log_pattern.match, os.listdir('testlogs'))
 def setUp(self):
     # Record the archive-index path and start each test from a clean slate.
     # NOTE(review): 1-space indentation suggests this fragment was spliced
     # from another test module — verify against the original file.
     self.dbindex = cfg.getPath('archiveindex')
     self.cleanup()
import traceback
import unittest

import PyLucene

from config_help import cfg
from minds import messagelog
from minds import qmsg_processor
from minds import distillML
from minds import docarchive
from minds import lucene_logic
from minds.util import fileutil
from minds.util import patterns_tester


# Normalize the test-document path to end with the platform separator:
# joining with '.' appends (sep + '.'), and [:-1] strips the trailing '.'.
testdir = os.path.join(cfg.getPath("testDoc"), ".")[:-1]


def _makeMeta(uri, date, etag, last_modified):
    """ Utility to build meta """

    # Round-trip the date through parse/format to verify and normalize its
    # timestamp format before storing it.
    ts = qmsg_processor._parseTimestamp(date)  # do a roundtrip to verify the format
    date = qmsg_processor._formatTimestamp(ts)

    # Only truthy values become keys in the meta dict.
    meta = {}
    if uri:
        meta["uri"] = uri
    if date:
        meta["date"] = date
    if etag:
        meta["etag"] = etag
    # NOTE(review): the last_modified handling and the return statement are
    # not visible in this chunk — the function appears truncated here.
 def setUp(self):
     # Build the path of the fixture log file and start from a clean state.
     # NOTE(review): self.FILE1 is a class attribute not visible in this
     # chunk; 1-space indentation suggests a spliced fragment.
     self.pathname = os.path.join(cfg.getPath('logs'), self.FILE1)
     self.cleanup()
 def setUp(self):
     # Record the archive path, wipe old state, then re-create the
     # directory layout. Presumably setupPaths() rebuilds directories that
     # cleanup() removed — TODO confirm.
     self.apath = cfg.getPath('archive')
     self.cleanup()
     cfg.setupPaths()
 def setUp(self):
     # Record both archive paths, wipe old state, re-create the directory
     # layout, then seed the archive with test documents.
     # NOTE(review): 1-space indentation suggests a spliced fragment;
     # populateTestDocs() is defined outside this chunk.
     self.dbindex = cfg.getPath('archiveindex')
     self.apath = cfg.getPath('archive')
     self._cleanup()
     cfg.setupPaths()
     self.populateTestDocs()