Ejemplo n.º 1
0
 def run(self):
     """Worker loop for the concurrent-write id-generator test.

     Pulls tasks from ``self.tasks``, reads the current generated id from
     redis, validates it, and tracks the min/max id observed plus
     total/success counters.  Exits the loop when the
     ``"mission complete"`` sentinel task is received.
     """
     # Per-worker log file so concurrent workers do not interleave output.
     logger.config_logging(
         logger_name="worker",
         file_name=("didgen_concurrent_write_test_%s" % self.process_id +
                    '.log'),
         log_level="DEBUG",
         dir_name="logs",
         day_rotate=False,
         when="D",
         interval=1,
         max_size=20,
         backup_count=5,
         console=True)
     LLOG = logging.getLogger("worker")
     LLOG.propagate = False
     success_processed = 0
     total_processed = 0
     # Start above any id the generator produces so the first observed id
     # becomes the minimum.  Plain int literal (no "L" suffix) is valid on
     # both Python 2 (auto-promotes to long) and Python 3.
     start_id = 10000000000000000000
     end_id = 0
     r = redis.StrictRedis(host='localhost', port=6389, db=0)
     n = 0
     while True:
         if not self.tasks.empty():
             task = self.tasks.get()
             if task == "mission complete":
                 break
             total_processed += 1
             try:
                 task_id = task[0]  # also validates the task's shape
                 gid = r.get("id_test")
                 # A non-numeric or zero id is invalid; both cases logged
                 # the same message in two branches before — merged here.
                 # A non-numeric id still raises in int() below and lands
                 # in the exception-logging path, as before.
                 if not gid.isdigit() or int(gid) == 0:
                     LLOG.error("Invalid Id: %s", gid)
                 value = int(gid)
                 if value < start_id:
                     start_id = value
                 if value > end_id:
                     end_id = value
                 n += 1
                 # Sample progress every 5000 tasks to keep the log small.
                 if n % 5000 == 0:
                     LLOG.debug("Worker %04d: %020d", self.process_id,
                                value)
                 success_processed += 1
             except Exception as e:  # py2.6+/py3-compatible syntax
                 LLOG.exception(e)
         else:
             # Queue empty: yield briefly instead of busy-spinning.
             time.sleep(0.0001)
Ejemplo n.º 2
0
from __future__ import absolute_import

import logging

import constants
import settings
from logger import config_logging
from vendored.lirc import Lirc

import pubnub
from pubnub.callbacks import SubscribeCallback
from pubnub.enums import PNStatusCategory
from pubnub.pnconfiguration import PNConfiguration
from pubnub.pubnub import PubNub

# Module-level wiring: configure logging, then build the shared PubNub
# client and the LIRC handle used by the callbacks below.
logger = config_logging(settings.LOG_LEVEL, settings.LOG_FILE)
# Quiet the pubnub SDK's own stream logger down to warnings only.
pubnub.set_stream_logger('pubnub', logging.WARNING)

pnconfig = PNConfiguration()
pnconfig.subscribe_key = settings.SUBSCRIBE_KEY
pnconfig.publish_key = settings.PUBLISH_KEY
pnconfig.ssl = True  # force TLS for all PubNub traffic

pubnub_client = PubNub(pnconfig)

# Infra-red transmitter interface (vendored LIRC wrapper).
lirc = Lirc()


class KeyPressSubscribeCallback(SubscribeCallback):
    """PubNub subscribe callback for key-press events.

    NOTE(review): only ``presence`` is visible in this chunk; the message
    and status handlers are presumably defined further down — confirm.
    """

    def presence(self, pubnub, presence):
        pass  # handle incoming presence data
Ejemplo n.º 3
0
    def run(self):
        """Process loop: receive (command, project) pairs over a pipe and
        import Go projects (callgraph generation + whoosh indexing).

        Commands handled: "IMPORT" (build data, reply (command, bool)) and
        "EXIT" (log and return).  EOF on the pipe also terminates the loop.
        """
        # NOTE(review): date_time is never used below — dead assignment?
        date_time = strftime("%Y%m%d_%H%M%S", localtime())
        # Per-process log file keyed on the process id.
        logger.config_logging(file_name = ("project_import_%s_" % self.process_id + '.log'),
                              log_level = CONFIG['log_level'],
                              dir_name = "logs",
                              day_rotate = False,
                              when = "D",
                              interval = 1,
                              max_size = 20,
                              backup_count = 5,
                              console = True)
        LOG.info("Start ProjectImportProcess(%s)", self.process_id)
        try:
            while True:
                try:
                    # Blocks until the parent sends the next work item.
                    command, project = self.pipe_client.recv()
                    if command == "IMPORT":
                        LOG.debug("ProjectImportProcess import %s[%s] to Process(%s)", project.sha1, project.project_name, self.process_id)
                        # os.environ["GOPATH"] = "%s%s" % (os.environ["GOPATH"] + ":" if os.environ.has_key("GOPATH") else "", CONFIG["go_path"])
                        # GOPATH is replaced (not appended) with the project's path.
                        os.environ["GOPATH"] = project.go_path
                        LOG.info("GOPATH: %s", os.environ["GOPATH"])

                        data_path = os.path.join(CONFIG["data_path"], "projects", project.project_name)
                        flag = common_utils.make_callgraph_data(project.main_path, os.path.join(data_path, "data.callgraph"))
                        if flag is True:
                            LOG.debug("generate data.callgraph success")
                            # Remove any stale leveldb data (dir or file form)
                            # before rebuilding the calling/called tables.
                            db_path = os.path.join(data_path, "table_calling.db")
                            if os.path.exists(db_path) and os.path.isdir(db_path):
                                shutil.rmtree(db_path)
                                LOG.info("delete: %s", db_path)
                            elif os.path.exists(db_path) and os.path.isfile(db_path):
                                os.remove(db_path)
                                LOG.info("delete: %s", db_path)
                            db_path = os.path.join(data_path, "table_called.db")
                            if os.path.exists(db_path) and os.path.isdir(db_path):
                                shutil.rmtree(db_path)
                                LOG.info("delete: %s", db_path)
                            elif os.path.exists(db_path) and os.path.isfile(db_path):
                                os.remove(db_path)
                                LOG.info("delete: %s", db_path)
                            # Index the "called" direction first, then (on
                            # success) the "calling" direction.
                            finder = Finder(data_path, called = True)
                            finder.build_finder()
                            projects = Projects()
                            # NOTE(review): itervalues() is Python-2-only.
                            ix = IX(projects = [v for v in projects.all().itervalues()])
                            LOG.debug("IX: %s", IX.IX_INDEXS)
                            flag = index_all_func(db = finder.db, ix = ix.get(project.project_name))
                            if flag is True:
                                finder = Finder(data_path, called = False)
                                finder.build_finder()
                                flag = index_all_func(db = finder.db, ix = ix.get(project.project_name))
                        else:
                            LOG.error("Create data.callgraph failed!")
                        # Report overall success/failure back to the parent.
                        if flag is True:
                            self.pipe_client.send((command, True))
                        else:
                            self.pipe_client.send((command, False))
                    elif command == "EXIT":
                        LOG.info("ProjectImportProcess(%s) exit by EXIT command!", self.process_id)
                        return
                except EOFError:
                    # Parent closed the pipe: shut down this worker.
                    LOG.error("EOFError ProjectImportProcess(%s) Write Thread exit!", self.process_id)
                    return
                except Exception, e:
                    LOG.exception(e)
            # NOTE(review): unreachable — the while True above only exits
            # via return; this line can never execute.
            LOG.info("Leveldb Process(%s) exit!", self.process_id)
Ejemplo n.º 4
0
import logger

LOG = logging.getLogger(__name__)

# User's home directory as a plain string (Path.home() -> str).
home = str(Path.home())


if __name__ == "__main__":
    # Action.get_input() supplies the workspace path and the action's
    # input payload (shapes not visible here — defined elsewhere).
    workspace, input_data = Action.get_input()

    # All log files go under <workspace>/logs.
    logs_directory = os.path.join(workspace, "logs")
    logger.config_logging(file_name = "first.log",
                          log_level = "DEBUG",
                          dir_name = logs_directory,
                          day_rotate = False,
                          when = "D",
                          interval = 1,
                          max_size = 20,
                          backup_count = 5,
                          console = False)
    LOG.debug("test start")
    LOG.debug("input_data: %s", input_data)

    # Emit one timestamped message per second, forever, accumulating them
    # in memory.  NOTE(review): data["messages"] grows without bound and
    # `i` is never incremented — the counter in the message stays 000.
    data = {"messages": []}
    i = 0
    while True:
        now = datetime.datetime.now()
        message = "%s: hello world, tornado(%03d): %s" % (now, i, tornado.version)
        data["messages"].append(message)
        LOG.debug(message)
        time.sleep(1)
Ejemplo n.º 5
0
# Make the package parent directory importable when run as a script:
# cwd = this file's directory; its parent is prepended to sys.path.
cwd = os.path.split(os.path.realpath(__file__))[0]
sys.path.insert(0, os.path.split(cwd)[0])

from utils import common_utils
from config import CONFIG
import logger

LOG = logging.getLogger(__name__)


if __name__ == "__main__":
    # Point the Go toolchain at the test workspace before anything runs.
    os.environ["GOPATH"] = "%s" % "/home/breeze/Develop/IDGO"
    # Size-rotated debug-style log, echoed to the console as well.
    logger.config_logging(file_name="test.log",
                          log_level=CONFIG["log_level"],
                          dir_name="logs",
                          day_rotate=False,
                          when="D",
                          interval=1,
                          max_size=20,
                          backup_count=5,
                          console=True)
    LOG.info("Test Start")
    started_at = time.time()
    # common_utils.make_callgraph_data()
    # Ask guru for the definition at command.go line 19, column 15.
    definition = common_utils.get_definition_from_guru("/home/breeze/Develop/IDGO/src/github.com/flike/idgo/server/command.go", 19, 15)
    LOG.debug("get_definition_from_guru: %s", definition)
    finished_at = time.time()
    LOG.info("Use Time: %ss", finished_at - started_at)
    LOG.info("Test Exit!")
Ejemplo n.º 6
0
from config import CONFIG
from utils.finder import Finder
from utils import common_utils
from utils.index_whoosh import IX, index_all_func
import logger

# Module logger plus a dedicated "info" logger that must not bubble its
# records up to the root handlers (it gets its own file below).
LOG = logging.getLogger(__name__)
INF = logging.getLogger("info")
INF.propagate = False

if __name__ == "__main__":
    # Main build log: size-rotated (50 MB, 5 backups), echoed to console.
    logger.config_logging(file_name="Build.log",
                          log_level=CONFIG["log_level"],
                          dir_name="logs",
                          day_rotate=False,
                          when="D",
                          interval=1,
                          max_size=50,
                          backup_count=5,
                          console=True)
    # Dedicated "info" logger (propagation disabled at module level) with
    # its own file.  BUG FIX: "NOSET" -> "NOTSET" — the standard logging
    # level name; "NOSET" is not a valid level.
    logger.config_logging(logger_name="info",
                          file_name="Info.log",
                          log_level="NOTSET",
                          dir_name="logs",
                          day_rotate=False,
                          when="D",
                          interval=1,
                          max_size=20,
                          backup_count=10,
                          console=True)
    LOG.info("Start Build")
Ejemplo n.º 7
0
                    break
            else:
                time.sleep(0.0001)
        LLOG.info("total processed: %s, total successed: %s", total_processed,
                  success_processed)
        self.tasks.put("mission complete")
        self.results.put(
            (total_processed, success_processed, start_id, end_id))


if __name__ == "__main__":
    # Driver for the concurrent id-generation test: configures the main
    # log, then prepares the task/result queues and resets the redis key
    # the workers read.  (Worker spawning continues past this chunk.)
    logger.config_logging(file_name="didgen_concurrent_test.log",
                          log_level="DEBUG",
                          dir_name="logs",
                          day_rotate=False,
                          when="D",
                          interval=1,
                          max_size=20,
                          backup_count=5,
                          console=True)
    LOG.info("Start didgen_concurrent_test Script")

    # Total ids to exercise and the number of worker processes.
    test_num = 1000000 + 2000
    process_num = 4
    # Bounded task queue; result queue holds one tuple per worker.
    mission_queue = Queue(10000)
    result_queue = Queue(process_num)

    # Reset the shared counter key so every run starts from 0.
    r = redis.StrictRedis(host='localhost', port=6389, db=0)
    if r.exists("id_test"):
        r.delete("id_test")
    r.set("id_test", 0)