def main():
    import sys
    import logutil
    logger = logutil.get_logger(__name__)

    try:
        import configutil
        configutil.initialize('env_sample_work_flow.cfg')
        import to_do_queue
        to_do_queue.initialize()
        
        TaskAll = {
            'tasks': [
                {"cmd":"ls"},
                {"cmd":"whereis python"},
                {"cmd":"whereis vim"}
            ]
        }
        
        result_queue_name, result_num = to_do_queue.add_TaskAll(TaskAll)
        to_do_queue.watch_result(result_queue_name, result_num)

        import sqs
        sqs.clean_all()
        
    except (SystemExit, KeyboardInterrupt):
        pass
    except Exception:
        logger.exception("launch service failed")
        sys.exit(1)
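Every example on this page obtains its logger from logutil.get_logger, but the logutil module itself is not reproduced here. The sketch below is only a minimal, assumed implementation of the simplest call form (a name plus an optional log file), built on the stdlib logging package; the real module clearly accepts more parameters (log_dir, log_level, log_to_console, ...).

# Minimal, assumed sketch of logutil.get_logger; not the project's actual code.
import logging

def get_logger(name, log_file=None, log_level=logging.INFO):
    logger = logging.getLogger(str(name))
    logger.setLevel(log_level)
    if not logger.handlers:
        # log to a file when one is given, otherwise to the console
        handler = logging.FileHandler(log_file) if log_file else logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
            "%(asctime)s %(name)s %(levelname)s %(message)s"))
        logger.addHandler(handler)
    return logger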
Example No. 2
def __init__(self, cfg_parser):
    self.cfg_parser = cfg_parser
    self.logfile = None
    self.logger = get_logger("big-code",
                             self.log_dir(),
                             self.log_file(),
                             log_level=self.log_level(),
                             log_to_console=True)
Example No. 3
def main():
    import sys
    import logutil
    logger = logutil.get_logger(__name__)

    try:
        import configutil
        configutil.initialize('env_sample_work_flow.cfg')
        
        from gevent import monkey
        monkey.patch_all()

        import worker
        worker.get_task_work()
    except (SystemExit, KeyboardInterrupt):
        pass
    except Exception:
        logger.exception("launch service failed")
        sys.exit(1)
Example No. 4
import numpy as np
import yaml
from collections import defaultdict
import pydot
import json
import os, sys
import config, logutil

logger = logutil.get_logger('sensors')

class Encoder(object):
    def __init__(self, sub_encoder, transform=None, flip=False):
        self.transform = transform
        self.flip = flip
        self.encoder = sub_encoder

    def encode(self, values):
        """Encode transforms a sensor vector into a single probability"""

        # make sure this is a one-d numpy array
        values = np.array(values).ravel()

        if self.transform is None:
            # univariate case
            p = self.encoder.prob(values[0])
        else:
            # multivariate transform
            p = self.encoder.prob(self.transform.transform(values))


        if self.flip:
            # assumed completion: flipping inverts the probability
            p = 1.0 - p

        return p
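The sub-encoder passed to Encoder is not shown in this example; the usage sketch below is only an illustration, with a hypothetical GaussianProb class standing in for any object that exposes the prob() method encode() relies on.

# Hypothetical usage sketch; GaussianProb is not part of the original module.
class GaussianProb(object):
    def __init__(self, mean, std):
        self.mean = mean
        self.std = std

    def prob(self, x):
        # unnormalised Gaussian likelihood in (0, 1]
        return float(np.exp(-0.5 * ((x - self.mean) / self.std) ** 2))

touch_sensor = Encoder(GaussianProb(mean=0.5, std=0.1))
print(touch_sensor.encode([0.48]))  # a value near the mean encodes close to 1.0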
Example No. 5
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

import collections
import weakref
import types
import sys
import logging
import json
import logutil

logger = logutil.get_logger('fysom')

__author__ = 'Mansour Behabadi'
__copyright__ = 'Copyright 2011, Mansour Behabadi and Jake Gordon'
__credits__ = ['Mansour Behabadi', 'Jake Gordon']
__license__ = 'MIT'
__version__ = '${version}'
__maintainer__ = 'Mansour Behabadi'
__email__ = '*****@*****.**'


WILDCARD = '*'
SAME_DST = '='


class FysomError(Exception):
    pass
Example No. 6
def get_commits_data(cfg, limits):

    repo = cfg.repo
    df = pd.DataFrame(columns=cfg.columns(), dtype=str)
    commits = list(cfg.repo.iter_commits())
    [start, end] = limits
    num_commits = end - start

    worker_id = int(start / num_commits)

    logger = get_logger("big-code-" + str(worker_id),
                        cfg.log_dir(),
                        "big_code_" + str(worker_id) + ".log",
                        log_level=cfg.log_level(),
                        log_to_console=True)

    count = 0

    for i in range(limits[0], limits[1]):

        D = {}
        D['project_name'] = cfg.proj['project_name']
        D['commit_msg'] = commits[i].summary
        D['commit_id'] = commits[i].hexsha

        logger.info("Processing [" + str(count) + "/" + str(num_commits) +
                    "]: " + D['commit_id'])

        try:
            diff = repo.git.diff(commits[i].hexsha, commits[i].hexsha + '^')
            patch_set = PatchSet(diff)
        except Exception:
            diff, patch_set = None, []
            cfg.logger.info("Failed to get diff: " + commits[i].hexsha)

        if len(patch_set) <= cfg.max_files_changed() and diff:
            for p in patch_set:
                if p.is_modified_file:
                    if len(p) <= cfg.max_hunks_changed():
                        try:
                            file_type = os.path.basename(p.path).split('.')[-1]
                        except Exception:
                            file_type = None
                        if file_type == 'java':

                            try:
                                source_file = re.sub(r'^a/', '', p.source_file)
                                target_file = re.sub(r'^b/', '', p.target_file)

                                D['file_name'] = source_file
                                curr_code = repo.git.show('{}:{}'.format(
                                    commits[i].hexsha, source_file))
                                prev_code = repo.git.show('{}:{}'.format(
                                    commits[i].hexsha + '^', target_file))

                                prev_file_name = get_filename(
                                    p.source_file, D, 'prev')
                                curr_file_name = get_filename(
                                    p.target_file, D, 'curr')

                                write_source_files(prev_code, curr_code,
                                                   prev_file_name,
                                                   curr_file_name, cfg)
                            except Exception:
                                prev_code, curr_code, prev_file_name, curr_file_name = None, None, None, None
                                cfg.logger.info(
                                    "Failed to get prev & curr code")

                            try:
                                cfg.logger.info("getting ast for: " +
                                                prev_file_name)
                                prev_ast = Tree(
                                    cfg.project_scratch_dir() + os.sep +
                                    prev_file_name, cfg)
                                curr_ast = Tree(
                                    cfg.project_scratch_dir() + os.sep +
                                    curr_file_name, cfg)
                            except Exception:
                                prev_ast, curr_ast = None, None
                                cfg.logger.info("Failed to get ast tree!")

                            if prev_ast and curr_ast:

                                write_ast_files(prev_ast, curr_ast,
                                                prev_file_name, curr_file_name,
                                                cfg)

                                prev_data = AstDiff(prev_code, prev_ast)
                                curr_data = AstDiff(curr_code, curr_ast)

                                for h in p:

                                    if np.max([
                                            h.source_length, h.target_length
                                    ]) < cfg.max_hunk_size():
                                        prev_raw, prev_ast = prev_data.get_ast_seq(
                                            h.target_start,
                                            h.target_start + h.target_length)
                                        curr_raw, curr_ast = curr_data.get_ast_seq(
                                            h.source_start,
                                            h.source_start + h.source_length)

                                        data = [
                                            cfg.proj['project_name'],
                                            commits[i].hexsha,
                                            commits[i].summary, source_file,
                                            prev_raw, curr_raw, prev_ast,
                                            curr_ast, h.target_start,
                                            h.target_length, h.source_start,
                                            h.source_length
                                        ]

                                        df.loc[count] = data
                                        count = count + 1

    #cfg.logger.info("[commit_data] for thread " + str(w) + " number of rows in the data frame :" + str(count))
    return df
Example No. 7
#
# url_tools.py
# A module containing tools for working with URLs.
#

# stdlib imports
from __future__ import with_statement
import os
from urlparse import urlparse

# project imports
from logutil import get_logger, LoggingSection
from types import Singleton

#region Globals
logger = get_logger(LoggingSection.UTILITIES, __name__)
#endregion


class UrlUtility:
    """A class for parsing and interacting with URLs."""
    __metaclass__ = Singleton

    tlds = None

    @staticmethod
    def _populate_tlds():
        with open(
                os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             'resources/effective_tld_names.dat.txt')) as f:
            UrlUtility.tlds = [
                line.strip().decode('utf-8') for line in f
                if line[0] not in "/\n"
            ]
Example No. 8
import yaml
import fysom
import pydot
import os, sys, json
import config, logutil

# todo: mark events as "outgoing" to allow them to be captured and sent on

logger = logutil.get_logger('FSM')


class FSM(object):
    def __init__(self, name, spec):
        """
        Create an FSM from a collection of YAML specs
        """
        self.event_stack = []
        self.name = name
        initial = spec["initial"]
        events = spec["events"]
        event_list = []
        for event_name, event_spec in events.iteritems():
            ev = dict(event_spec)
            ev["name"] = event_name
            event_list.append(ev)
        fysom_spec = {'initial': initial, 'events': event_list}
        self.fsm = fysom.Fysom(fysom_spec, trace=True)

        logger.info(json.dumps({'type': 'fsm_created', 'name': self.name}))

        # attach event handlers
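The shape of spec is only implied by the constructor above; the snippet below is a hypothetical illustration (the state and event names are invented here, not taken from the project), assuming fysom-style src/dst fields for each event.

# Hypothetical spec, shown only to illustrate the structure FSM.__init__ expects.
example_spec = yaml.safe_load("""
initial: idle
events:
  start:
    src: idle
    dst: running
  stop:
    src: running
    dst: idle
""")
demo_fsm = FSM("demo", example_spec)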
Example No. 9
def __init__(self, cfg_parser, logger=None):
    self.cfg_parser = cfg_parser
    self.logger = get_logger(self.log_name(),
                             self.log_dir(), self.log_file(), self.log_level(),
                             self.log_to_console())
Example No. 10
#
# url_tools.py
# A module containing tools for working with URLs.
#

# stdlib imports
from __future__ import with_statement
import os
from urlparse import urlparse

# project imports
from logutil import get_logger, LoggingSection
from types import Singleton

#region Globals
logger = get_logger(LoggingSection.UTILITIES, __name__)
#endregion


class UrlUtility:
    """A class for parsing and interacting with URLs."""
    __metaclass__ = Singleton

    tlds = None

    @staticmethod
    def _populate_tlds():
        with open(
                os.path.join(os.path.dirname(os.path.abspath(__file__)), 'resources/effective_tld_names.dat.txt')) as f:
            UrlUtility.tlds = [line.strip().decode('utf-8') for line in f if line[0] not in "/\n"]
Example No. 11
import discord
from discord.ext.commands import Bot
from os import environ
import sys
import logging
import logging.config
import logutil
import commands.reddit

log_level = getattr(logging, environ.get('LOG_LEVEL') or 'INFO')
logging.basicConfig(level=log_level, stream=sys.stdout)
logger = logutil.get_logger(__name__, 'log.txt')

try:
    bot_token = environ['DISCORD_BOT_TOKEN']
except KeyError:
    logger.critical('DISCORD_BOT_TOKEN environment variable not set.')
    sys.exit(1)

bot = Bot(command_prefix='!')


@bot.event
async def on_ready():
    logger.info('Client logged in!')


@bot.command()
async def hello(*args):
    return await bot.say('Hello world!')
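The snippet ends before the client is started; with discord.py the usual final step is to run the bot with the token read above.

# Standard discord.py entry point; blocks until the bot is stopped.
bot.run(bot_token)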
Example No. 12
def __init__(self):
    super().__init__()
    self.enclosing_session = None
    self.client_adr = None
    self.log = logutil.get_logger(self)
Example No. 13
def __init__(self):
    self.session_dict = dict()
    self.log = logutil.get_logger(self)
Example No. 14
def __init__(self, session_id):
    self.session_id = session_id
    self.first_connect = None
    self.giscon = None
    self.appcon = None
    self.log = logutil.get_logger(self)
Example No. 15
import yaml
from libpgm.graphskeleton import GraphSkeleton
from libpgm.orderedskeleton import OrderedSkeleton
from libpgm.nodedata import NodeData
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.tablecpdfactorization import TableCPDFactorization
from collections import defaultdict
import pydot
import pprint
import numpy as np
import os, sys, json
import config, logutil

logger = logutil.get_logger('BAYES')


def normalise_name(n):
    if n.startswith('~'):
        return n[1:]
    return n


def is_negated(n):
    return n.startswith('~')


def parse_truth_table(truth_table, parents):
    cond_table = {}
    for row, prob in truth_table.iteritems():
        cond_name = []
        for i, p in enumerate(parents):
Example No. 16
import sys
import logutil

URL_CONTEXT = None  # e.g. URL_CONTEXT = 'http://123.207.8.234:8887'
# remove me if necessary
if not URL_CONTEXT:
    print('you need to set the URL_CONTEXT variable before using this module')
    sys.exit(1)

REQUEST_HEADER = {
    'Host': '124.207.8.234:8887',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'User-Agent': 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.57 Safari/537.36',
    'Content-Type': 'application/x-www-form-urlencoded',
    'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6'
}
# Beijing, Dongcheng District
LOCATION_DATA = 'cnwsignoutVO.province=%B1%B1%BE%A9%CA%D0&cnwsignoutVO.city=%CA%D0%CF%BD%C7%F8&cnwsignoutVO.county=%B6%AB%B3%C7%C7%F8'
log = logutil.get_logger()

class LoginFailureException(Exception):
    pass

class AutoCheckin:
    def __init__(self):
        self._set_up_cookiejar()

    def _get_header_meta(self):
        return REQUEST_HEADER.copy()

    def _get_json_header(self):
        header = self._get_header_meta()
        header['x-requested-with'] = 'XMLHttpRequest'
        header['Accept'] = 'application/json, text/javascript, */*'
        return header
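_set_up_cookiejar is called in __init__ but its body is not included in this example; the sketch below is purely a guess at what it might do, assuming the Python 2 stdlib (cookielib/urllib2) that the other snippets on this page also target.

# Hypothetical sketch of the missing _set_up_cookiejar method; not the
# project's actual code.
import cookielib
import urllib2

def _set_up_cookiejar(self):
    self.cookiejar = cookielib.CookieJar()
    self.opener = urllib2.build_opener(
        urllib2.HTTPCookieProcessor(self.cookiejar))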
Example No. 17
import uuid
import json
import sqs
import functools
from configutil import get_config
from logutil import get_logger

_config = get_config()
_logger = get_logger(__name__)

_queue_name = _config.workflow_engine.todo_queue_name


def initialize():
    add_todo_queue()

def add_todo_queue():
    global _queue_name
    sqs.add_queue(_queue_name)

def add_TaskAll(TaskAll):
    result_queue_name = str(uuid.uuid4())
    sqs.add_queue(result_queue_name, timeout=_config.workflow_engine.visibility_timeout_for_result_queue)
    tasks = TaskAll.get('tasks', [])
    for task_id, task in enumerate(tasks, start=1):
        task['task_id'] = task_id
        task['result_queue_name'] = result_queue_name
        push_sub_task(task)

    return result_queue_name, len(tasks)
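push_sub_task is referenced above but its definition is not part of this example; the sketch below is only an assumption, and sqs.send_message is a hypothetical helper name (the project's sqs wrapper is not shown).

# Hypothetical sketch; sqs.send_message is an assumed name, not confirmed by
# the original project.
def push_sub_task(task):
    sqs.send_message(_queue_name, json.dumps(task))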
Example No. 18
import yaml
import fsm
import bayes_net
import sensor_encoder
import os, sys, json
import pydot
import config, logutil

logger = logutil.get_logger('shared')

class SharedControl(object):

    def __init__(self, model_dir):        
        self.fsms = fsm.load_fsms(os.path.join(model_dir, "fsms.yaml"))
        self.bayes_net = bayes_net.load_bayes_net(os.path.join(model_dir, "bayes_net.yaml"))
        self.sensor_encoder = sensor_encoder.load_sensor_encoder(os.path.join(model_dir, "encoder.yaml"))
        
    def update(self, sensor_dict):
        """
        Takes a dictionary of sensor_name:sensor_value mappings.
        Returns a list of strings, representing all output events fired.
        """
        # encode sensor values
        # get a node name->probability mapping
        sensor_probs = self.sensor_encoder.encode(sensor_dict)      
        logger.info(json.dumps({'type': 'sensor_update', 'value': sensor_probs}))
        
        fsm_evidence = {}
        
        # infer bayes net output variables
        events = self.bayes_net.infer(sensor_probs, fsm_evidence)