Example #1
    def __init__(self, config, auth, shared_storage):
        self.logger = logging.getLogger()

        self.config = config
        self.shared_storage = shared_storage

        self.symbol = self.config.symbol

        self.auth = auth

        self.subscribed = False
        self.orders_received = False

        self.events = {
            "order-received": self.process_new_received,
            "modify-received": self.process_amend_received,
            "cancel-received": self.process_cancel_received,
            "accepted": self.process_accept,
            "rejected": self.process_new_rejection,
            "modify-rejected": self.process_amend_rejection,
            "canceled": self.process_elim,
            "cancel-rejected": self.process_elim_reject,
            "filled": self.process_fill,
        }

        # normalize config.symbol to a list, leaving None untouched
        if self.symbol is not None and not isinstance(self.config.symbol, list):
            self.config.symbol = [self.config.symbol]
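
The events table above maps exchange message types to bound handler methods; a dispatcher consuming it might look like the following sketch (the message shape and the process_message name are assumptions, not part of the excerpt):

    def process_message(self, message):
        # hypothetical dispatcher: pick the handler registered for this type
        handler = self.events.get(message.get("type"))
        if handler is None:
            self.logger.warning("unhandled event type: %s", message.get("type"))
            return
        handler(message)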
Example #2
def setup_custom_logger(name):
    formatter = logging.Formatter(
        fmt='%(asctime)s %(levelname)-8s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    # attach the formatter through a handler; without this it is never used
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    return logger
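
A quick usage check of the helper above (the logger name is arbitrary):

logger = setup_custom_logger('myapp')
logger.debug('formatter and handler are now attached')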
Example #3
    def __init__(self, cfg, exchange_adapter):
        self.logger = logging.getLogger()

        self._load_configuration(cfg)

        self.config = cfg
        self.exchange_adapter = exchange_adapter
        self.exchange_adapter.set_order_update_callback(self.on_market_update)
        self.orders_manager = orders_manager(self.exchange_adapter)

        # forward the flag to the adapter, coerced to a plain bool
        self.exchange_adapter.cancel_orders_on_start = bool(self.cancel_orders_on_start)

        self.update_orders = False

        self.started_time = time.time()
        self.last_amend_time = None
        self.reconnecting = False

        self.active = True
        self.tob = None
        self.num_of_sent_orders = 0
        self.cancel_all_request_was_sent = False

        self.user_asks = self.config.orders.asks
        self.user_bids = self.config.orders.bids
Example #4
    def __init__(self, exchange_adapter):
        self.exchange_adapter = exchange_adapter

        try:
            self.exch_name = self.exchange_adapter.config.name
        except AttributeError:
            self.exch_name = ""

        self.orders = {}
        self.live_orders_ids = []
        self.orders_states = {}
        self.orderid_to_orderid_map = {}
        self.order_to_event = {}
        self.order_to_strategy_type = {}
        self.ids_to_fills = {}

        self.update_type_to_state = {
            new_order_ack: event.on_insert_ack,
            new_order_nack: event.on_insert_nack,
            order_elim_ack: event.on_cancel_ack,
            order_elim_nack: event.on_cancel_nack,
            order_fill_ack: event.on_fill,
            order_full_fill_ack: event.on_full_fill,
            amend_ack: event.on_amend_ack,
            amend_ack_on_partial: event.on_amend_partial_ack,
            amend_nack: event.on_amend_nack,
        }

        self.logger = logging.getLogger()
Example #5
    def __init__(self):
        self.logger = logging.getLogger('db.py')
        self.con = MySQLdb.connect(config.get_db("host"),
                                   config.get_db("user"),
                                   config.get_db("pass"),
                                   config.get_db("database"),
                                   charset="utf8")
        self.cursor = self.con.cursor()
Example #6
    def __init__(self):
        columns = ("`code`,`open`,`close`,`price`,`high`,`low`,`buy_price`,`sell_price`,"
                   "`deal_number`,`deal_money`,`buy_one_number`,`buy_one_price`,"
                   "`buy_two_number`,`buy_two_price`,`buy_three_number`,`buy_three_price`,"
                   "`buy_four_number`,`buy_four_price`,`buy_five_number`,`buy_five_price`,"
                   "`sell_one_number`,`sell_one_price`,`sell_two_number`,`sell_two_price`,"
                   "`sell_three_number`,`sell_three_price`,`sell_four_number`,`sell_four_price`,"
                   "`sell_five_number`,`sell_five_price`,`date`,`time`,`other`")
        self.INSERT_PRICE = ("insert into stock_price_history (" + columns +
                             ") values (%s) on duplicate key update `other`='00'")
        self.INSERT_TODAY_PRICE = ("insert into stock_price_newest (" + columns +
                                   ") values (%s) on duplicate key update `other`='00'")
        self.TRUNCATE_TODAY_PRICE = "truncate table stock_price_newest"
        self.UPDATE_NAME = "update stock set name='%s' where code='%s'"
        self.stock_url = "http://hq.sinajs.cn/list="
        self.logger = logging.getLogger('price.py')
        reload(sys)  # Python 2-only: required before sys.setdefaultencoding
        sys.setdefaultencoding('utf-8')
Example #7
def addNetworkHandler(comm, boxinfo):
    # add in the beanstalk logger if applicable
    from logger import logging
    tlog = logging.getLogger("pipeline")

    network_handler = NetworkHandler(comm, boxinfo)
    network_handler.setLevel(logging.INFO)
    network_handler.setFormatter(log_formatter)
    tlog.addHandler(network_handler)
Example #8
    def __init__(self, config, auth, ws, shared_storage):
        self.logger = logging.getLogger()

        self.config = config
        self.ws = ws
        self.shared_storage = shared_storage
        self.auth = auth

        self.headers = {'content-type': 'application/json'}
Example #9
    def __init__(self):
        self.count = 0

        self.buildlocks = {}
        self.idlock = Lock()
        self.rsynclock = Lock()
        self.mod_buildlocks = Lock()

        self.logger = logging.getLogger('pipeline').getChild('locks')
Example #10
    def __init__(self):
        self.msg_callback = None
        self.stop = False
        self.last_hb_time = time.time()
        self.cancel_orders_on_start = False

        self.started = False
        self.reconnecting = False
        self.ready_to_listen = asyncio.Event()

        self.logger = logging.getLogger()
Example #11
def addNetworkHandler(app, name):
    """
    Create the network based log handler. It takes a celery app and a unique
    log-producer name (such as worker-<UUID>-0) and puts the message on a
    queue to gateway so that they are stored and displayed
    """
    # add in the celery logger if applicable
    from logger import logging
    tlog = logging.getLogger("pipeline")

    network_handler = NetworkHandler(app, name)
    network_handler.setLevel(logging.INFO)
    network_handler.setFormatter(log_formatter)
    tlog.addHandler(network_handler)
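
NetworkHandler itself is not shown in these excerpts; a minimal sketch of a queue-backed logging.Handler along the lines the docstring describes (the queue interface is an assumption):

import logging

class QueueLogHandler(logging.Handler):
    """Hypothetical stand-in: pushes formatted records onto a message queue."""
    def __init__(self, queue, producer_name):
        super().__init__()
        self.queue = queue              # anything with a put() method
        self.producer_name = producer_name

    def emit(self, record):
        try:
            self.queue.put({"producer": self.producer_name,
                            "message": self.format(record)})
        except Exception:
            self.handleError(record)    # logging's standard error hook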
Example #12
def check_paths():
  logger = logging.getLogger()

  if not path.exists(_CONF_DIR):
    mkdir(_CONF_DIR)
    logger.debug('Created the directory "%s".' % _CONF_DIR)

  if not path.exists(OUTBOX):
    mkdir(OUTBOX)
    logger.debug('Created the directory "%s".' % OUTBOX)

  if not path.exists(SENT):
    mkdir(SENT)
    logger.debug('Created the directory "%s".' % SENT)
Example #13
def check_paths():
    logger = logging.getLogger()

    if not path.exists(_CONF_DIR):
        mkdir(_CONF_DIR)
        logger.debug('Created the directory "%s".' % _CONF_DIR)

    if not path.exists(OUTBOX):
        mkdir(OUTBOX)
        logger.debug('Created the directory "%s".' % OUTBOX)

    if not path.exists(SENT):
        mkdir(SENT)
        logger.debug('Created the directory "%s".' % SENT)
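
Both variants above use the same check-then-create pattern; on Python 3 the race between path.exists() and mkdir() can be sidestepped with os.makedirs(exist_ok=True), as in this sketch:

from os import makedirs, path

def check_paths():
    logger = logging.getLogger()
    for directory in (_CONF_DIR, OUTBOX, SENT):
        if not path.exists(directory):
            makedirs(directory, exist_ok=True)  # exist_ok guards the race
            logger.debug('Created the directory "%s".' % directory)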
Example #14
    def __init__(self, cfg):
        self.logger = logging.getLogger()

        self.is_active = True
        self.exchange_adapter = emx_adapter(cfg.adapter)

        try:
            strategy_name = cfg.strategy.name
        except AttributeError:
            self.logger.exception("strategy was not found")
            raise Exception("strategy was not found")

        try:
            self.strategy = strategies_factory[strategy_name](cfg.strategy, self.exchange_adapter)
        except KeyError:
            self.logger.exception("strategy was not found in a factory")
            raise Exception("strategy was not found in a factory")
Example #15
# Lock is used below (buildlock, loglock) and getboxinfo() shells out via os
from threading import Thread, Lock
import os
import sys
import time
import simplejson
import traceback

from config import *
import network
import rsync_tools
import compute
import database
import kimobjects
import kimapi

from logger import logging
logger = logging.getLogger("pipeline").getChild("pipeline")

PIPELINE_WAIT    = 1
PIPELINE_TIMEOUT = 60
PIPELINE_MSGSIZE = 2**20
PIPELINE_JOB_TIMEOUT = 3600*24
buildlock = Lock()
loglock = Lock()

def getboxinfo():
    os.system("cd /home/vagrant/openkim-pipeline; git log -n 1 | grep commit | sed s/commit\ // > /persistent/setuphash")

    info = {}
    things = ['sitename', 'username', 'boxtype',
            'ipaddr', 'vmversion', 'setuphash', 'uuid',
            'gitargs', 'gitbranch', 'githost']
Example #16
import requests
import pprint
from os import environ
from os import system
import json
import time
import sys
from logger import logging

formatter = logging.Formatter(fmt='%(asctime)s %(levelname)-8s %(message)s',
                              datefmt='%Y-%m-%d %H:%M:%S')
# attach the formatter through a handler; it was created but never wired up
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger = logging.getLogger('zookeeper-launch')
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
pp = pprint.PrettyPrinter(indent=4)

discovery_ep = environ.get('DISCOVERY_EP') + '/subscriptions/' + environ.get(
    'TENANT_ID')
logger.info("Discovery endpoint %s", discovery_ep)

headers = {"Content-type": "application/json"}
getUrl = discovery_ep + '/getpods'
response = requests.get(getUrl, headers=headers)
response.raise_for_status()
#pp.pprint(response.json())

role_name = environ.get('ROLE_NAME')
logger.info("Role %s", role_name)

lExpectedHosts = {}
for pod in response.json():
    if pod["DesiredStatus"] != 1:
Example #17
#! /usr/bin/env python
""" Simple wrapper for udunits2-bin for converting arbitrary units to SI units """
VERSION = 0.3

import re
import subprocess
import warnings
warnings.simplefilter("ignore")

from logger import logging
logger = logging.getLogger("pipeline").getChild("kimunits")
logger.setLevel(logging.DEBUG)


class UnitConversion(Exception):
    """ Class for unit conversion errors """


_output_expression_default = re.compile(
    r"You have: You want:     "
    r"(?:(?P<value>(?:[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?)) )?"
    r"(?P<unit>\S+)", )

_output_expression_convert = re.compile(
    r"You have: You want:     "
    r".*?= "  # with the original quantity echoed back
    r"(?:(?P<value>(?:[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?)) )?"
    r"(?P<unit>\S+)", )


def convert_udunits2(from_value,
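
The convert_udunits2 definition is cut off above; judging from the prompt text the regexes expect, the wrapper drives the interactive udunits2 prompt over stdin. A sketch of that round trip, with the exact invocation being an assumption:

def convert_udunits2_sketch(from_value, from_unit, wanted_unit):
    # assumption: running `udunits2` with no arguments opens the
    # interactive "You have:" / "You want:" prompt parsed above
    proc = subprocess.Popen(["udunits2"], stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, universal_newlines=True)
    out, _ = proc.communicate("%s %s\n%s\n" % (from_value, from_unit, wanted_unit))
    match = _output_expression_convert.search(out)
    if match is None:
        raise UnitConversion("could not parse udunits2 output: %r" % out)
    return float(match.group("value") or from_value), match.group("unit")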
Example #18
    def __init__(self, exchange_name=None):
        self.ws = None
        self.session = None
        self.exchange_name = exchange_name

        self.logger = logging.getLogger()
Example #19
import shutil
import subprocess
import dircache
import glob
import os
from packaging import version
from contextlib import contextmanager

import database
import kimapi
from template import template_environment

import config as cf
from config import __kim_api_version_spec__
from logger import logging
logger = logging.getLogger("pipeline").getChild("kimobjects")

#------------------------------------------------
# Base KIMObject
#------------------------------------------------

class KIMObject(object):
    """ The base KIMObject that all things inherit from

    Attributes:
        required_leader
            the required two letter leader for all kim codes, meant to be overridden
            by subclassers
        makeable
            marks the type of kimobject as makeable or not, to be overriden by subclassers
        path
Example #20
from discord.ext import commands

from formatting.markdown import MD
from logger import logging

logger = logging.getLogger(f"robotina.{__name__}")


class CommandErrorHandler(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        logger.debug(f"{self.__class__.__name__} initialized")

    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        await ctx.send(error)


class Status(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        logger.debug(f"{self.__class__.__name__} initialized")

    @commands.command(aliases=["stat", "st"])
    async def status(self, ctx):
        await ctx.send(f"latency: {self.bot.latency:0.3f}s")


class Greetings(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
Example #21
        one unit to another
    * asedata - the dictionary of reference data contained within ASE
"""
import os
import jinja2
import json
import clj
from functools import partial

from kimquery import query
from kimunits import convert
import database
import kimobjects
import config as cf
from logger import logging
logger = logging.getLogger("pipeline").getChild("template")

#-----------------------------------------
# New Template functions
#-----------------------------------------
def path(cand):
    obj = kimobjects.kim_obj(cand)
    try:
        p = obj.executable
    except AttributeError:
        p = obj.path

    logger.debug("thinks the path is %r",p)
    return p

def latestversion(kim):
Example #22
from os import listdir

from threading import Thread, Event

from flask import Flask, render_template, request, redirect, send_file, jsonify
from flask_socketio import SocketIO

from util import Util
from vault import Vault

from yara_create import Rule, YaraFiles

from config import CAP_PATH, SESSION_CACHE_PATH, CARVED_DIR

from logger import logging, LOG_FILE, FORMATTER, TIMESTAMP, LOG_LEVEL
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)

formatter = logging.Formatter(FORMATTER, TIMESTAMP)

file_handler = logging.FileHandler(LOG_FILE)
file_handler.setFormatter(formatter)

logger.addHandler(file_handler)

app = Flask(__name__)
app.config["SECRET_KEY"] = "secret!"
app.config["DEBUG"] = False
app.config["SERVER_NAME"] = "127.0.0.1:8000"

socketio = SocketIO(app, async_mode=None)
"""
Simple set of tools for having rsync commands go through
"""

import config as cf
from logger import logging
logger = logging.getLogger("pipeline").getChild("rsync_tools")

import os
import subprocess
import tempfile
from database import parse_kim_code
from functools import partial

# --delete ensures that we delete files that aren't on remote
RSYNC_FLAGS  = "-vvrLhzREc --progress --stats -e "
RSYNC_FLAGS += "'ssh -i "+cf.GLOBAL_KEY+" -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'"
RSYNC_FLAGS += " --exclude-from="+cf.RSYNC_EXCLUDE_FILE

RSYNC_ADDRESS = cf.RSYNC_USER+"@"+cf.RSYNC_HOST
RSYNC_PATH = RSYNC_ADDRESS+":"+cf.RSYNC_REMOTE_ROOT
RSYNC_LOG_FILE_FLAG = "--log-file={}/rsync.log".format(cf.KIM_LOG_DIR)
RSYNC_LOG_PIPE_FLAG = " >> {} 2>&1".format(cf.KIM_LOG_DIR+"/rsync_stdout.log")

if cf.PIPELINE_GATEWAY:
    READ_PENDING  = os.path.join(RSYNC_PATH, "/curators-to-pipeline-interface/pending/./")
    READ_APPROVED = os.path.join(RSYNC_PATH, "/curators-to-pipeline-interface/approved/./")
    if cf.PIPELINE_DEBUG:
        WRITE_RESULTS = os.path.join(RSYNC_PATH, "/pipeline/test-result-uploads-dbg/incoming/./")
    else:
        WRITE_RESULTS = os.path.join(RSYNC_PATH, "/pipeline/test-result-uploads/incoming/./")
Example #24
def setup_celery_logging(**kwargs):
    return logging.getLogger('pipeline')
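
On its own this override does nothing; it is presumably connected to Celery's setup_logging signal, since the mere presence of a receiver stops Celery from installing its own handlers. A sketch of the hookup:

import logging
from celery.signals import setup_logging

@setup_logging.connect
def setup_celery_logging(**kwargs):
    # any connected receiver suppresses Celery's own logging configuration
    return logging.getLogger('pipeline')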
Example #25
import pymongo
import os
import re
import datetime

import util
import kimunits
import database
import config as cf
from logger import logging
logger = logging.getLogger('pipeline').getChild('mongodb')

client = pymongo.MongoClient(host=cf.MONGODB_HOSTNAME)
db = client[cf.MONGODB_DATABASE]

PATH_RESULT = cf.RSYNC_LOCAL_ROOT
PATH_APPROVED = cf.RSYNC_LOCAL_ROOT

def config_edn(flname):
    with open(flname) as f:
        doc = util.loadedn(f)
        doc.setdefault("created_on", str(datetime.datetime.fromtimestamp(os.path.getctime(flname))))
        return doc

def parse_kim_code(kim_code):
    RE_KIMID = r"(?:([_a-zA-Z][_a-zA-Z0-9]*?)__)?([A-Z]{2})_([0-9]{12})(?:_([0-9]{3}))?"
    return re.match(RE_KIMID,kim_code).groups()

def drop_tables(ask=True):
    if ask:
        check = raw_input("Are you sure? [y/n] ")
Example #26
from copy import copy
from datetime import datetime
from game.board import Board
from game.trie import Trie
from logger import logging

logger = logging.getLogger("start_game")


class Game:
    def __init__(self, board, trie, min_len=0, allow_repetitions=True):
        self.min_len = min_len
        self.allow_repetitions = allow_repetitions
        self.board = board
        self.trie = trie
        self.found = {}

    def dfs(self, trie, cur, visited, words, word_i):
        cur_c = self.board.get_c(cur)

        trie = trie.get(cur_c)
        if not trie:
            return

        visited[cur] = True
        word_i.append(cur)

        if Trie.is_word(trie) and len(word_i) >= self.min_len:
            if self.allow_repetitions:
                words.append(word_i)
            else:
Example #27
import glob
import os
import itertools
from packaging import version
from contextlib import contextmanager
from collections import defaultdict

import util
import database
import kimapi
from template import template_environment

import config as cf
from config import __kim_api_version_spec__
from logger import logging
logger = logging.getLogger("pipeline").getChild("kimobjects")

#------------------------------------------------
# Base KIMObject
#------------------------------------------------
class KIMObject(object):
    """ The base KIMObject that all things inherit from

    Attributes:
        required_leader
            the required two letter leader for all kim codes, meant to be overridden
            by subclassers
        makeable
            marks the type of kimobject as makeable or not, to be overriden by subclassers
        path
            the full path to the directory associated with the kim object
Example #28
"""
Simple set of tools for having rsync commands go through
"""

import config as cf
import network
from logger import logging
logger = logging.getLogger("pipeline").getChild("rsync_tools")

import os
import sys
import subprocess
import tempfile
from database import parse_kim_code
from functools import partial

# # For Director and Worker
#
# - Expecting directory ~/.ssh/controlmasters to exist, will be created if missing.
#
# - ~/.ssh/config should contain these lines:
#
#      ControlMaster auto
#
#      Host gateway-host-for-rsync
#        ControlPath ~/.ssh/controlmasters/gateway-rsync
#        UserKnownHostsFile /dev/null
#        StrictHostKeyChecking no
#        HostName gateway.pipeline.openkim.org
#        User gateway-user1
#        IdentityFile ~/.ssh/ssh-key-for-gateway
Example #29
import pymongo
import os
import yaml
import json
from logger import logging
logger = logging.getLogger('pipeline').getChild('connect')

client = pymongo.MongoClient()
db = client.database
objs = db.objs
results = db.results
errors = db.errors
verifications = db.verifications

Example #30
# encoding: utf-8
"""
es_util.py

Created by Cuong Pham on 2012-09-06.
Copyright (c) 2012 ChonGiaDung.vn. All rights reserved.
"""

from logger import logging
from elasticsearch import helpers, Elasticsearch
import time
import threading
import json
from decorators import command

logging.getLogger('elasticsearch').setLevel(logging.WARNING)


class ESClient(object):
    def __init__(self, hosts, batchSize=1000, **kwargs):
        self.esConn = Elasticsearch(hosts, **kwargs)
        self.bulker = ListBulker()
        self.batchSize = batchSize
        self.ID_FIELD = "_id"

    def _isOk(self, response):
        return response.get('acknowledged', False)

    def createIndex(self,
                    indexName="test",
                    body=None,
Example #31
communication between directors, workers, and webiste through
the beanstalk daemon.

Any of the classes below rely on a secure public key to open an ssh
tunnel to the remote host.  It then connects to the beanstalkd
across this tunnel.
"""
import os
import time
import json
from subprocess import Popen, check_call, CalledProcessError
from string import Template

import config as cf
from logger import logging
logger = logging.getLogger("pipeline").getChild("network")

#=============================================================================
# Functions for formulating messages to different parts of the system
#=============================================================================
def transform_priority(priority):
    priorities = {
        "immediate": 0, "very high": 0.01, "high": 0.1,
        "normal": 1, "low": 10, "very low": 100
    }

    if isinstance(priority, str):
        priority = int(1e6*priorities.get(priority, 1))
    return priority

def director_update_message(itemid, status='approved', force=False, priority='normal'):
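
As a quick check of the scaling in transform_priority, string priorities multiply a base of 1e6 while numeric priorities pass through unchanged:

assert transform_priority("high") == 100000     # int(1e6 * 0.1)
assert transform_priority("normal") == 1000000  # int(1e6 * 1)
assert transform_priority(42) == 42             # non-strings pass through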
Example #32
import os

import discord
from discord.ext import commands
from dotenv import load_dotenv

from bot_commands import COGS
from logger import logging

logger = logging.getLogger("robotina.start")

load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')

intents = discord.Intents(messages=True, guilds=True)
bot = commands.Bot(command_prefix='!', intents=intents)

for cog in COGS:
    bot.add_cog(cog(bot))


@bot.event
async def on_ready():
    logger.info("Robonita is ready")


bot.run(TOKEN)
Example #33
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from arp import do_arping
from logger import logging
from threading import Thread
import ipcalc
import ping
import re
import socket

MAX = 128

logger = logging.getLogger()

class sendit(Thread):
  def __init__ (self, ip):
    Thread.__init__(self)
    self.ip = ip
    self.status = 0
    self.elapsed = 0

  def run(self):
    try:
      self.elapsed = ping.do_one("%s" % self.ip, 1)
      if self.elapsed > 0:
        self.status = 1
    except Exception as e:
      logger.debug('Error while pinging host %s: %s' % (self.ip, e))
Example #34
# ============================================================================
# GameMode Class
# ============================================================================

from logger import logging, log
import time
from AudioPlay import AudioPlay

battLog = logging.getLogger('BATTERY')
batt_handler = logging.FileHandler(
    '/home/pi/Documents/ovaom/logs/battery_monitor.log')
battLog.addHandler(batt_handler)

INACTIVE_THRESHOLD = 5  # in seconds


class GameMode(object):
    ''' GameMode Class: puzzleMode and jungleMode must inherit from this class '''

    instrument = [{
        'active': 0,
        'maxPreset': 1,
        'currentPreset': 0,
        'lastSeen': -1,
        'battery': 0,
    } for i in range(0, 4)]
    prev_offline_objects = []
    prevMonitoring = 0

    def __init__(self, net):
        self.net = net
Example #35
If we are told that a new test result has come in:

    * Get all (te, mo) pairs that depend on this result
    * See if they need to be updated (are they latest?)
    * Send these updates through the original channels

Local dependency resolution
"""
import config as cf

import kimobjects
import kimquery
import kimapi

from logger import logging
logger = logging.getLogger("pipeline").getChild("dependencies")

# TODO -- implement blacklist in mongodb to not run some tests

def result_inqueue(test, model):
    query = {
        "database": "job", "query": {"test": str(test), "model": str(model)},
        "project": ["status"], "limit": 1
    }

    status = kimquery.query(query, decode=True)
    return len(status) > 0

def result_exists(test, model):
    query = {
        "project": ["test-result-id"], "database": "obj", "query": {
Example #36
import sqlite3
import hashlib
import cPickle as pickle
from itertools import chain

import config as cf
import kimapi
import kimobjects
import database
import dependencies
import pipeline
import rsync_tools
import network

from logger import logging
logger = logging.getLogger("pipeline").getChild("director")

#==================================================================
# director class for the pipeline
#==================================================================
class Director(pipeline.Agent):
    """
    The Director object, knows to listen to incoming jobs, computes
    dependencies and passes them along to workers
    """
    def __init__(self, *args, **kwargs):
        super(Director, self).__init__(name="director", num=0, *args, **kwargs)
        self.logger.info("Initializing KIM Director")
        self.db = KIMObjectDB(cf.DIRECTOR_DB)

        # start by getting into sync with the shared repository and making
Example #37
#! /usr/bin/env python
""" Simple wrapper for executable for converting arbitrary units to SI units """
VERSION = 0.3

import re
import math
import subprocess
import warnings
warnings.simplefilter("ignore")

from logger import logging
logger = logging.getLogger("pipeline").getChild("kimunits")
logger.setLevel(logging.DEBUG)

class UnitConversion(Exception):
    """ Class for unit conversion errors """

_units_output_expression = re.compile(r"(?P<value>(?:[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?))(?: (?P<unit>.+))?")

def linear_fit(x, y):
    """
    Perform a linear fit between x,y, returning the average error for each data
    point as well. This is written this way so as to not add a numpy dependency
    """
    n = len(x)
    xx = sum(map(lambda x: x**2, x)) - sum(x)**2 / n
    yy = sum(map(lambda x: x**2, y)) - sum(y)**2 / n
    xy = sum(map(lambda x,y : x*y, x, y)) - sum(x) * sum(y) / n
    a, b = sum(y)/n - xy / xx * sum(x) / n, xy / xx
    yhat = map(lambda x: a + b*x, x)
    yerr = math.sqrt(sum(map(lambda y,yh: (y - yh)**2/y**2, y, yhat)) / n)
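
The excerpt ends before linear_fit returns; a self-contained version of the same dependency-free least-squares fit, with the (a, b, yerr) return shape being an assumption:

import math

def linear_fit_sketch(x, y):
    # least-squares fit of y = a + b*x without numpy, per the docstring above
    n = len(x)
    xx = sum(v**2 for v in x) - sum(x)**2 / n
    xy = sum(u*v for u, v in zip(x, y)) - sum(x) * sum(y) / n
    b = xy / xx
    a = sum(y)/n - b * sum(x)/n
    yhat = [a + b*v for v in x]
    yerr = math.sqrt(sum((v - vh)**2 / v**2 for v, vh in zip(y, yhat)) / n)
    return a, b, yerr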
Example #38
"""
Methods that deal with the KIM API directly.  Currently these are methods
that build the libraries and use the Python interface kimservice
to test if tests and models match.
"""
import os
import sys
from subprocess import check_output, check_call, CalledProcessError
from contextlib import contextmanager
import packaging.specifiers, packaging.version
from functools import partial

import config as cf
from config import __pipeline_version_spec__, __kim_api_version_spec__
from logger import logging
logger = logging.getLogger("pipeline").getChild("kimapi")

#======================================
# API build utilities
#======================================
MAKE_LOG = os.path.join(cf.LOG_DIR, "make.log")

@contextmanager
def in_dir(path):
    cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
Example #39
import json
import os
from logger import logging

logger = logging.getLogger("trie")


class Trie:
    END_OF_WORD = "!"

    def __init__(self):
        self._trie = {}

    @property
    def trie(self):
        return self._trie

    def save_to_file(self, path="trie.json"):
        with open(path, 'w') as outfile:
            json.dump(self._trie, outfile)
        logger.info(f"save: location={path}, size={os.stat(path).st_size}")
        return path

    def load_from_file(self, path="trie.json"):
        with open(path) as json_file:
            self._trie = json.load(json_file)
        logger.info(f"load: location={path}, size={os.stat(path).st_size}")
        return path

    def generate_from_dict_file(self,
                                dict_path="dictionary.txt",
Example #40
        - if there are results outstanding, do not submit the original pair
          (since the acceptance of the test result will trigger it again)

If we are told that a new test result has come in:

    * Get all (te, mo) pairs that depend on this result
    * See if they need to be updated (are they latest?)
    * Send these updates through the original channels

Local dependency resolution
"""
import kimobjects
import kimquery

from logger import logging
logger = logging.getLogger("pipeline").getChild("dependencies")


def result_inqueue(test, model):
    query = {
        "database": "job",
        "test": str(test),
        "model": str(model),
        "project": ["tube"],
        "limit": 1
    }

    tube = kimquery.query(query, decode=True)
    return len(tube) > 0

Example #41
import pymongo
import os
import yaml
import json
from logger import logging
logger = logging.getLogger('pipeline').getChild('mongofill')
from processkimfiles import eatfile, configtojson

client = pymongo.MongoClient()
db = client.test_database

#tests = db.tests
#tests.insert(post)
REPO_DIR = os.path.abspath("/home/vagrant/openkim-repository/")

objs = db.objs
objs.drop()

results = db.results
results.drop()
logger.info("Filling with test results")
leaders = ('tr', 'vr')
for leader in leaders:
    for folder in os.listdir(os.path.join(REPO_DIR, leader)):
        logger.info("On %s ", folder)
        path = os.path.join(REPO_DIR, leader, folder)
        info = configtojson(os.path.join(path, 'kimspec.ini'))
        info['path'] = os.path.join(leader, folder)
        info['uuid'] = folder
        resultobj = objs.insert(info)
        with open(os.path.join(path, 'results.yaml')) as f:
Example #42
import time
import subprocess
import threading
import shutil
import json
import traceback
from contextlib import contextmanager

import util
import kimunits
import kimquery
import kimobjects

import config as cf
from logger import logging
logger = logging.getLogger("pipeline").getChild("compute")

#================================================================
# a class to be able to timeout on a command
#================================================================
class Command(object):
    def __init__(self, cmd, stdin=None, stdout=None, stderr=None):
        """
        A class to provide time limits to sub processes. Accepts
        a command as an array (similar to check_output) and file
        handles with which to communicate on stdin, stdout, stderr
        """
        self.cmd = cmd
        self.process = None
        self.stdin = stdin
        self.stdout = stdout
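
The run method is cut off; the usual shape of the thread-plus-join timeout the docstring describes is sketched below (the method name and kill semantics are assumptions):

    def run_with_timeout(self, timeout):
        # run the process in a worker thread, join with a timeout,
        # and kill the process if it is still alive afterwards
        def target():
            self.process = subprocess.Popen(self.cmd, stdin=self.stdin,
                                            stdout=self.stdout, stderr=self.stderr)
            self.process.communicate()

        worker = threading.Thread(target=target)
        worker.start()
        worker.join(timeout)
        if worker.is_alive():
            self.process.kill()
            worker.join()
        return self.process.returncode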
Example #43
import urllib
import urllib2
import itertools
from config import PipelineQueryError
import config as cf
import json
import os
import sys
import ssl

from logger import logging
logger = logging.getLogger("pipeline").getChild("kimquery")

def open_url(url, data, header):
    request  = urllib2.Request(url, data, header)

    if cf.PIPELINE_QUERY_USE_SSL:
        response = urllib2.urlopen(request)
    else:
        context = ssl._create_unverified_context()
        response = urllib2.urlopen(request, context=context)

    answer = response.read()
    response.close()
    return answer

def prepare_dns():
    resolv = "/etc/resolv.conf"
    dnsline = "nameserver 127.0.0.1"
Example #44
'''

import socket
import time
import copy
import utility as util
# from digitalIO import DIO
from constants import Constant as const
from vergence_version import ValidateVergenceVersion
from global_parameters import Globals as globls
from data_collection import DataFileObj
from constants import Constant, ConfigLabels
from logger import logging
import numpy as np

log = logging.getLogger(__name__)


class ArbitratorServer:
    def __init__(self):
        # self.DIO_obj = DIO()

        # for programing printer port

        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # sock.bind(('', globls.config_param['arbitrator_service']['local_port']))
        sock.bind(('100.1.1.3',
                   globls.config_param['arbitrator_service']['local_port']))

        sock_eye = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # sock_eye.bind(('', globls.config_param['arbitrator_service']['local_port_eye']))
Example #45
import json
import zipfile

from collections import OrderedDict

from HTMLParser import HTMLParser
from StringIO import StringIO
from urllib2 import urlopen, URLError, HTTPError

from logger import logging
from lxml import etree
from lxml.etree import tostring


from jinja2 import Environment, FileSystemLoader

log = logging.getLogger('development')


def create():
    env = Environment(loader=FileSystemLoader("."))
    try:
        configuration = env.get_template('WebGrab++.config.xml.template')
        sections = json.loads(open('template_variables.json').read())

        country_name = sections['country_name']
        if country_name is None:
            raise Exception(sections['no_country_config'])

        if ' ' in country_name:
            log.error("Please do the following"
                      "Download the zip file and place it on the channels "
Example #46
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import ConfigParser
from logger import logging
from os import path
import paths

logger = logging.getLogger()

configerror = 00000  # error code used when errorcoder.py itself fails


class Errorcoder:
    '''
    Sections can be defined in the configuration file; lookups inside each
    section behave like a dictionary lookup, so a section can be associated
    with each set of error codes used by the various operators. Alternatively,
    each section can be associated with an error type: I/O, FTP, PING.
    '''
    def __init__(self, filename):
        self._filename = filename
Example #47
the task name 'pipeline.tasks.{object-type-name}_{function-name}'.

Concretely, the `run` function of Worker is listed as {'worker': ['run']} in
the registry and is mapped to the task 'pipeline.tasks.worker_run'.

Finally, functions for monitoring the Celery application while it's running are
provided at the end of the file. These allow for watching task generation and
consumption; they are used by the Gateway to keep track of job status and by
the website to send realtime updates to users on the web.
"""
import database
import kimobjects
import config as cf
from app import app
from logger import logging
logger = logging.getLogger('pipeline').getChild('tasks')

import time
import threading
from celery import Task

# conditional import since importing mongodb, Gateway tries to connect to the
# database while loading, causing issues in the director, worker
Gateway, Director, Worker = [dict for i in xrange(3)]
GWorker, GDirector, GGateway = [dict(obj=None) for i in xrange(3)]

if cf.PIPELINE_GATEWAY:
    from gateway import Gateway, mongodb
elif cf.PIPELINE_DIRECTOR:
    from director import Director
elif cf.PIPELINE_WORKER:
Example #48
"""
Set of methods for querying the database in one form or the other

As well as parsing and handling kim_codes

Currently these calls mostly glob on the database, could be replaced by something more elegant later

"""
import re
import os
import glob
import operator

import config as cf
from logger import logging
logger = logging.getLogger("pipeline").getChild("database")

#-------------------------------------------------
# Helper routines (probably move)
#-------------------------------------------------
#KIMID matcher  ( optional name             __) (prefix  ) ( number  )( opt version )
RE_KIMID    = r"^(?:([_a-zA-Z][_a-zA-Z0-9]*?)__)?([A-Z]{2})_([0-9]{12})(?:_([0-9]{3}))?$"
RE_UUID     = r"^([A-Z]{2}_[0-9]{12}_[0-9]{3}-and-[A-Z]{2}_[0-9]{12}_[0-9]{3}-[0-9]{5,})(-error)?$"
RE_ERROR    = r"^([A-Z]{2}_[0-9]{12}_[0-9]{3}-and-[A-Z]{2}_[0-9]{12}_[0-9]{3}-[0-9]{5,})-error$"
RE_UUID_LEX = r"^([A-Z]{2}_[0-9]{12}_[0-9]{3})-and-([A-Z]{2}_[0-9]{12}_[0-9]{3})-[0-9]{5,}(-error)?$"

LEADERS_OBJS = ["mo", "md", "te", "td", "tv", "mv"]
LEADERS_DATA = ["tr", "vr", "rd", "er"]

def jobid_to_kimcodes(jobid):
    codes = re.match(RE_UUID_LEX, jobid)
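
A quick illustration of RE_KIMID on a made-up KIM ID (the ID itself is hypothetical):

m = re.match(RE_KIMID, "EAM_Dynamo__MO_123456789012_000")
print(m.groups())  # ('EAM_Dynamo', 'MO', '123456789012', '000')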
Example #49
    def __init__(self):
        self.validate_url = "http://basic.10jqka.com.cn/"
        self.logger = logging.getLogger('code.py')