Example #1
    """The justification was not recognized"""


class NotAllFilesFound(Exception):
    """Didn't find all the files when looking for a multi cell from files"""


class BadFont(Exception):
    """The font was not loaded"""


class InvalidNameList(Exception):
    """The name list did not match the sprite given"""


log = common.getLogger('Visual')


class Store(registry.GeneralStore):
    """Stores sprites"""
    def _registerItem(self,
                      name,
                      path,
                      w=1,
                      h=1,
                      framerate=0,
                      running=False,
                      rectangular=True,
                      angle=0.0,
                      zoom=1.0,
                      loop=True,
                      one_direction=False,
                      convert_alpha=False):
        """Register a sprite"""
Example #2
import pygame
pygame.font.init()

import common
import serialize
import registry

class BadSprite(Exception): """The sprite was not loaded"""
class InvalidCell(Exception): """The sprite cell number was out of range"""
class BadScale(Exception): """An invalid scaling factor was used"""
class InvalidJustification(Exception): """The justification was not recognized"""
class NotAllFilesFound(Exception): """Didn't find all the files when looking for a multi cell from files"""
class BadFont(Exception): """The font was not loaded"""
class InvalidNameList(Exception): """The name list did not match the sprite given"""


log = common.getLogger('Visual')

class Store(registry.GeneralStore):
    """Stores sprites"""
    
    def _registerItem(self, name, path, w=1, h=1, framerate=0, running=False, rectangular=True, angle=0.0, zoom=1.0, loop=True,
            one_direction=False, convert_alpha=False):
        """Register a sprite"""
        #
        # Watch for special case h = -1 ... this is a multi cell
        if h == -1:
            return self.registerFromFiles(name, path, w, framerate, 
                running, rectangular, angle, zoom, loop, one_direction, convert_alpha)
        #
        # Reality check
        if zoom <= 0.0:
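The excerpt stops at the zoom check. Presumably the guard raises the BadScale exception declared above; a minimal sketch of that body (an assumption, not confirmed by the excerpt):

            # Hypothetical body for the dangling zoom guard
            raise BadScale('A zoom of %s is not valid - it must be greater than zero' % zoom)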
Example #3
    # 0 Error #1 Warn #2 Info #3 Debug #4 Detail
    aparser.add_argument("--log",
                         "-log",
                         help="set logging info",
                         type=int,
                         choices=range(5),
                         default=2)

    aparser.add_argument("--seed", "-seed", type=float, help="use this seed")

    from time import time
    args = aparser.parse_args()

    import settings
    settings.loggingLevel = CM.getLogLevel(args.log)
    logger = CM.getLogger(__name__, settings.loggingLevel)

    if __debug__:
        logger.warning("DEBUG MODE ON. Can be slow !")

    seed = round(time(), 2) if args.seed is None else float(args.seed)

    import alg
    import pathlib

    # Run it
    st = time()
    src = pathlib.Path(args.src)
    alg.Repair(src).start()
    logger.info("time {}s".format(time() - st))
Example #4
#-*- coding: UTF-8 -*-


import os
import context
import setting
from datetime import datetime
import common
from persistence import fileMgr
from string import Template

logger = common.getLogger(__name__)

cfgFailedHandler = context.failedHandler #setting.app['handlers']['failed']
rootFolder = cfgFailedHandler['folder']
extension = cfgFailedHandler['extension']
template = Template(cfgFailedHandler['template'])

fileMgr.verifyExists(rootFolder)


def removeFile(fileName):
    logger.debug(u'failed handler is trying to remove {} file'.format(fileName))
    try:
        for d in os.listdir(rootFolder):
            dpath = os.path.join(rootFolder,d)
            if os.path.isdir(dpath):
                for fd in os.listdir(dpath):
                    fdPath = os.path.join(dpath,fd)
                    if os.path.isfile(fdPath):
                        pair = os.path.split(fd)
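The walk is truncated at the split. A plausible way the match-and-delete step could finish, closing the dangling try as well (an assumption based on the fileName argument and the globals above, not the project's actual code):

                        # Hypothetical completion: delete the stored file whose
                        # base name matches the requested fileName
                        if os.path.splitext(pair[1])[0] == fileName:
                            os.remove(fdPath)
                            logger.debug(u'removed {}'.format(fdPath))
                            return
    except OSError as error:
        logger.warning(u'failed to remove {}: {}'.format(fileName, error))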
Example #5
    required_fields = {
        "client": [("trustedca", "string")],
        "server": [("interface", "string"), ("port", "int"),
                   ("keypath", "string"), ("certpath", "string")]
    }

    optional_fields = {
        "server": [("logfile", "string", None), ("outputdir", "string", "."),
                   ("maxsize", "int", 50)]
    }

    try:
        config = common.parse(cfgpath, required_fields, optional_fields)
    except Exception as e:
        print(str(e), file=sys.stderr)
        sys.exit(-1)

    config["maxsize"] *= 1024 * 1024

    logger = common.getLogger(logfile=config['logfile'])
    context = common.getContext(protocol=ssl.PROTOCOL_TLS_SERVER,
                                certpath=config['certpath'],
                                trustedca=config['trustedca'],
                                keypath=config['keypath'])

    logger.info("parsed config: ")
    for k, v in config.items():
        logger.info("{}: {}".format(k, v))

    main()
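For orientation only: given the declarations above and the flat lookups later in the snippet, a successfully parsed config presumably comes back as a dict along these lines (the values here are invented, only the keys and defaults come from the snippet):

# Hypothetical result of common.parse - the keys follow the required/optional
# declarations above, the values are purely illustrative
config = {
    "trustedca": "/etc/pki/ca-bundle.pem",   # [client] required
    "interface": "0.0.0.0",                  # [server] required
    "port": 8443,
    "keypath": "/etc/pki/server.key",
    "certpath": "/etc/pki/server.crt",
    "logfile": None,                         # optional, defaults to None
    "outputdir": ".",                        # optional default
    "maxsize": 50,                           # later multiplied up to bytes
}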
Example #6
import pyodbc
import logging
import os
import sys
import time

modulePath = os.path.dirname(__file__)
rootPath = os.path.dirname(modulePath)
scriptsPath = f'{modulePath}/sql_scripts'
testCasesPath = f'{rootPath}/inputs'

# Such ugly hack to allow import from sibling folder
sys.path.append(rootPath)  ## append parent folder to path
import common

logger = common.getLogger(modulePath)
logger.info('Running ' + __file__)


def main():
    try:
        os.system("clear")
        test_cases = []
        test_case_directories = [
            f.name for f in os.scandir(testCasesPath) if f.is_dir()
        ]
        test_case_directories.sort()
        for i, name in enumerate(test_case_directories, start=1):
            test_cases.append(name)
            print("{}: {}".format(i, name))
Example #7
    def __init__(self, config):
        try:
            self._logger = common.getLogger(logfile=config['logfile'])
        except IOError as error:
            self._logger = common.getLogger()
            self._logger.warning("Couldn't setup logfile: " + str(error))

        self._logger.info("parsed config: ")
        for k, v in config.items():
            self._logger.info("{}: {}".format(k, v))

        if sys.byteorder == "big":
            self._logger.error(
                "Unsupported architecture. Can send captures only in little-end format"
            )
            sys.exit(-1)

        self._protocol = config["protocol"]
        self._promiscious = config["promiscious"]
        self._usb = config["non-network-devices"]

        try:
            self.available_devices = pcapy.findalldevs()
        except pcapy.PcapError as e:
            self._logger.error(str(e))
            sys.exit(-1)

        self._caps = self.load_caps(config["interfaces"])
        if self._caps == []:
            self._logger.info("Not listening to any interface. Exiting")
            sys.exit(-1)

        try:
            self.context = common.getContext(protocol=ssl.PROTOCOL_TLS_CLIENT,
                                             certpath=config['certpath'],
                                             trustedca=config['trustedca'],
                                             keypath=config['keypath'])
        except IOError as error:
            self._logger.error('Failed to initialize PKI infrastructure: ' +
                               str(error) + ". Exiting")
            sys.exit(-1)

        self._minframes = config["minframes"]
        self._minsize = config["minsize"]

        self.data = b''

        self._lock = threading.Lock()
        self._condition_to_send = threading.Condition(self._lock)
        self._available_bytes = 0
        self._available_frames = 0

        self._sock = None
        self._ssock = None
        self._sniffing_threads = [
            threading.Thread(name="Sniffer " + cap[1],
                             target=self.sniff,
                             args=(cap[0], )) for cap in self._caps
        ]
        self._alive_sniffing_threads = len(self._sniffing_threads)

        self.server_up = threading.Event()  # blocks the sniffing threads when the server is down
        self._exit = threading.Event()  # to stop the process
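The excerpt ends with the constructor; the methods that use the condition variable are not shown. A minimal sketch of how the counters and _condition_to_send might coordinate the sniffing threads with the sender (the method names _enqueue and _wait_for_batch are hypothetical, not from the project):

    def _enqueue(self, frame):
        # Hypothetical producer side: a sniffing thread appends a captured
        # frame and wakes the sender once enough data has accumulated
        with self._condition_to_send:
            self.data += frame
            self._available_bytes += len(frame)
            self._available_frames += 1
            if (self._available_frames >= self._minframes
                    or self._available_bytes >= self._minsize):
                self._condition_to_send.notify()

    def _wait_for_batch(self):
        # Hypothetical consumer side: block until a batch is worth sending
        # (or until the process is asked to exit)
        with self._condition_to_send:
            self._condition_to_send.wait_for(
                lambda: self._available_frames >= self._minframes
                or self._available_bytes >= self._minsize
                or self._exit.is_set())
            batch, self.data = self.data, b''
            self._available_bytes = 0
            self._available_frames = 0
        return batch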
Example #8
#!/usr/bin/env python3
'''Common code for takeoff analysis'''

import numpy as np
import pandas as pd
import cruise
from common import getLogger

log = getLogger(__name__)


def find_all_takeoffs(data, future=30):
    '''Return the number of detected takeoffs and
    a dataframe with all takeoff datapoints

    future -- number of datapoints for each takeoff (default 30)'''

    # Initial filter for faster processing
    indexes = data.index[(data['E1 RPM'] >= 2450)
                         & (data['IAS'].between(50, 75))]
    if indexes.empty:
        return pd.DataFrame()

    takeoff_indexes = []
    skip = None
    for i in indexes:
        if i == skip:
            # Skip this one and the next. Same takeoff.
            skip += 1
            continue
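The loop body is cut off after the skip branch. Inferring from the docstring and the skip logic, a plausible completion (a sketch, not necessarily the original code) records each takeoff start, keeps the next `future` datapoints, and lets the existing skip check swallow consecutive indexes of the same event:

        # Hypothetical completion: treat this index as the start of a new
        # takeoff and skip ahead past the points that belong to it
        takeoff_indexes.append(i)
        skip = i + 1

    frames = [data.loc[i:i + future] for i in takeoff_indexes]
    return len(takeoff_indexes), pd.concat(frames) if frames else pd.DataFrame()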
Example #9
from bs4 import BeautifulSoup
from zhilian import zlFilter
from job import jobFilter
import logging
import logging.config
import common
import context
from persistence import fileMgr
import templateFactory
import constructor
from persistence import mongodb
import handlerFactory

# initialize log
logging.config.dictConfig(context.logging)
logger = common.getLogger(context.appName)


class Application(object):

    """application class"""

    def __init__(self):
        super(Application, self).__init__()
        self.resumeMgr = None

        self._initialize()

    def _initialize(self):
        # initialize variables
        fileMgr.verifyExists(context.dataFolder)
Example #10
#-*- coding: UTF-8 -*-


import os
import setting
import json
import common
import shutil
import context

logger = common.getLogger()


def saveJson(dic):
    logger.debug(u'save json to file {}'.format(dic['userName']))

    filePath = u'{}/{}.json'.format(context.dataFolder, dic['userName'])

    with open(filePath, 'w') as f:
        _dumpJson(f, dic)


def _dumpJson(fd, dic):
    # json.dump(dic,fd,ensure_ascii=False).encoding('UTF-8')
    strData = json.dumps(dic, ensure_ascii=False).encode('UTF-8')
    fd.write(strData)


def getResumes(source):
    resumes = []
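getResumes() is cut off right after the accumulator. Since saveJson() above writes one <userName>.json file per resume, a plausible sketch of the rest (an assumption, not the project's code) is:

    # Hypothetical continuation: load every .json file in the source folder
    for name in os.listdir(source):
        if name.endswith('.json'):
            with open(os.path.join(source, name)) as f:
                resumes.append(json.load(f))
    return resumes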
Example #11
import socket
import sys
import time
import zeroconf

from common import getLogger

#  vds stands for very disco service

# logging.basicConfig(level=logging.DEBUG)
# if len(sys.argv) > 1:
#     assert sys.argv[1:] == ['--debug']
#     logging.getLogger('zeroconf').setLevel(logging.DEBUG)

description = 'Very Disco Service Discover'
log = getLogger("service-discovery", description, level="info")

zc = zeroconf.Zeroconf()
info = None

def cleanup():
    log.info("Deregistering service.")
    zc.unregister_service(info)
    zc.close()

def register_service(properties):
    service_type = properties['type'] if 'type' in properties else "_http._tcp.local."
    short_name = properties['name'] if 'name' in properties else "Default Service Name"
    name = "%s.%s" % (short_name, service_type)
    ip_str = properties['ip'] if 'ip' in properties else "127.0.0.1"
    ip = socket.inet_aton(ip_str)
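The registration itself falls outside the excerpt. Assuming a reasonably recent python-zeroconf API, and treating the 'port' key and its default as assumptions, the remaining steps presumably build a ServiceInfo and hand it to the Zeroconf instance created above:

    # Hypothetical continuation of register_service()
    global info
    port = int(properties.get('port', 8080))  # 'port' key is an assumption
    info = zeroconf.ServiceInfo(service_type, name, addresses=[ip], port=port)
    log.info("Registering service %s on %s:%s", name, ip_str, port)
    zc.register_service(info)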
Example #12
import tornado.websocket
import signal
import os

from tornado import gen
from tornado.websocket import websocket_connect

import pt
import ptcv
from common import getLogger

# import code
# code.interact(local=locals())

description = 'pt-client sends camera frames to the central pt server'
log = getLogger("pt-client", description, level="info")

# resolution = 3
res_x = 640
res_y = 480

# vid = "/Users/dan/Movies/bal2.h264"
vid = "/home/dan/mac/Movies/bal2.h264"

vidfile = True
webcam = False

send = True

# amqp_host = "192.168.111.154"
# pt_server = "192.168.111.76"
Example #13
import socket
import netifaces
import vds
import pt
import ptcv
from common import getLogger

# import code
# code.interact(local=locals())

# amqp_host = "192.168.111.154"
# localhost = "127.0.0.1"
# amqp_host = "172.16.6.2"
amqp_host = "ptserver.local"

description = 'collimates video streams and pipes it to a web client'
log = getLogger("pt-server", description, level="info")


def get_link_local_address():
    # Return the link-local (169.254.x.x) IPv4 addresses found
    return list(
        set(addr['addr'] for iface in netifaces.interfaces()
            for addr in netifaces.ifaddresses(iface).get(socket.AF_INET, [])
            if '169.254' in addr.get('addr')))


my_ip = get_link_local_address()
if not my_ip:
    log.critical(
        "No local link address set. You may try the following command:"
        "\nsudo ifdown eth1:0 && sudo ifup eth1:0")