Example #1
def main(arg):

    # set logs directory
    home = os.path.expanduser("~")
    logs_home = home + '/logs/'
    # make sure logs directory exists
    if not os.path.exists(logs_home):
        os.makedirs(logs_home)
    # setup logger
    lfname = logs_home + 'shutter_' + datetime.strftime(
        datetime.now(), "%Y-%m-%d_%H:%M:%S") + '.log'
    log.setup_custom_logger(lfname)

    parser = argparse.ArgumentParser()
    parser.add_argument("--open",
                        action="store_true",
                        help="Open the beamline shutter")

    args = parser.parse_args()

    epics_pvs = set_pvs()

    if args.open:
        open_shutter(epics_pvs)
    else:
        close_shutter(epics_pvs)
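
Every example on this page assumes a project-local `log` module exposing `setup_custom_logger`, and the signature clearly varies from project to project: some pass a logger name, several below pass a log-file path, and others add `log_dir`/`level`/`prefix` keywords. For reference, here is a minimal sketch of such a helper built only on the standard library; the signature and defaults are assumptions, not any one project's implementation.

# Hypothetical minimal log.py; every project on this page ships its own
# variant, so treat the signature and defaults here as assumptions.
import logging

def setup_custom_logger(name, level=logging.DEBUG, logfile=None):
    formatter = logging.Formatter(
        fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
    # Log to a file when a path is given, otherwise to stderr.
    handler = logging.FileHandler(logfile) if logfile else logging.StreamHandler()
    handler.setFormatter(formatter)
    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.addHandler(handler)
    return logger
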
Example #2
def main():

    # create logger
    home = os.path.expanduser("~")
    logs_home = home + '/logs/'

    # make sure logs directory exists
    if not os.path.exists(logs_home):
        os.makedirs(logs_home)

    lfname = logs_home + 'temperature_' + datetime.strftime(
        datetime.now(), "%Y-%m-%d_%H:%M:%S") + '.log'
    log.setup_custom_logger(lfname)

    init_general_PVs(global_PVs, variableDict)
    try:
        while True:
            h5fname = global_PVs['HDF1_FullFileName_RBV'].get()
            h5fname_str = "".join([chr(item) for item in h5fname])
            temp = global_PVs['Temperature'].get()
            log.info('Temperature: %4.4f °C;  %s' % (temp, h5fname_str))
            time.sleep(5)
    except KeyboardInterrupt:
        log.warning('interrupted!')
        log.warning('Log information saved at: %s', lfname)
Example #3
def main():
    # log file name
    logs_home = os.path.join(str(pathlib.Path.home()), 'logs')
    # make sure logs directory exists
    if not os.path.exists(logs_home):
        os.makedirs(logs_home)
    lfname = os.path.join(
        logs_home, 'test_' +
        datetime.strftime(datetime.now(), "%Y-%m-%d_%H_%M_%S") + '.log')

    log.setup_custom_logger(lfname)
    logger = log.logger

    args = None
    run_logs(args)
Example #4
    def __init__(self, path_inst_dir, path_raw_data, earliest_year):
        self.PATH_TO_INSTALL_DIRECTORY = path_inst_dir
        self.RAW_PATH = path_raw_data
        self.DATA_PATH = self.PATH_TO_INSTALL_DIRECTORY + 'data/'
        self.ZIP_PATH = self.DATA_PATH + 'zips/'
        self.STOCK_PATH = self.DATA_PATH + 'stock_data/'
        self.ALL_CODES_FILE_NAME = 'all_codes.csv'
        self.TIME_SERIES_FILE_NAME = 'time_series.csv'
        self.RAW_DATA_REGEX_PATTERN = r'\d{8}\.txt'
        self.GLOBAL_VARS_FILE_NAME = 'global_vars.py'
        self.RAW_DATA_DATES_FILE_NAME = 'raw_data_dates.csv'

        self.RAW_DATA_DATES_FILE = self.PATH_TO_INSTALL_DIRECTORY + self.RAW_DATA_DATES_FILE_NAME
        self.ALL_CODES_FILE = self.PATH_TO_INSTALL_DIRECTORY + self.ALL_CODES_FILE_NAME
        self.GLOBAL_VARS_FILE = self.PATH_TO_INSTALL_DIRECTORY + self.GLOBAL_VARS_FILE_NAME
        self.LOG_PATH = self.PATH_TO_INSTALL_DIRECTORY + 'logs/'

        if not os.path.exists(self.LOG_PATH):
            os.makedirs(self.LOG_PATH)

        self.initial_codes = []

        self.INSTALL_LOG_FILE = self.LOG_PATH + 'install.log'
        log.setup_custom_logger('root', self.INSTALL_LOG_FILE)

        self.EARLIEST_YEAR = earliest_year

        logger = logging.getLogger('root')

        try:
            assert os.path.exists(self.RAW_PATH)
        except AssertionError:
            logger.critical("Path to raw data not found: %s", self.RAW_PATH)
            raise ValueError(
                'Path to raw data not found. Please provide the absolute path to a folder with the specified data format.'
            )

        if not os.listdir(self.RAW_PATH):
            logger.error("No files in the raw data path: %s", self.RAW_PATH)
            raise Exception(
                "No files in the raw data path. Nothing can be done if there is no data!"
            )

        if not os.path.exists(self.PATH_TO_INSTALL_DIRECTORY):
            logger.info("Install path does not exist. Creating now")
            os.makedirs(self.PATH_TO_INSTALL_DIRECTORY)

        self.install()
Example #5
def main():
    # logger
    per_source = True
    datapath = os.path.abspath(os.path.dirname(__file__)) + '\\data\\'
    logger = log.setup_custom_logger('root')
    logger.info('start analyzing')
    main_path = 'C:/Programmierung/Masterarbeit/Scraper/data/articles/'
    test_path = 'C:/Programmierung/Masterarbeit/Scraper/data/test'
    valid_path = 'C:/Programmierung/Masterarbeit/Scraper/data/valid'
    model_path = 'C:/Programmierung/Masterarbeit/Analyzer/data/trainedModels'

    #dirs_to_train = load_dirs(main_path)

    counts, word_list, tokens = prepare_data(main_path, per_source, '')
    counts_test, word_list_test, tokens_test = prepare_data(
        test_path, per_source, '_test')
    counts_valid, word_list_valid, tokens_valid = prepare_data(
        valid_path, per_source, '_valid')
    if os.path.isfile('data/obj/articles.pkl'):
        articles = load_obj('articles')
    else:
        articles = preprocessor.get_articles_from_top_dir(main_path, '')
    if os.path.isfile('data/obj/articles_test.pkl'):
        articles_test = load_obj('articles_test')
    else:
        articles_test = preprocessor.get_articles_from_top_dir(
            test_path, '_test')

    # cnn_model.train()
    rnn_tensorflow.run(articles, articles_test)
Example #6
def main():
    (opts, args) = parse_opts()

    LOG = setup_custom_logger('root', level=opts.log_level.upper())

    LOG.info('Connecting to Geth RPC: %s', opts.geth_addr)
    # web3 instance for talking to Geth RPC
    w3 = Web3(Web3.HTTPProvider(opts.geth_addr))

    LOG.info('Connecting to IPFS Cluster: %s', opts.ipfs_addr)
    # for talking to IPFS cluster and pinning images
    ipfs = IpfsPinner(opts.ipfs_addr)

    # Read Sticker Pack contract ABI
    with open(opts.abi_file, "r") as f:
        contract_abi = json.load(f)

    # Get instance of sticker pack contract
    contract = StickerPackContract(opts.contract, contract_abi, w3)

    if opts.pin_all:
        LOG.info('Pinning all existing packs...')
        pinAllPacks(ipfs, contract)

    global watcher
    LOG.info('Watching for events: %s', opts.events)
    watcher = ContractWatcher(w3, ipfs, contract)
    watcher.loop(opts.events.split(','))
Example #7
def main(name, value, aws_region, log_dir, overwrite, verbose):
    """
    Routine to set a specific SSM parameter value.
    """
    if not os.path.isdir(log_dir):
        print(f'Log directory does not exist: \'{log_dir}\' '
              '- please create or adjust --log-dir parameter')
        return

    log_level = INFO
    if verbose:
        log_level = DEBUG

    overwrite_flag = False
    if overwrite:
        overwrite_flag = True

    if not name:
        print(f'Missing parameter \'--name\' - please set on the command line')
        return
    if not value:
        print(
            f'Missing parameter \'--value\' - please set on the command line')
        return

    start_time = datetime.now()
    logger = log.setup_custom_logger(
        'root',
        log_dir=log_dir,
        level=log_level,
        prefix=f'set-parameter-{os.environ.get("STAGE")}')

    if os.environ.get('STAGE') is None:
        logger.warning('Please set \'STAGE\' environment variable.')
        return
    stage = os.environ.get('STAGE')

    aws_session = boto3.session.Session(
        profile_name='lfproduct-{}'.format(stage), region_name=aws_region)
    ssm_client = aws_session.client('ssm')

    logger.info(f'STAGE           : {stage}')
    logger.info(f'AWS REGION      : {aws_region}')
    logger.info(f'log-dir         : {log_dir}')
    logger.info(f'log level       : {getLevelName(log_level)}')

    try:
        ssm_client.put_parameter(Name=name,
                                 Value=value,
                                 Type='String',
                                 Overwrite=overwrite_flag)
    except ClientError as e:
        logger.info(
            f'Skipping {name}: {value} - '
            f'key already exists and overwrite set to: {overwrite_flag}')

    logger.info('Finished export - duration: {}'.format(datetime.now() -
                                                        start_time))
Example #8
    def __init__(self, args):
        self.args = args
        self.logger = log.setup_custom_logger('elem')
        self.console_logger = log.setup_console_logger('console')

        self.exploitdb = ExploitDatabase(self.args.exploitdb,
                                         self.args.exploitdbrepo)
        self.exploit_manager = ExploitManager(self.args.exploits,
                                              self.args.exploitsrepo)
Example #9
def _init():
    global logger

    reload(sys)
    sys.setdefaultencoding('utf-8')

    mlog = setup_custom_logger("root")
    mlog.debug("setup logger")

    logger = logging.getLogger('root')
Example #10
def main():

    # set logs directory
    home = os.path.expanduser("~")
    logs_home = home + '/logs/'
    # make sure logs directory exists
    if not os.path.exists(logs_home):
        os.makedirs(logs_home)
    # setup logger
    lfname = logs_home + 'flir_' + datetime.strftime(
        datetime.now(), "%Y-%m-%d_%H:%M:%S") + '.log'
    log.setup_custom_logger(lfname)

    epics_pvs = set_pvs()

    rotation_start = 0
    num_angles = 100
    rotation_step = 0.1

    while True:
        set_pso(epics_pvs, rotation_start, num_angles, rotation_step)
        log.info('taxi before starting capture')
        # Taxi before starting capture
        epics_pvs['PSOtaxi'].put(1, wait=True)
        wait_pv(epics_pvs['PSOtaxi'], 0)
        set_trigger_mode(epics_pvs, 'PSOExternal', num_angles)
        # Start the camera
        epics_pvs['CamAcquire'].put('Acquire')
        wait_pv(epics_pvs['CamAcquire'], 1)
        log.info('start fly scan')
        # Start fly scan
        epics_pvs['PSOfly'].put(1)  #, wait=True)
        # wait for acquire to finish
        # wait_camera_done instead of the wait_pv enabled the counter update
        # self.wait_pv(epics_pvs['PSOfly'], 0)
        time_per_angle = compute_frame_time(epics_pvs)
        log.info('Time per angle: %s', time_per_angle)
        collection_time = num_angles * time_per_angle
        wait_camera_done(epics_pvs, collection_time + 60.)

        set_trigger_mode(epics_pvs, 'FreeRun', 1)
        epics_pvs['CamAcquire'].put('Acquire')
        wait_pv(epics_pvs['CamAcquire'], 1)
Example #11
def objectify(structs, functions):
    logger = log.setup_custom_logger(__name__)
    #for (index, item) in enumerate(structs):
    #for (index, item) in enumerate(functions):
    for struct in structs:
        logger.debug("struct {0}:{1} has {2} fields".format(struct.fileName, struct.name, len(struct.fields)))
        for function in functions:
            logger.debug("function {0}:{1} has {2} arguments".format(function.fileName, function.name, len(function.arguments)))
            if len(struct.fields) == len(function.arguments):
                logger.info("Struct {0}:{1} and Function {2}:{3} have matching field/argument lengths".format(struct.fileName, struct.name, function.fileName, function.name))
Example #12
    def __init__(self,
                 port=setting.PORT,
                 baud=setting.BAUD,
                 timeout=setting.TIMEOUT):
        self.ser = serial.Serial(port, baud, timeout=timeout)
        self.logger = log.setup_custom_logger(__name__)
        self.report = None
        self.fan0 = pid(1, 1, 1)
        self.fan1 = pid(1, 1, 1)
        self.fan2 = pid(1, 1, 1)
        self.run_thread = threading.Thread(target=self.run_loop, args=())
        self.logger.info('env class initialized.')
Example #13
def coerce(typefrom, typeto):
    """returns boolean indicating whether type is coercible to other type"""

    logger = log.setup_custom_logger(__name__)
    coercible = False
    result = "failed"
    if typefrom in coerce_list:
        if typeto in coerce_list[typefrom]:
            coercible = True
    if coercible:
        result = "succeeded"
    logger.info("Cast from '%s' to '%s' %s" % (typefrom, typeto, result))
    return coercible
Example #14
def main(output_filename, aws_region, log_dir, verbose):
    """
    Routine to export the AWS SSM parameters to a JSON document suitable for
    subsequent importing.
    """
    if not os.path.isdir(log_dir):
        print(f'Log directory does not exist: \'{log_dir}\' '
              '- please create or adjust --log-dir parameter')
        return

    log_level = INFO
    if verbose:
        log_level = DEBUG

    start_time = datetime.now()
    logger = log.setup_custom_logger(
        'root',
        log_dir=log_dir,
        level=log_level,
        prefix=f'export-{os.environ.get("STAGE")}')

    if os.environ.get('STAGE') is None:
        logger.warning('Please set \'STAGE\' environment variable.')
        return
    stage = os.environ.get('STAGE')

    aws_session = boto3.session.Session(
        profile_name='lfproduct-{}'.format(stage), region_name=aws_region)
    ssm_client = aws_session.client('ssm')

    logger.info(f'STAGE           : {stage}')
    logger.info(f'AWS REGION      : {aws_region}')
    logger.info(f'log-dir         : {log_dir}')
    logger.info(f'log level       : {getLevelName(log_level)}')

    logger.debug('Querying for SSM parameters...')
    # https://github.com/shibboleth66/gorgonzola/blob/20f984b8dd28a388ce8e769fe9185b9af022c1db/gorgonzola/aws_ssm_global_parameters.py
    # https://www.programcreek.com/python/example/97943/boto3.client
    # https://github.com/elonmusk408/ansible1/blob/5db7501ebdacf65a8cf076da35ed6c3011c4c58a/lib/ansible/plugins/lookup/aws_ssm.py

    # Query for the list of keys matching our filter
    keys: List[str] = get_ssm_keys(logger, ssm_client, ['cla'])
    # Query for the key/value pairs from the key list
    output = get_ssm_values(logger, ssm_client, keys)
    # Save to the output file
    with open(output_filename, 'w') as outfile:
        json.dump(output, outfile, indent=2)

    logger.info(f'Wrote results to: {output_filename}')
    logger.info('Finished export - duration: {}'.format(datetime.now() -
                                                        start_time))
Example #15
def main():
    (opts, args) = parse_opts()

    LOG = setup_custom_logger('root', opts.log_level.upper())

    esq = ESQueryPeers(
        opts.es_host,
        opts.es_port
    )
    psg = PGDatabase(
        opts.db_name,
        opts.db_user,
        opts.db_pass,
        opts.db_host,
        opts.db_port
    )

    days = psg.get_present_days()
    present_indices = ['logstash-{}'.format(d.replace('-', '.')) for d in days]

    LOG.info('Querying ES cluster for peers...')
    peers = []
    for index in esq.get_indices(opts.index_pattern):
        # skip already injected indices
        if index in present_indices:
            LOG.debug('Skipping existing index: %s', index)
            continue
        # skip current day as it's incomplete
        if index == datetime.now().strftime('logstash-%Y.%m.%d'):
            LOG.debug('Skipping incomplete current day.')
            continue
        LOG.info('Index: {}'.format(index))
        rval = esq.get_peers(
            index=index,
            field=opts.field,
            fleet=opts.fleet,
            program=opts.program,
            max_query=opts.max_size
        )
        if len(rval) == 0:
            LOG.warning('No entries found!')
        LOG.debug('Found: %s', len(rval))
        peers.extend(rval)

    if len(peers) == 0:
        LOG.info('Nothing to insert into database.')
        exit(0)

    LOG.info('Injecting peers data into database...')
    psg.inject_peers(peers)
Example #16
    def f_retry(*args, **kwargs):
        logger = log.setup_custom_logger('sap')
        mtries, mdelay = tries, delay
        while mtries > 1:
            try:
                return f(*args, **kwargs)
            except exceptions as e:
                msg = '{}, Nova tentativa em {} s...'.format(e, mdelay)
                if logger:
                    logger.warning(msg)
                else:
                    print(msg)
                time.sleep(mdelay)
                mtries -= 1
                mdelay *= backoff
        return f(*args, **kwargs)
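
The `f_retry` fragment above is the inner function of a retry decorator: `tries`, `delay`, `backoff`, `exceptions`, and `f` are free variables captured from an enclosing scope the page does not show. A hedged sketch of what that enclosing factory plausibly looks like, with only the closure names taken from the example (the log message is rendered in English here):

# Hypothetical enclosing decorator for the f_retry fragment; only the
# closure variable names come from the example, the rest is assumed.
import time
import functools

def retry(exceptions=(Exception,), tries=4, delay=3, backoff=2, logger=None):
    def deco_retry(f):
        @functools.wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except exceptions as e:
                    msg = '{}, retrying in {} s...'.format(e, mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # Final attempt: let any exception propagate to the caller.
            return f(*args, **kwargs)
        return f_retry
    return deco_retry
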
Example #17
def _init():
    global logger

    # set encoding
    reload(sys)
    sys.setdefaultencoding('utf-8')

    mlog = setup_custom_logger("root")
    mlog.debug("setup logger")

    mpath = path_of('')
    if not os.path.isdir(mpath):
        os.mkdir(mpath)
        mlog.debug("mkdir %s" % mpath)

    logger = logging.getLogger('root')
Example #18
def _init():
    global logger

    # set encoding
    reload(sys)
    sys.setdefaultencoding('utf-8')

    mlog = setup_custom_logger("root")
    mlog.debug("setup logger")

    mpath = path_of('')
    if not os.path.isdir(mpath):
        os.mkdir(mpath)
        mlog.debug("mkdir %s" % mpath)

    logger = logging.getLogger('root')
Example #19
    def __init__(self, connector, mqtt_client,
                 sub_topic,
                 nuances_resolution):

        super(LedPotentiometerResource, self).__init__(
            connector, mqtt_client, sub_topic, nuances_resolution)

        self.logger = log.setup_custom_logger("mqtt_thing_led_resource")
        self.grovepi_interactor_member = InteractorMember(connector,
                                                          'OUTPUT',
                                                          ANALOG_WRITE)

        self.value = 0

        self.grovepi_interactor_member.tx_queue.put(
            (self.grovepi_interactor_member, int(self.value)))
Example #20
def main(name, aws_region, log_dir, verbose):
    """
    Routine to get a specific SSM parameter value.
    """
    if not os.path.isdir(log_dir):
        print(f'Log directory does not exist: \'{log_dir}\' '
              '- please create or adjust --log-dir parameter')
        return

    log_level = INFO
    if verbose:
        log_level = DEBUG

    if not name:
        print(f'Missing parameter \'--name\' - please set on the command line')
        return

    start_time = datetime.now()
    logger = log.setup_custom_logger(
        'root',
        log_dir=log_dir,
        level=log_level,
        prefix=f'get-parameter-{os.environ.get("STAGE")}')

    if os.environ.get('STAGE') is None:
        logger.warning('Please set \'STAGE\' environment variable.')
        return
    stage = os.environ.get('STAGE')

    aws_session = boto3.session.Session(
        profile_name='lfproduct-{}'.format(stage), region_name=aws_region)
    ssm_client = aws_session.client('ssm')

    logger.info(f'STAGE           : {stage}')
    logger.info(f'AWS REGION      : {aws_region}')
    logger.info(f'log-dir         : {log_dir}')
    logger.info(f'log level       : {getLevelName(log_level)}')

    ssm_dict = {'WithDecryption': False, 'Name': name}

    response = ssm_client.get_parameter(**ssm_dict)
    param = response['Parameter']
    logger.info(f'Name: {param["Name"]}, Value: {param["Value"]}')

    logger.info('Finished export - duration: {}'.format(datetime.now() -
                                                        start_time))
Example #21
File: elem.py Project: miradam/elem
    def __init__(self, args):
        self.args = args
        self.logger = log.setup_custom_logger('elem')
        self.console_logger = log.setup_console_logger('console')
        exploitdb_path = ''
        exploit_path = ''
        if self.args.exploitdb:
            exploitdb_path = self.args.exploitdb
        else:
            exploitdb_path = os.path.dirname(os.path.realpath(__file__)) + \
                '/exploit-database'

        if self.args.exploits:
            exploit_path = self.args.exploits
        else:
            exploit_path = os.path.dirname(os.path.realpath(__file__)) + \
                '/exploits'

        self.exploitdb = ExploitDatabase(exploitdb_path,
                                         exploit_path,
                                         self.args.exploitdbrepo)
Example #22
    def __sap_connect(self, pmode, numero_conexao, historico):
        logger = log.setup_custom_logger('sap')
        if not pmode:

            dir_path = os.path.dirname(os.path.realpath(__file__))
            path = os.path.join(dir_path, 'tx.sap')
            sap_gui_auto = self.__get_sap_gui(path)
            appl = sap_gui_auto.GetScriptingEngine

            while appl.Connections.Count == 0:
                time.sleep(5)
            con = appl.Connections.Count - 1
            connection = appl.Children(con)
            self.connections.append(con)
            self.connection_current = con

            if connection.sessions.Count > 0:
                self.session = connection.Children(0)

        else:
            sap_gui_auto = self.__get_sap_gui()
            appl = sap_gui_auto.GetScriptingEngine
            appl.historyEnabled = historico

            if appl.Connections.Count > 0:
                connection = appl.Children(0)
                self.connections.append(0)
                self.connection_current = 0
            else:
                try:
                    connection = appl.openConnectionByConnectionString(
                        sap_conn_str, True, True)
                except Exception as e:
                    logger.error(traceback.format_exc())

            if connection.sessions.count > 0:
                self.session = connection.sessions(numero_conexao)
            else:
                self.session_close(sap_kill=True)
                self.__sap_connect(pmode, numero_conexao, historico)
Example #23
def main(spec_input_file, spec_output_file, log_dir):
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)

    if spec_input_file is None:
        print('Spec input file missing - set with the --spec-input-file option')
        return
    if spec_output_file is None:
        print('Spec output file missing - set with the --spec-output-file option')
        return

    logger = log.setup_custom_logger('root',
                                     log_dir=log_dir,
                                     prefix='multi-file-swagger')
    logger.setLevel(logging.INFO)
    logger.info('log-dir     : {}'.format(log_dir))

    start_time = datetime.now()
    try:
        logger.info(f'Processing swagger spec file: {spec_input_file}')
        with open(spec_input_file, 'r') as stream:
            try:
                data = yaml.load(stream, Loader=yaml.FullLoader)
                data = resolve_references(data, logger, "/")
                with open(spec_output_file, 'w') as yaml_file:
                    yaml.dump(data, yaml_file, sort_keys=False)
            except yaml.YAMLError as exc:
                print(exc)
    except IOError as e:
        print(f'error reading input file {spec_input_file} - error: {e}')
        return
    logger.info(f'Wrote swagger spec file     : {spec_output_file}')
    logger.info(f'Finished - duration         : {datetime.now() - start_time}')
Example #24
def ctoxml(filePath, cilPath):
    logger = log.setup_custom_logger(__name__)
    logger.debug("Using CIL executable located at %s" % (cilPath))
    if os.path.isdir(filePath):
        for root, dirs, files in os.walk(filePath):
            # exclude files that don't end in .c
            files = [fi for fi in files if fi.endswith(".c")]
            for cfile in files:
                fileName = os.path.abspath(os.path.join(root, cfile))
                xmlName = "%s.xml" % (os.path.splitext(cfile)[0])
                # if the generated XML exists, do not re-run CIL on the C file
                if not exists(xmlName):
                    logger.info("Running CIL on %s" % (fileName))
                    result, data = util.myrun([cilPath] + [fileName] + cilArgs)
                    if result:
                        logger.error("CIL processing of %s failed: %s" % (fileName, data))
                        break
                    cXmlName = os.path.join(root, xmlName)
                    logger.debug("Writing results of CIL processing to %s" % (cXmlName))
                    util.writefile(cXmlName, data)
    else:
        result, data = util.myrun([cilPath] + [filePath] + cilArgs)
        cXmlName = "%s.xml" % (os.path.splitext(filePath)[0])
        logger.debug("Writing results of CIL processing to %s" % (cXmlName))
        util.writefile(cXmlName, data)
Example #25
try:
    from connection import connection
    import log
except Exception:
    pass

# Model
ProjectPhase = connection.get_model('res.invest.construction.phase')

# Domain
pcp_codes = ['C-18-082-01']
dom = [('code', 'in', pcp_codes)]

# Search Project Construction Phase
pcps = ProjectPhase.search_read(dom, context={'active_test': 0})

log_pcp_codes = [[], []]
logger = log.setup_custom_logger('project_construction_phase_act_submit')
logger.info('Start process')
logger.info('Total project construction phase: %s' % len(pcps))
for pcp in pcps:
    try:
        ProjectPhase.mork_action_submit([pcp['id']])
        log_pcp_codes[0].append(pcp['code'].encode('utf-8'))
        logger.info('Pass: %s' % pcp['code'])
    except Exception as ex:
        log_pcp_codes[1].append(pcp['code'].encode('utf-8'))
        logger.error('Fail: %s (reason: %s)' % (pcp['code'], ex))
summary = 'Summary: pass %s%s and fail %s%s' \
          % (len(log_pcp_codes[0]),
             log_pcp_codes[0] and ' %s' % str(tuple(log_pcp_codes[0])) or '',
             len(log_pcp_codes[1]),
             log_pcp_codes[1] and ' %s' % str(tuple(log_pcp_codes[1])) or '')
Example #26
def pytest_runtest_setup(item):
    logger = log.setup_custom_logger("root", item.name)
    logger.info("%s is starting.... " % item.name)
Example #27
def test_modulea():
    log.setup_custom_logger()
    ma.do_something()
    assert 1 == 1
Example #28
#coloredlogs.install(level=logging.DEBUG)
script_import.do_import(os.path.dirname(os.path.abspath(__file__)) + "/scripts", globals())



def scheduler(tests):
    while True:
        try:
            for test in tests:
                t = threading.Thread(target=test.do, args=(), kwargs={})
                t.start()
            logger.debug("Active threads: %s" % (threading.active_count()))
            time.sleep(1)
        except KeyboardInterrupt:
            logger.debug("Exiting....")
            sys.exit(0)

def import_channels():
    ChannelImporter()

def import_tests():
    return script_import.ScriptConfig(script_settings_filename).script_objects

if __name__ == "__main__":
    logger = log.setup_custom_logger('notifyme')
    logger.debug("Loaded: %s" % str(script_import.imported_classes))

    import_channels()
    tests = import_tests()
    scheduler(tests)
Example #29
File: main.py Project: noklam/blog

import log
logger = log.setup_custom_logger('root')
logger.debug('main message')

import app
Example #30
import log
from config import BCMConfig
from deprecated.brickpile import BrickPile
from legoutils import Condition
from wanted import WantedDict

# get a wanted list
logger = log.setup_custom_logger('pybcm')
#
config = BCMConfig('../config/bcm.ini')  # create the settings object and load the file
#

update_cache = True

logger.info("Loading wanted dict")
wanted = WantedDict(WantedDict.wantedTypes.BL)
bl_list = '../Sampledata/10030 Star Destroyer.xml'
wanted.read(bl_list)
#
bp = BrickPile()


pricefile = 'price.pickle'
if update_cache:
    logger.info("Reading prices from web")
    bp.readpricesfromweb(wanted, Condition.NEW|Condition.USED)
    bp.price_to_pickle(pricefile)

else:
    logger.info("Reading prices from pickle")
    bp._wanted_dict = wanted
Example #31
import logging
from pprint import pprint

import log
from config import BCMConfig
from trowel import Trowel

# get a wanted list
logger = log.setup_custom_logger("pybcm")
logger.setLevel(logging.INFO)
logging.getLogger("pybcm.trowel").setLevel(logging.WARNING)
#logging.getLogger("pybcm.rest").setLevel(logging.WARNING)

config = BCMConfig('../config/bcm.ini')  # create the settings object and load the file
tr = Trowel(config)

inv = tr.get_set_inv('76023-1')
price = tr.get_inv_prices_df(inv)
best = tr.best_prices(price)
print(price)
# set_summary('75146-16')
#tr.set_summary('10182-1')
#tr.set_summary('10185-1')
#sets = tr.price_sets(['10182-1', '10185-1', '10190-1', '10197-1', '10246-1', '10251-1', '10255-1'])
#pprint(sets)
Example #32
    if os.path.isdir(filePath):
        for root, dirs, files in os.walk(filePath):
          #excludes files that don't end in .c
          files = [fi for fi in files if fi.endswith(".c")]
          for cfile in files:
            fileName = os.path.abspath(os.path.join(root,cfile))
            xmlName = "%s.xml" % (os.path.splitext(cfile)[0]) 
            #if the generated XML exists, do not re-run CIL on the C file
            if not exists(xmlName): 
              logger.info("Running CIL on %s" % (fileName))
              result, data = util.myrun([cilPath] +  [fileName] + cilArgs)
              if result:
                  logger.error("CIL processing of %s failed: %s" % (fileName, data))
                  break
              cXmlName= os.path.join(root,xmlName)
              logger.debug("Writing results of CIL processing to %s" % (cXmlName))
              util.writefile(cXmlName, data)
    else:
      result, data = util.myrun([cilPath] +  [filePath] + cilArgs)
      cXmlName = "%s.xml" % (os.path.splitext(filePath)[0])
      logger.debug("Writing results of CIL processing to %s" % (cXmlName))
      util.writefile(cXmlName, data)

if __name__ == "__main__":
  import sys
  logger = log.setup_custom_logger("run_cil")
  #parser = argparse.ArgumentParser(description='Runs CIL to convert C to XML.')
  #parser.add_argument ('-p', action="store", dest="path")
  #parser.add_argument ('-c', action="store", dest="cil")
  #args = parser.parse_args()
  ctoxml(str(sys.argv[1]), str(sys.argv[2]))
Example #33
import log
import threading
from numpy import exp


__author__ = "Thomas"

logger = log.setup_custom_logger('ComputeZscore')


class ComputeZscore(threading.Thread):

    def __init__(self, estimate, row, index, coeff=1):
        threading.Thread.__init__(self, target=self.score)
        self.estimate = estimate
        self.row = row
        self.coeff = coeff
        self.score = {"SCORE": 0}
        self.index = index
        self._return = None

    def run(self):

        if self._Thread__target is not None:
            self._return = self._Thread__target()

    def score(self):

        self.score["PRIX_PERSONNE"] = self.estimate.get_zscore(self.row["PRIX_PERSONNE"], "PRIX_PERSONNE",
                                                     (str(self.row["REGION"]), str(self.row["TYPE_BATEAU_AGREGE"])))
Example #34
#!/usr/bin/python2
import cil_xml_extract
import ooc
import log
import argparse
import sys, os

def objectify(structs, functions):
    logger = log.setup_custom_logger(__name__)
    #for (index, item) in enumerate(structs):
    #for (index, item) in enumerate(functions):
    for struct in structs:
        logger.debug("struct {0}:{1} has {2} fields".format(struct.fileName, struct.name, len(struct.fields)))
        for function in functions:
            logger.debug("function {0}:{1} has {2} arguments".format(function.fileName, function.name, len(function.arguments)))
            if len(struct.fields) == len(function.arguments):
                logger.info("Struct {0}:{1} and Function {2}:{3} have matching field/argument lengths".format(struct.fileName, struct.name, function.fileName, function.name))


if __name__ == "__main__":
  logger = log.setup_custom_logger(__name__)
  parser = argparse.ArgumentParser(description='Runs metac compliant XML processor.')
  parser.add_argument ('-p', action="store", dest="path")
  parser.add_argument ('-c', action="store", dest="cil")
  args = parser.parse_args()
  ex = cil_xml_extract.CilExtract()
  ex.extract(args.path, args.cil)
  objects = objectify(ex.structs, ex.functions)
Example #35
File: Data.py Project: Jingoo88/CA2

import pandas as pd
import os
import datetime
import log

from numpy.random import uniform
from sys import exit

__author__ = 'Thomas'

logger = log.setup_custom_logger('Data')


class Data:
    """
    Data processing class, built in methods clean data, fill NAs ... for estimation purposes
    """

    def __init__(self, data_handler):

        logger.info("Initializing Data object")

        self.__now = datetime.datetime.now()
        self.__headers = []
        self.__data = {}
        self.__data_handler = data_handler

        logger.info("Data object initialized")
Example #36
import log
import time
import math
import redis
import logging
import traceback
from threading import Thread

##############################################
# GLOBALS
##############################################
LOG = log.setup_custom_logger('imu')
LOG.setLevel(logging.WARN)

class IMU(object):

    """
    IMU class for obtaining orientation data
    """

    def __init__(self, GPS, poll_interval_ms=10.,
        roll_offset=0, yaw_offset=0, pitch_offset=0):
        self.threadAlive = False
        self.roll = 0
        self.pitch = 0
        self.yaw = 0
        self.GPS = GPS
        self.roll_offset = roll_offset
        self.yaw_offset = yaw_offset
        self.pitch_offset = pitch_offset
        self.poll_interval_ms = poll_interval_ms
Example #37
import log
import threading

__author__ = "Thomas"

logger = log.setup_custom_logger('UpdateTable')


class UpdateTable(threading.Thread):
    """
    Class creating the different threads for updating the database
    Each thread is a single SQL query
    2015/12/01 : As of now the request is static; it should be passed as an
                 argument, and therefore could do more than just updates
    2015/12/01 : This class should inherit from a broader query class
    """

    def __init__(self, engine, key, value):
        threading.Thread.__init__(self)
        self.engine = engine
        self.key = key
        self.value = value

    def run(self):
        """
        Executes unique request of the object
        :return: None
        """

        update = "UPDATE PRODUITS SET NOTE_NEW_ALGO = %(value).9f WHERE PK_PRODUITS = %(key)s"%{"value": self.value["SCORE"], "key": self.key}
Example #38
from flask import jsonify
from ml import getClassify
from ml import getClassifies
from database import db, Data
from sqlalchemy.orm import sessionmaker
from flask import Flask, url_for, redirect
from flask.ext.wtf import Form
from wtforms.fields import StringField, SubmitField
import random
import log

__copyright__ = "Copyright (C) 2015 MLI SG SAP Inc."
__license__ = "MLI SG SAP"
__version__ = "0.1.0"

logger = log.setup_custom_logger('root')
logger.info('Starting Server')

# -- Init --
app = Flask(__name__, static_url_path='')
app.config['SECRET_KEY'] = "t%Bh9mXe=YzWJh3fW8*v"

# -- Database Session --
Session = sessionmaker(bind=db)
session = Session()


# - Submit Form --
class SubmitForm(Form):
    message = StringField(u'message to classify:')
    submit = SubmitField(u'process')
Example #39
import os
import log
import time
import logging
import picamera

from PIL import Image
from datetime import datetime
from threading import Thread


# GUIDE
# http://ava.upuaut.net/?p=768

LOG = log.setup_custom_logger("Camera")
LOG.setLevel(logging.WARNING)


class GliderCamera(object):

    def __init__(self, 
        low_quality_interval=15,
        high_quality_interval=60,
        photo_path="/data/camera"):
        self.photo_path = photo_path
        self.last_low_pic = time.time()
        self.last_high_pic = time.time()
        self.low_quality_interval = low_quality_interval
        self.high_quality_interval = high_quality_interval
        self.video_requested = 0
Example #40
import numpy as np
import log
from Data import Data
from Estimation import Estimation
from ComputeZscore import ComputeZscore

__author__ = 'Thomas'

logger = log.setup_custom_logger("Score")


class Score:

    def __init__(self, query1, query2, query3, data_handler):

        logger.info("Initializing Score object")

        self.data = Data(data_handler)
        self.data = self.data.get_clean_datab(query1)
        self.estimate = Estimation(query2, data_handler)
        self.query3 = query3
        self.coeff = {}
        self.score = {}
        self.score_details = {}
        self.coeff = {"PUISSANCE_PERSONNE":1, "TAUX_DE_TRANSFORMATION":1, "PRIX_PERSONNE":1,
                                     "PRIX_METRE":1, "PRIX_PUISSANCE":1, "DELAIS_MOYEN_REPONSE_MESSAGES":1}

        self.continuous_variables = ["PUISSANCE_PERSONNE", "TAUX_DE_TRANSFORMATION", "PRIX_PERSONNE",
                                     "PRIX_METRE", "PRIX_PUISSANCE", "DELAIS_MOYEN_REPONSE_MESSAGES"]
Example #41
import log
import time
import math
import logging
from threading import Thread

##############################################
# GLOBALS
##############################################
LOG = log.setup_custom_logger('pilot')
LOG.setLevel(logging.WARN)


class Pilot(object):
    """
    Pilot class for translating our heading, orientation, and desired 
    coordinates into intended wing angles
    """

    def __init__(self, IMU, 
        desired_yaw=0, desired_pitch=-0.52, 
        turn_severity=1.2, servo_range=0.5236,
        destination=[54.816069,-6.052094],
        location=[52.254197,-7.181244],
        wing_calc_interval=0.02):
        
        self.IMU = IMU
        self.threadAlive = False
        
        self.servo_range = servo_range
        self.wing_param = {
Example #42
try:
    script_path = os.path.dirname(purchase_path)
    migration_path = os.path.dirname(script_path)
    controller_path = '%s/controller' % migration_path
    sys.path.insert(0, controller_path)
    from connection import connection
    import log
except Exception:
    pass

# Model
PurchaseContract = connection.get_model('purchase.contract')

# Domain
dom = []

# Search PO Contract
poc = PurchaseContract.search_read(dom)

log_po_names = [[], []]
logger = log.setup_custom_logger('po_contract_act_set_write_uid_by_create_uid')
logger.info('Start process')
logger.info('Total contract: %s' % len(poc))
try:
    # Update write uid with create_uid
    poc = [x['id'] for x in poc]
    PurchaseContract.mork_set_write_uid_by_create_uid(poc)
    logger.info('Updated: %s contracts' % len(poc))
except Exception as ex:
    logger.error('Fail: %s' % ex)
logger.info('End process')
Example #43
import sys
import logging
import traceback

import log

try:
    import glider_lib
    import glider_schedule as schedule
except Exception:
    traceback.print_exc()
    sys.exit(1)
from glider_states import *

##########################################
# TODO
##########################################

##########################################
# GLOBALS
##########################################
LOG = log.setup_custom_logger('glider')
LOG.setLevel(logging.WARNING)

STATE_MACHINE = {
    "HEALTH_CHECK"  : healthCheck(),
    "ASCENT"        : ascent(),                   
    "RELEASE"       : release(),                
    "FLIGHT"        : glide(),                    
    "PARACHUTE"     : parachute(),
    "RECOVER"       : recovery(),
    "ERROR"         : errorState()
}
CURRENT_STATE = "HEALTH_CHECK"
RUNNING = True

##########################################
Example #44
import log
import math
import time
import numpy
import logging
import subprocess

# Glider Imports
import glider_lib
import glider_schedule as schedule
import glider_states as states

##########################################
# GLOBALS
##########################################
LOG = log.setup_custom_logger('state_controller')
LOG.setLevel(logging.WARN)
##########################################
# FUNCTIONS - UTIL
##########################################
def setState(newState):
    """Sets the global state which is used for various updates"""
    global STATE
    if getattr(states, newState, None):
        STATE = newState
    else:
        raise Exception("State (%s) does not exist" % newState)


def scheduleRelease():
    global CURRENT_STATE
Example #45
File: main.py Project: Jingoo88/CA2
import time
import log
import json
import getopt
from sys import argv
from Score import Score
from DataHandler import DataHandler
from PartialScore import PartialScore

__author__ = 'Thomas'

logger = log.setup_custom_logger("Main")

start_time = time.clock()

logger.info("Main Started at time %i"%start_time)

with open('test.json') as data_file:
    data = json.load(data_file)

data_handler = DataHandler(usr=data["usr"], table=data["table"], url=data["url"], pwd=data["pwd"])
myopts, args = getopt.getopt(argv[1:], "c:i:", ["coeffs=", "ids="])

if len(argv) > 1:

    for i, j in myopts:
        if i == "--ids" or i == "-i":
            requested_ids = j.split(",")
        elif i == "--coeffs" or i == "-c":
            input_coeffs = j.split(",")
Example #46
from __future__ import print_function

import dsid_pb2
import dsid_pb2_grpc
import log

import grpc

import time
import numpy as np

from sys import getsizeof

logger = log.setup_custom_logger('grpc_client_python')

_STRINGS_TO_TEST = {
    1:
    "a",
    2:
    "ab",
    4:
    "abcd",
    8:
    "abcdefgh",
    16:
    "x5cHKQWH0Vsou5Ej",
    32:
    "xv1lWaeDWxn48TTVEjMFeam74Dj7xaeB",
    64:
    "VPh1NLqEa80CUurgztYQawE0D6uJWuwqZK8hxrpxflfbkRb3MnoPqAhWFkgLd41p",
    128:
Example #47
import log

from GalGen3 import CreateGalaxy
from dataclasses import Calendar, PropertyRegister
from MarketModel_Economy import Economy

##########

###### Globals
Cal = Calendar()
WorldBank = PropertyRegister(Economy.AllGoodsName)

if __name__ == '__main__':
    if not 'logger' in locals():
        logger = log.setup_custom_logger("root")
    logger.info('*** COMMENCING ***')
    Cal.set_date(2050, 1, 2)

    logger.info('Date is : %s', Cal.Date)
    ### Generates Galaxy
    Pl = CreateGalaxy(10, Cal, WorldBank)
    #Pl[0].Description(0)
    Pl[0].CreateEconomy(20)

    #### Money Distribution
    WorldBank.store(Pl[0].ecoactor['Government'].account['gov'], 'Money',  20.)
    for sub in Pl[0].ecoactor['Resources'].production_goods:
        WorldBank.store(Pl[0].ecoactor['Resources'].account[sub], 'Money',  2.)
    for sub in Pl[0].ecoactor['Business'].production_goods:
        WorldBank.store(Pl[0].ecoactor['Business'].account[sub], 'Money',  2.)
Example #48
import log
import time
import signal as sig

from config import cfg

logger = log.setup_custom_logger('root', 'swapper')
logger.debug('Starting OCC to TUSC swapper')

import eth_api.interactor_eth_api as eth_api
import db_access.db as db

general_cfg = cfg["general"]


def keyboard_interrupt_handler(signal, frame):
    print("KeyboardInterrupt (ID: {}) has been caught. Cleaning up...".format(
        signal))
    exit(0)


sig.signal(sig.SIGINT, keyboard_interrupt_handler)

if __name__ == '__main__':
    logger.debug('Starting swapper')
    db.initiate_database_connection()

    retry_interval = 4
    swap_count = 0
    while True:
        logger.debug('Swapping')
Example #49
#!/usr/bin/python
##############################################
#
# Glider GroundStation Software
# For use with launch of GliderV2:
#   Daniel Vagg 2015
#
##############################################
import os
import log
import time
import logging

LOG = log.setup_custom_logger('groundstation')
LOG.setLevel(logging.WARNING)

#####################################
# DATA HANDLERS
#####################################
class TelemetryHandler(object):
    def __init__(self, output="output/telemetry.data"):
        self.output = output
        self.last_packet = None
        self.all_sat_last_packets = {}
        self.components = [
            "callsign", "index", "hhmmss", 
            "NS", "lat", "EW", "lon", "gps_dil", "alt", 
            "temp1", "temp2", "pressure"
        ]
        with open(self.output, "a") as output:
            output.write("# " + ",".join(self.components) + "\n")
Example #50
def main(input_filename, aws_region, dry_run, log_dir, overwrite, verbose):
    """
    Routine to import a list of key/value pairs from a JSON document to AWS SSM
    """
    if not os.path.isdir(log_dir):
        print(f'Log directory does not exist: \'{log_dir}\' '
              '- please create or adjust --log-dir parameter')
        return

    if not os.path.isfile(input_filename):
        print(f'Input filename does not exist: \'{input_filename}\' '
              '- please specify a valid input file.')
        return

    log_level = INFO
    if verbose:
        log_level = DEBUG

    overwrite_flag = False
    if overwrite:
        overwrite_flag = True

    start_time = datetime.now()
    logger = log.setup_custom_logger(
        'root',
        log_dir=log_dir,
        level=log_level,
        prefix=f'import-{os.environ.get("STAGE")}')

    if os.environ.get('STAGE') is None:
        logger.warning('Please set \'STAGE\' environment variable.')
        return
    stage = os.environ.get('STAGE')

    aws_session = boto3.session.Session(
        profile_name='lfproduct-{}'.format(stage), region_name=aws_region)
    ssm_client = aws_session.client('ssm')

    logger.info(f'STAGE           : {stage}')
    logger.info(f'AWS REGION      : {aws_region}')
    logger.info(f'dry-run         : {dry_run}')
    logger.info(f'log-dir         : {log_dir}')
    logger.info(f'log level       : {getLevelName(log_level)}')

    with open(input_filename) as json_file:
        data = json.load(json_file)

        for kv in data:
            logger.info(f'Processing {kv["Name"]}: {kv["Value"]}')
            if dry_run:
                logger.info('Skipping upload - dry-run mode')
                continue

            try:
                ssm_client.put_parameter(Name=kv["Name"],
                                         Value=kv["Value"],
                                         Type='String',
                                         Overwrite=overwrite_flag)
            except ClientError as e:
                logger.info(
                    f'Skipping {kv["Name"]}: {kv["Value"]} - '
                    f'key already exists and overwrite set to: {overwrite_flag}'
                )

    logger.info('Finished export - duration: {}'.format(datetime.now() -
                                                        start_time))
Example #51
import log
from pickle import load
from math import isnan


__author__ = 'Thomas'

logger = log.setup_custom_logger('Estimation')


class PartialEstimation:
    """
    Class computing variances and means for the variables used in score estimates, then calculating z-scores
    Basically the calculating tool of the library
    """

    def __init__(self):

        logger.info("Initializing PartialEstimation object")

        self.__means = self.load_obj("last_estimated_means")
        self.__std = self.load_obj("last_estimated_vars")

        logger.info("Estimation object initialized")

    @staticmethod
    def load_obj(name):

        with open(name + '.pkl', 'rb') as f:
            return load(f)
Example #52
import log
import time
import math
import json
import logging
import traceback
import dateutil.parser
from threading import Thread

# GUIDE
# http://ava.upuaut.net/?p=768

##########################################
# GLOBALS
##########################################
LOG = log.setup_custom_logger('telemetry')
LOG.setLevel(logging.WARN)


class TelemetryHandler():

    def __init__(self, radio, imu, pilot, gps):
        self.threadAlive = True

        self.radio = radio
        self.imu = imu
        self.pilot = pilot
        self.gps = gps
        self.glider_state = None
        self.alien_gps_dump = {}
Example #53
import log
import sys
import time
import json
import logging
import datetime
import threading

#####################################
# GLOBALS
#####################################
STATE_DATA = {}
FUNC_STACK = {}
LOG = log.setup_custom_logger('scheduler')
LOG.setLevel(logging.DEBUG)

#####################################
# FUNCTIONS
#####################################
def init():
  LOG.info("Initializing the Scheduler")

def shutDown():
  disableAllFunc()
  LOG.info("Shutting Down")

def enableFunc(funcName, function, interval, count=0):
  global FUNC_STACK

  # Cancel Thread if it already exists.
  if FUNC_STACK.get(funcName) and FUNC_STACK.get(funcName).get("THREAD"):
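
Example #53 cuts off inside enableFunc. The FUNC_STACK pattern it starts (one re-arming threading.Timer per named function) can be sketched as follows; everything past the visible lines is an assumption:

# Hedged completion of the FUNC_STACK timer pattern from Example #53;
# the cancel/re-arm logic below the visible lines is assumed.
import threading

FUNC_STACK = {}

def enableFunc(funcName, function, interval, count=0):
    global FUNC_STACK

    # Cancel the existing timer if one is registered under this name.
    if FUNC_STACK.get(funcName) and FUNC_STACK.get(funcName).get("THREAD"):
        FUNC_STACK[funcName]["THREAD"].cancel()

    def wrapped():
        function()
        entry = FUNC_STACK.get(funcName)
        # Re-arm until the optional run count is exhausted (0 = forever).
        if entry is not None and (count == 0 or entry["RUNS"] < count):
            entry["RUNS"] += 1
            entry["THREAD"] = threading.Timer(interval, wrapped)
            entry["THREAD"].start()

    FUNC_STACK[funcName] = {"RUNS": 1, "THREAD": threading.Timer(interval, wrapped)}
    FUNC_STACK[funcName]["THREAD"].start()
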
Example #54
import log
import joblib
from trainer.transformer import DataTransformer
import pandas as pd

logger = log.setup_custom_logger(__name__)


class Predictor:
    def __init__(self, name, classifier, transformer):
        self.name = name
        self.classifier = classifier
        self.transformer = transformer

    def passenger_to_pandas(self, passenger):
        data = pd.DataFrame(columns=list(passenger.keys()),
                            data=[list(passenger.values())])
        features = self.transformer.transform(data)
        return features

    def predict(self, passenger):
        features = self.passenger_to_pandas(passenger)
        logger.info(features)
        prediction = self.classifier.predict(features)
        return prediction


actual_predictors = []


def build_predictor(name):
Example #55
import sys
import time
import random

# Check if verbose
verbose = False
for arg in sys.argv:
    if arg == '--verbose' or arg == '-v':
        verbose = True

# Start logger
import log
import logging
logger = log.setup_custom_logger('root', logging.DEBUG if verbose else logging.ERROR)

# Load plugins
logger.debug('Loading plugins')
from plugins import collage_plugins, get_resolution_plugins, set_wallpaper_plugins

# Read options and arguments
logger.debug('Reading config file and parsing options')
from config import get_config
config = get_config()
print 'config: ', config

# Find wallpapers
logger.debug('Initialize wallpapers')
from wallpapers import Wallpapers
wps = Wallpapers(config)

# Instantiate collages
Example #56
    def __init__(self):
        self.logger = log.setup_custom_logger(__name__)
Example #57
import log
import logging
##########################################
# TODO
##########################################

##########################################
# GLOBALS
##########################################
delay_reset = 0.1
delay_xfer = 0.05
spi_bus = 0
spi_dev = 0
spi = None
wing_angle = 0
max_speed = 40000 # fuckit.. keep it that low.

LOG = log.setup_custom_logger('ATMEGA')
LOG.setLevel(logging.WARNING)

##########################################
# FUNCTIONS
##########################################
def W_glider_command(command):
    comm_string = "$%s;" % command
    LOG.debug("Sending %s" % comm_string)
    char_arr = [ord(i) for i in comm_string]
    
    while True:
        response = raw_xfer(char_arr)
        if response == char_arr:
            break
        else:
Example #58
import importlib
import threads.AADLThreadFunctionsSupport as tfs

import folder_tree_functions as folderTree

import datetime
import XMLTags

from lxml import etree

import systems.SystemsManager as sm
from systems.System import System

import global_filepath
import log

logger = log.setup_custom_logger("root")


def createNewThread(system_root, process, thread, classname, associated_class):
    if not classname:
        return None

    # Import the module that contains the type of thread we want to add
    thread_module = importlib.import_module("threads." + classname)

    # Get the class that handles that particular thread
    thread_class = getattr(thread_module, classname)

    # Create a new instance of the thread's class and then launch the
    # actual code-generation procedure
    new_thread = thread_class(system_root, process, thread, associated_class)
Example #59
import os
import log
import logging

from gps import *
from time import *
from threading import Thread

# GUIDE
# http://ava.upuaut.net/?p=768

LOG = log.setup_custom_logger("GPS")
LOG.setLevel(logging.WARN)


class GPS_USB(object):

    def __init__(self):
        self.gpsd = gps(mode=WATCH_ENABLE) #starting the stream of info

    def poll_gps(self):
        while self.threadAlive:
            self.gpsd.next() #this will continue to loop and grab EACH set of gpsd info to clear the buffer
            LOG.debug("GPS data:")
            LOG.info('Error     %s %s' % (self.gpsd.fix.epx, self.gpsd.fix.epy))
            LOG.debug('latitude     %s' % self.gpsd.fix.latitude)
            LOG.debug('longitude    %s' % self.gpsd.fix.longitude)
            LOG.debug('time utc     %s + %s' % (self.gpsd.utc, self.gpsd.fix.time))
            LOG.debug('altitude (m) %s' % self.gpsd.fix.altitude)
 
    def start(self):