Example #1

import sys

if len(sys.argv) < 3:
    print """
    OpenEmbedded source checker script require two arguments:

    1. location of conf/checksums.ini
    2. path to DL_DIR (without "/" at the end)
    """
    sys.exit(0)

import ConfigParser, os, itertools

checksums_parser = ConfigParser.ConfigParser()
checksums_parser.read(sys.argv[1])

parsespin = itertools.cycle('|/-\\')  # spinner chars | / - \ (the r'...\\' form yielded two backslashes)

item = 1
files_total = len(checksums_parser.sections())
files_checked = 0
files_good = 0
files_wrong = []

for source in checksums_parser.sections():
    archive = source.split("/")[-1]
    localpath = os.path.join(sys.argv[2], archive)
    md5 = checksums_parser.get(source, "md5")
    sha = checksums_parser.get(source, "sha256")
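    # --- The example is truncated here. A minimal sketch (an assumption, not
    # --- from the original source) of how the loop body might continue: hash
    # --- the local archive with hashlib and compare against the stored sums.
    if os.path.exists(localpath):
        import hashlib  # would normally sit with the other imports
        with open(localpath, 'rb') as f:
            contents = f.read()
        if (hashlib.md5(contents).hexdigest() == md5 and
                hashlib.sha256(contents).hexdigest() == sha):
            files_good += 1
        else:
            files_wrong.append(archive)
        files_checked += 1
    sys.stdout.write("\rChecking %d/%d %s" % (item, files_total, parsespin.next()))
    sys.stdout.flush()
    item += 1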
Example #2

############# COMMAND CODE IS BELOW ######################

import argparse
import ast
from ConfigParser import ConfigParser

########### OPTIONS
parser = argparse.ArgumentParser(description='Command line parser of skim options')
parser.add_argument('--config',   dest='cfgfile',   help='Name of config file',   required = True)
parser.add_argument('--sample',   dest='samplename',   help='Type of sample: 0 signal, 1 bkgmodel',   required = True)
parser.add_argument('--casename', dest='casename',  help='Case name',   required = True)
args = parser.parse_args()
configfilename = args.cfgfile
sampletorun    = args.samplename
case           = args.casename

###########Read Config file
print "[INFO] Reading skim configuration file . . ."
cfgparser = ConfigParser()
cfgparser.read(configfilename)
##########Get skim variables
print "[INFO] Getting configuration parameters . . ."
directory   = ast.literal_eval(cfgparser.get("configuration","directory"))
print "    -The directory:"
print "      *",directory
signalsamples  = ast.literal_eval(cfgparser.get("configuration","mvasigsamples"))
print "    -The list of signal samples:"
for sample in signalsamples:
  print "      *",sample
bdtbkgsamples  = ast.literal_eval(cfgparser.get("configuration","mvabkgsamples"))
print "    -The list of bdtbkg samples:"
print "      *",bdtbkgsamples[0] 
tag         = ast.literal_eval(cfgparser.get("configuration","tag"))
print "    -The tag:"
Example #3
#!/usr/bin/env python
import re
import argparse
import sys
import pika
import imp
import os
import ConfigParser
import logging

logging.basicConfig()
config = ConfigParser.ConfigParser()
config.read("./rabbitMQtests.config")

class Receiver(object):
    def __init__(self):
        # set up the SSL options for the connection
        sslOptions = {}
        sslOptions["ca_certs"] = os.path.abspath(config.get('general', 'CA_CERT_FILE'))
        sslOptions["certfile"] = os.path.abspath(config.get('general', 'CLIENT_CERT_FILE'))
        sslOptions["keyfile"]  = os.path.abspath(config.get('general', 'CLIENT_KEY_FILE'))

        try:
            self.connection = pika.BlockingConnection(pika.ConnectionParameters(
                host=config.get('general', 'HOST'),
                port=config.getint('general', 'PORT'),
                ssl=config.getboolean('general', 'USE_SSL'),
                ssl_options=sslOptions))
            self.channel = self.connection.channel()
        except pika.exceptions.AMQPConnectionError as e:
            # added so the truncated try block parses
            logging.error("Failed to connect to RabbitMQ: %s" % e)
            raise
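Newer pika releases (1.0 and later) dropped the ssl flag and the ssl_options dict in favor of pika.SSLOptions wrapping a standard ssl.SSLContext; a sketch of the equivalent setup under that API, reusing the same config keys:

import ssl
import pika

context = ssl.create_default_context(cafile=config.get('general', 'CA_CERT_FILE'))
context.load_cert_chain(certfile=config.get('general', 'CLIENT_CERT_FILE'),
                        keyfile=config.get('general', 'CLIENT_KEY_FILE'))
params = pika.ConnectionParameters(host=config.get('general', 'HOST'),
                                   port=config.getint('general', 'PORT'),
                                   ssl_options=pika.SSLOptions(context))
connection = pika.BlockingConnection(params)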
Example #4
# read an INI (config) file
import ConfigParser

read_opts = ConfigParser.ConfigParser()
read_opts.read('../data/options.ini')

# print parameters and values
for section in read_opts.sections():
    print "[%s]" % section
    for param in read_opts.items(section):
        print param
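The same read-and-print loop ported to Python 3, where the module was renamed to configparser and print is a function (a straightforward port, not part of the original example):

# read an INI (config) file (Python 3)
import configparser

read_opts = configparser.ConfigParser()
read_opts.read('../data/options.ini')

# print parameters and values
for section in read_opts.sections():
    print("[%s]" % section)
    for param in read_opts.items(section):
        print(param)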
Example #5

# Import necessary modules
import os
import tempfile
import urllib
import csv
import pyodbc
import ConfigParser


print "Read config.ini file..."
# User defined variables from .ini file...
# User needs to change config path
configPath = "D:\Dropbox\NiyaMIT\config.ini"
cfgParser = ConfigParser.ConfigParser()
cfgParser.read(configPath)
url = cfgParser.get("HIFLD OPEN DATA URLS", "CareFlty_URL")
url2 = cfgParser.get("HIFLD OPEN DATA URLS", "CareFlty2_URL")
userDefinedServer = cfgParser.get("SQL SERVER", "ServerName")
UserName = cfgParser.get("SQL SERVER", "UserName")
Password = cfgParser.get("SQL SERVER", "Password")
possibleDatabaseListRaw = cfgParser.get("DATABASE LIST", "possibleDatabaseList")
possibleDatabaseList = possibleDatabaseListRaw.split(",")
userDefinedSqFt = cfgParser.get("HOSPITALS", "BedRoomSqFt")
print "Done"
print
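The snippet ends before the configuration values are used; a minimal sketch of how the SQL Server settings read above might feed pyodbc (the ODBC driver name here is an assumption, not from the original):

# hypothetical use of the values read from config.ini above
connStr = ("DRIVER={ODBC Driver 17 for SQL Server};"  # assumed driver name
           "SERVER=%s;UID=%s;PWD=%s" % (userDefinedServer, UserName, Password))
conn = pyodbc.connect(connStr)
cursor = conn.cursor()
cursor.execute("SELECT name FROM sys.databases")
for row in cursor.fetchall():
    print(row.name)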

Example #6
# NOTE: this excerpt also relies on helpers defined elsewhere in the original
# script (_check_ndk_root_env, _check_python_bin_env, _run_cmd, _pushd).
import ConfigParser
import os
import re
import sys

def read_config(filename):
    config = ConfigParser.ConfigParser()
    config.readfp(open(filename))
    return config

def main():

    cur_platform = '??'
    llvm_path = '??'
    ndk_root = _check_ndk_root_env()
    # strip the double quotes (") from the path
    ndk_root = re.sub(r"\"", "", ndk_root)
    python_bin = _check_python_bin_env()

    platform = sys.platform
    if platform == 'win32':
        cur_platform = 'windows'
    elif platform == 'darwin':
        cur_platform = platform
    elif 'linux' in platform:
        cur_platform = 'linux'
    else:
        print 'Your platform is not supported!'
        sys.exit(1)

    if platform == 'win32':
        x86_llvm_path = os.path.abspath(
            os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt',
                         '%s' % cur_platform))
    else:
        x86_llvm_path = os.path.abspath(
            os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt',
                         '%s-%s' % (cur_platform, 'x86')))
    x64_llvm_path = os.path.abspath(
        os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt',
                     '%s-%s' % (cur_platform, 'x86_64')))

    if os.path.isdir(x86_llvm_path):
        llvm_path = x86_llvm_path
    elif os.path.isdir(x64_llvm_path):
        llvm_path = x64_llvm_path
    else:
        print 'llvm toolchain not found!'
        print 'path: %s or path: %s are not valid! ' % (x86_llvm_path,
                                                        x64_llvm_path)
        sys.exit(1)

    project_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    cocos_root = os.path.abspath(os.path.join(project_root, ''))
    cxx_generator_root = os.path.abspath(
        os.path.join(project_root, 'tools/bindings-generator'))

    # save config to file
    config = ConfigParser.ConfigParser()
    config.set('DEFAULT', 'androidndkdir', ndk_root)
    config.set('DEFAULT', 'clangllvmdir', llvm_path)
    config.set('DEFAULT', 'cocosdir', cocos_root)
    config.set('DEFAULT', 'cxxgeneratordir', cxx_generator_root)
    config.set('DEFAULT', 'extra_flags', '')

    # To fix a parse error on Windows, we must define __WCHAR_MAX__ and undefine __MINGW32__.
    if platform == 'win32':
        config.set('DEFAULT', 'extra_flags',
                   '-D__WCHAR_MAX__=0x7fffffff -U__MINGW32__')

    conf_ini_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'userconf.ini'))

    print 'generating userconf.ini...'
    with open(conf_ini_file, 'w') as configfile:
        config.write(configfile)

    # set proper environment variables
    if 'linux' in platform or platform == 'darwin':
        os.putenv('LD_LIBRARY_PATH', '%s/libclang' % cxx_generator_root)
    if platform == 'win32':
        path_env = os.environ['PATH']
        os.putenv(
            'PATH', r'%s;%s\libclang;%s\tools\win32;' %
            (path_env, cxx_generator_root, cxx_generator_root))

    try:

        tolua_root = '%s/tools/tolua' % project_root
        output_dir = '%s/../runtime-src/Classes/auto' % project_root

        cmd_args = {'dragonbones.ini' : ('dragonbones', 'lua_dragonbones_auto'), \
                    }
        target = 'lua'
        generator_py = '%s/generator.py' % cxx_generator_root
        for key in cmd_args.keys():
            args = cmd_args[key]
            cfg = '%s/%s' % (tolua_root, key)
            print 'Generating bindings for %s...' % (key[:-4])
            command = '%s %s %s -s %s -t %s -o %s -n %s' % (
                python_bin, generator_py, cfg, args[0], target, output_dir,
                args[1])
            _run_cmd(command)

        if platform == 'win32':
            with _pushd(output_dir):
                _run_cmd('dos2unix *')

        print '---------------------------------'
        print 'Generating lua bindings succeeds.'
        print '---------------------------------'

    except Exception as e:
        if e.__class__.__name__ == 'CmdError':
            print '---------------------------------'
            print 'Generating lua bindings fails.'
            print '---------------------------------'
            sys.exit(1)
        else:
            raise
Example #8
def read_ini(ini_path):
    # validate the path first (the original called os.path.isfile after
    # reading and discarded the result)
    if not os.path.isfile(ini_path):
        raise IOError("config file not found: %s" % ini_path)
    config = configparser.ConfigParser(allow_no_value=True)
    config.read(ini_path)
    BORKIFY_METHOD = config.getint('Borkifier', 'Language')
    return BORKIFY_METHOD
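allow_no_value=True lets the parser accept option names with no "= value" part. A small INI file this function could read (the contents are illustrative, not from the original):

[Borkifier]
Language = 1
; the bare key below is only legal because of allow_no_value=True
borkify_names

Calling read_ini on that file would return 1.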
Example #9
class Settings(object):

    c = ConfigParser.ConfigParser()

    def __init__(self, pair):
        self._path = "./config/%s.ini" % pair

        printInfo("Checking config file .....\n")
        if not os.path.isfile(self._path):
            printInfo("No config file ..... will create one")
            self.createConfig()

        self.settings = self.parseConfig()

        if not self.validateConfig():
            quit()

    def parseConfig(self):
        settings = {}
        self.c.read(self._path)

        for o in self.c.options("Settings"):
            try:
                settings[o] = float(self.c.get("Settings", o))
            except ValueError:
                # the option is not numeric; keep it as a string
                settings[o] = self.c.get("Settings", o)

        return settings

    def createConfig(self):
        with open(self._path, 'w') as f:
            self.c.add_section('Settings')
            self.c.set('Settings', 'candlestickPeriod', 86400)
            self.c.set('Settings', 'candlestickHours', 168)
            self.c.set('Settings', 'bb_length', 10)
            self.c.set('Settings', 'bb_std', 2)
            self.c.set('Settings', 'maxBalance', 0.85)
            self.c.set('Settings', 'profitMargin', 1.1)
            self.c.set('Settings', 'longMargin', 0.97)
            self.c.set('Settings', 'shortMargin', 1.03)
            self.c.set('Settings', 'delta', 0.9)
            self.c.set('Settings', 'stopLimit', 0.05)
            self.c.set('Settings', 'stopLimitTimeout', 2)
            self.c.set('Settings', 'marginCloseTimeout', 2)
            self.c.set('Settings', "cycletimeout", 0)
            self.c.write(f)

        printSuccess(
            "Config file generated, please modify the config file and re-run this script"
        )
        quit()

    def validateConfig(self):
        ok = True

        if self.settings["longmargin"] >= 1.0:
            printError("Long margin cannot be >= 1.00")
            ok = False

        if self.settings["shortmargin"] <= 1.0:
            printError("Short margin cannot be <= 1.00")
            ok = False

        if self.settings["profitmargin"] <= 1.0:
            printError("Profit margin cannot be <= 1.0")
            ok = False

        if self.settings["candlestickperiod"] not in [
                300, 900, 1800, 7200, 14400, 86400
        ]:
            printError(
                "Invalid candle stick period, use 300, 900, 1800, 7200, 14400, or 86400"
            )
            ok = False

        if self.settings["maxbalance"] <= 0.0:
            printError("Max balance cannot be < 0.0")
            ok = False

        if self.settings["maxbalance"] >= 0.95:
            printError("Max balance cannot be >= 0.95")
            ok = False

        if self.settings["stoplimit"] <= 0.01:
            printError("Stop limit cannot be <= 0.01")
            ok = False

        if self.settings["stoplimittimeout"] < 0.5:
            printError("Stop limit timeout cannot be less than 0.5 hours")
            ok = False

        if self.settings["marginclosetimeout"] < 1:
            printError("Margin close timeout cannot be < 1")
            ok = False

        return ok

    def refresh(self):
        oldSettings = self.settings
        self.settings = self.parseConfig()

        if not self.validateConfig():
            self.settings = oldSettings
            quit()

        print "\n"
        for key in self.settings:
            if self.settings[key] != oldSettings[key]:
                printHeader(">> %s changed from %s to %s" %
                            (key, oldSettings[key], self.settings[key]))
Example #10
    def __init__(self):
        self.file = None
        self.config = ConfigParser.ConfigParser()
Example #11
def daemon():
    global MyPatching, backup_logger, hutil, run_result, run_status, error_msg, freezer, para_parser, snapshot_done, snapshot_info_array, g_fsfreeze_on, total_used_size
    #this is using the most recent file timestamp.
    hutil.do_parse_context('Executing')
    freezer = FsFreezer(patching=MyPatching, logger=backup_logger)
    global_error_result = None
    # precheck
    freeze_called = False
    configfile = '/etc/azure/vmbackup.conf'
    thread_timeout = str(60)

    #Adding python version to the telemetry
    try:
        python_version_info = sys.version_info
        python_version = str(sys.version_info[0]) + '.' + str(
            sys.version_info[1]) + '.' + str(sys.version_info[2])
        HandlerUtil.HandlerUtility.add_to_telemetery_data(
            "pythonVersion", python_version)
    except Exception as e:
        errMsg = 'Failed to do retrieve python version with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')

    try:
        if (freezer.mounts is not None):
            hutil.partitioncount = len(freezer.mounts.mounts)
        backup_logger.log(" configfile " + str(configfile), True)
        config = ConfigParsers.ConfigParser()
        config.read(configfile)
        if config.has_option('SnapshotThread', 'timeout'):
            thread_timeout = config.get('SnapshotThread', 'timeout')
    except Exception as e:
        errMsg = 'cannot read config file or file not present'
        backup_logger.log(errMsg, True, 'Warning')
    backup_logger.log("final thread timeout" + thread_timeout, True)

    snapshot_info_array = None

    try:
        # we need to freeze the file system first
        backup_logger.log('starting daemon', True)
        """
        protectedSettings is the privateConfig passed from Powershell.
        WATCHOUT that, the _context_config are using the most freshest timestamp.
        if the time sync is alive, this should be right.
        """
        if (hutil.is_prev_in_transition()):
            backup_logger.log(
                'retrieving the previous logs for this again inside daemon',
                True)
            backup_logger.set_prev_log()

        protected_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        commandToExecute = para_parser.commandToExecute
        #validate all the required parameter here
        backup_logger.log(commandToExecute, True)
        if (CommonVariables.iaas_install_command in commandToExecute.lower()):
            backup_logger.log('install succeed.', True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif (CommonVariables.iaas_vmbackup_command
              in commandToExecute.lower()):
            if (para_parser.backup_metadata is None
                    or para_parser.public_config_obj is None
                    or para_parser.private_config_obj is None):
                run_result = CommonVariables.error_parameter
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.
                                      ExtensionErrorCodeEnum.error_parameter)
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, True, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)
                """
                make sure the log is not doing when the file system is freezed.
                """
                temp_status = 'success'
                temp_result = CommonVariables.ExtensionTempTerminalState
                temp_msg = 'Transitioning state in extension'
                blob_report_msg, file_report_msg = get_status_to_report(
                    temp_status, temp_result, temp_msg, None)
                if (hutil.is_status_file_exists()):
                    status_report_to_file(file_report_msg)
                status_report_to_blob(blob_report_msg)
                backup_logger.log('doing freeze now...', True)
                #partial logging before freeze
                if (para_parser is not None
                        and para_parser.logsBlobUri is not None
                        and para_parser.logsBlobUri != ""):
                    backup_logger.commit_to_blob(para_parser.logsBlobUri)
                else:
                    backup_logger.log(
                        "the logs blob uri is not there, so do not upload log."
                    )
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)

                PluginHostObj = PluginHost(logger=backup_logger)
                PluginHostErrorCode, dobackup, g_fsfreeze_on = PluginHostObj.pre_check(
                )
                doFsConsistentbackup = False

                if not (PluginHostErrorCode
                        == CommonVariables.FailedPrepostPluginhostConfigParsing
                        or PluginHostErrorCode
                        == CommonVariables.FailedPrepostPluginConfigParsing
                        or PluginHostErrorCode ==
                        CommonVariables.FailedPrepostPluginhostConfigNotFound
                        or PluginHostErrorCode == CommonVariables.
                        FailedPrepostPluginhostConfigPermissionError
                        or PluginHostErrorCode
                        == CommonVariables.FailedPrepostPluginConfigNotFound
                        or PluginHostErrorCode == CommonVariables.
                        FailedPrepostPluginConfigPermissionError):
                    backup_logger.log(
                        'App Consistent Backup Enabled', True)
                    HandlerUtil.HandlerUtility.add_to_telemetery_data(
                        "isPrePostEnabled", "true")

                if (PluginHostErrorCode !=
                        CommonVariables.PrePost_PluginStatus_Success):
                    backup_logger.log(
                        'Triggering File System Consistent Backup because of error code'
                        + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.
                        StatusCodeStringBuilder(PluginHostErrorCode), True)
                    doFsConsistentbackup = True

                if not doFsConsistentbackup:
                    preResult = PluginHostObj.pre_script()
                    dobackup = preResult.continueBackup

                    if (g_fsfreeze_on == False and preResult.anyScriptFailed):
                        dobackup = False

                if dobackup:
                    freeze_snapshot(thread_timeout)
                    backup_logger.log('unfreeze ends...')

                if not doFsConsistentbackup:
                    postResult = PluginHostObj.post_script()
                    if not postResult.continueBackup:
                        dobackup = False

                    if (g_fsfreeze_on == False and postResult.anyScriptFailed):
                        dobackup = False

                if not dobackup:
                    if run_result == CommonVariables.success and PluginHostErrorCode != CommonVariables.PrePost_PluginStatus_Success:
                        run_status = 'error'
                        run_result = PluginHostErrorCode
                        hutil.SetExtErrorCode(PluginHostErrorCode)
                        error_msg = 'Plugin Host Precheck Failed'
                        error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                            hutil.ExtErrorCode)
                        backup_logger.log(error_msg, True)

                    if run_result == CommonVariables.success:
                        pre_plugin_errors = preResult.errors
                        for error in pre_plugin_errors:
                            if error.errorCode != CommonVariables.PrePost_PluginStatus_Success and error.errorCode != CommonVariables.PrePost_ScriptStatus_Warning:
                                run_status = 'error'
                                run_result = error.errorCode
                                hutil.SetExtErrorCode(error.errorCode)
                                error_msg = 'PreScript failed for the plugin ' + error.pluginName
                                error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                                    hutil.ExtErrorCode)
                                backup_logger.log(error_msg, True)
                                break

                    if run_result == CommonVariables.success:
                        post_plugin_errors = postResult.errors
                        for error in post_plugin_errors:
                            if error.errorCode != CommonVariables.PrePost_PluginStatus_Success and error.errorCode != CommonVariables.PrePost_ScriptStatus_Warning:
                                run_status = 'error'
                                run_result = error.errorCode
                                hutil.SetExtErrorCode(error.errorCode)
                                error_msg = 'PostScript failed for the plugin ' + error.pluginName
                                error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                                    hutil.ExtErrorCode)
                                backup_logger.log(error_msg, True)
                                break

                if run_result == CommonVariables.success and not doFsConsistentbackup and not (
                        preResult.anyScriptFailed
                        or postResult.anyScriptFailed):
                    run_status = 'success'
                    run_result = CommonVariables.success_appconsistent
                    hutil.SetExtErrorCode(
                        ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.
                        success_appconsistent)
                    error_msg = 'Enable Succeeded with App Consistent Snapshot'
                    backup_logger.log(error_msg, True)

        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            hutil.SetExtErrorCode(ExtensionErrorCodeHelper.
                                  ExtensionErrorCodeEnum.error_parameter)
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, True, 'Error')
    except Exception as e:
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')
        global_error_result = e
    """
    we do the final report here to get rid of the complex logic to handle the logging when file system be freezed issue.
    """
    try:
        if (global_error_result is not None):
            if (hasattr(global_error_result, 'errno')
                    and global_error_result.errno == 2):
                run_result = CommonVariables.error_12
                hutil.SetExtErrorCode(
                    ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error_12)
            elif (para_parser is None):
                run_result = CommonVariables.error_parameter
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.
                                      ExtensionErrorCodeEnum.error_parameter)
            else:
                run_result = CommonVariables.error
                hutil.SetExtErrorCode(
                    ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error)
            run_status = 'error'
            error_msg += ('Enable failed.' + str(global_error_result))
        status_report_msg = None
        HandlerUtil.HandlerUtility.add_to_telemetery_data(
            "extErrorCode",
            str(ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.
                ExtensionErrorCodeNameDict[hutil.ExtErrorCode]))
        total_used_size = -1
        blob_report_msg, file_report_msg = get_status_to_report(
            run_status, run_result, error_msg, snapshot_info_array)
        if (hutil.is_status_file_exists()):
            status_report_to_file(file_report_msg)
        status_report_to_blob(blob_report_msg)
    except Exception as e:
        errMsg = 'Failed to log status in extension'
        backup_logger.log(errMsg, True, 'Error')
    if (para_parser is not None and para_parser.logsBlobUri is not None
            and para_parser.logsBlobUri != ""):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.log(
            "the logs blob uri is not there, so do not upload log.")
        backup_logger.commit_to_local()

    sys.exit(0)
Example #12
class Grappy(scrapy.Spider):
    name = "grappy"

    config = ConfigParser.ConfigParser()
    config.read('config.ini')

    start_urls = [config.get('PAGES', 'LOGIN')]

    #send login form data to login page
    def parse(self, response):
        self.init_db()

        yield scrapy.FormRequest.from_response(
            response,
            formxpath='//form',
            formdata={
                'user[email]': self.config.get('AUTH', 'USERNAME'),
                'user[password]': self.config.get('AUTH', 'PASSWORD')
            },
            callback=self.after_login)

    #check if login was successful, and proceed with first profile to crawl
    def after_login(self, response):
        if "try again" in response.body:
            self.logger.error(
                "Login error! Make sure the credentials are set in the config.ini file"
            )
            return
        else:
            self.logger.info(
                'Successfully logged in, proceed with first profile!')
            baseurl = self.config.get('PAGES', 'START_PROFILE')
            yield response.follow(baseurl, self.action)

    #extract data from profile, save it and proceed with next profile
    def action(self, response):
        url = ''
        name = ''
        picture = ''
        details = ''
        dob = ''
        website = ''
        activity = ''
        interests = ''
        favourite = ''
        about = ''

        name = response.css('h1.userProfileName::text').extract()[0].strip()
        picture = response.xpath(
            '//img[@class="profilePictureIcon circularIcon circularIcon--huge circularIcon--border"]/@src'
        ).extract()[0]
        url = response.request.url
        titles = response.xpath("//div[@class='infoBoxRowTitle']").extract()
        items = response.xpath("//div[@class='infoBoxRowItem']").extract()

        for i in range(0, len(titles)):
            self.logger.info("key " + titles[i] + " value " + items[i].strip())
            if ("Details" in titles[i]):
                details = response.xpath(
                    "//div[@class='infoBoxRowItem']")[i].xpath(
                        "text()").extract_first().strip().replace("'", "`")
            if ("Birthday" in titles[i]):
                dob = response.xpath(
                    "//div[@class='infoBoxRowItem']")[i].xpath(
                        "text()").extract_first().strip().replace("'", "`")
            if ("Website" in titles[i]):
                website = response.xpath("//div[@class='infoBoxRowItem']")[
                    i].css('a::attr(href)').extract_first().replace("'", "`")
            if ("Activity" in titles[i]):
                activity = response.xpath(
                    "//div[@class='infoBoxRowItem']")[i].xpath(
                        "text()").extract_first().strip().replace("'", "`")
            if ("Interests" in titles[i]):
                interests = response.xpath(
                    "//div[@class='infoBoxRowItem']")[i].xpath(
                        "text()").extract_first().strip().replace("'", "`")
            if ("Favorite" in titles[i]):
                favourite = response.xpath(
                    "//div[@class='infoBoxRowItem']")[i].xpath(
                        "text()").extract_first().strip().replace("'", "`")
            if ("About" in titles[i]):
                about = response.xpath(
                    "//*[contains(@id, 'freeTextContainerdisplay_user')]/text()"
                )[0].extract().replace("'", "`")

        self.insert(url, name, picture, details, dob, website, activity,
                    interests, favourite, about)
        for href in response.css('div.friendName a::attr(href)'):
            yield response.follow(href, self.action)

    #insert profile information into DB
    def insert(self, url, name, picture, details, dob, website, activity,
               interests, favourite, about):
        conn = sqlite3.connect(self.config.get('OUTPUT', 'DB'))

        query = "INSERT INTO PROFILE (URL,NAME,PICTURE,DETAILS,DOB, WEBSITE, ACTIVITY, INTERESTS, FAVOURITES, ABOUT) VALUES ('" + url + "','" + name + "','" + picture + "','" + details + "','" + dob + "','" + website + "','" + activity + "','" + interests + "','" + favourite + "','" + about + "')"
        self.logger.info("execute query " + query)
        conn.execute(query)
        conn.commit()
        conn.close()

    #create DB and 'profile' table if they do not exist
    def init_db(self):
        conn = sqlite3.connect(self.config.get('OUTPUT', 'DB'))
        self.logger.info("Database created successfully")
        conn.execute('''CREATE TABLE IF NOT EXISTS PROFILE
            (URL        TEXT    NOT NULL,
             NAME       TEXT,
             PICTURE    TEXT,
             DETAILS    TEXT,
             DOB        TEXT,
             WEBSITE    TEXT,
             ACTIVITY   TEXT,
             INTERESTS  TEXT,
             FAVOURITES TEXT,
             ABOUT      TEXT);''')
        self.logger.info("Table created successfully")
        conn.close()
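The string-concatenated INSERT in Grappy.insert is vulnerable to SQL injection and breaks on quotes in scraped text (hence all the replace("'", "`") workarounds). A safer sketch of the same insert using sqlite3 parameter binding:

    def insert(self, url, name, picture, details, dob, website, activity,
               interests, favourite, about):
        conn = sqlite3.connect(self.config.get('OUTPUT', 'DB'))
        query = ("INSERT INTO PROFILE (URL, NAME, PICTURE, DETAILS, DOB, WEBSITE,"
                 " ACTIVITY, INTERESTS, FAVOURITES, ABOUT)"
                 " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")
        # sqlite3 escapes the bound values itself, so quotes in scraped text are safe
        conn.execute(query, (url, name, picture, details, dob, website,
                             activity, interests, favourite, about))
        conn.commit()
        conn.close()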
Example #13
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-b',
                        '--bucket',
                        help="The bucket name",
                        required=True)
    parser.add_argument('-o', '--object', help="The object URI", required=True)
    parser.add_argument('-e',
                        '--expires-in',
                        type=int,
                        nargs='?',
                        dest="expires_in",
                        help="How long the signed URL will be valid")
    parser.add_argument('-a',
                        '--access-key',
                        nargs='?',
                        dest="access_key",
                        default=None)
    parser.add_argument('-s',
                        '--secret-key',
                        nargs='?',
                        dest="secret_key",
                        default=None)

    if len(sys.argv[1:]) == 0:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()

    bucket = args.bucket
    key = args.object
    if args.expires_in:
        seconds = args.expires_in
    else:
        seconds = 60 * 60

    if args.access_key and args.secret_key:
        access_key = args.access_key
        secret_key = args.secret_key
    elif len([x for x in (args.access_key, args.secret_key)
              if x is not None]) == 1:
        parser.error('--access-key and --secret-key must be given together')
    else:
        config = ConfigParser.ConfigParser()
        config.read(expanduser("~/.boto"))
        if config.has_option('Credentials',
                             'aws_access_key_id') and config.has_option(
                                 'Credentials', 'aws_secret_access_key'):
            access_key = config.get('Credentials', 'aws_access_key_id')
            secret_key = config.get('Credentials', 'aws_secret_access_key')
        else:
            config.read(expanduser("~/.s3cfg"))
            if config.has_option('default',
                                 'access_key') and config.has_option(
                                     'default', 'secret_key'):
                access_key = config.get('default', 'access_key')
                secret_key = config.get('default', 'secret_key')
            else:
                print(
                    "You have to specify access_key and secret_key on the command line or in the ~/.boto, ~/.s3cfg."
                )
                sys.exit(1)

    conn = S3Connection(aws_access_key_id=access_key,
                        aws_secret_access_key=secret_key)

    try:
        buckets = conn.get_all_buckets()
    except S3ResponseError as e:
        print(e.message)
        sys.exit(1)

    if conn.lookup(bucket) is None:
        print('No such bucket!')
        sys.exit(1)
    else:
        if conn.get_bucket(bucket).get_key(key) is None:
            print('The key "{0}" does not exist'.format(key))
            sys.exit(1)
        else:
            print conn.generate_url(seconds,
                                    'GET',
                                    bucket,
                                    key,
                                    response_headers={
                                        'response-content-type':
                                        'application/octet-stream'
                                    })
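Example #13 uses the legacy boto package; with boto3 the same presigned URL can be produced roughly as follows (a sketch, not the original author's code):

import boto3

s3 = boto3.client('s3')   # credentials resolved via the usual boto3 lookup chain
url = s3.generate_presigned_url(
    'get_object',
    Params={'Bucket': bucket,
            'Key': key,
            'ResponseContentType': 'application/octet-stream'},
    ExpiresIn=seconds)
print(url)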
Example #14
    def __init__(self, config_path):
        self.config_path = config_path
        self.cc = ConfigParser.ConfigParser()
        # open() instead of the removed file() builtin
        self.cc.readfp(open(config_path))
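readfp was deprecated long ago; under Python 3's configparser the same load is spelled:

        with open(config_path) as fh:
            self.cc.read_file(fh)   # Python 3 replacement for readfp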
Example #15
def main():
    """
    Retrieve network indicators from mongodb, used by cuckoo. Pass to genStixDoc 
    to create the stix doc. Handle de-dup and filter lists for indicators, as 
    well as stix items created in previous runs.
    """
    ap = argparse.ArgumentParser()
    apg = ap.add_mutually_exclusive_group()
    apg.add_argument('--job-id', dest='jobId', default='', help='Cuckoo job id to query.')
    apg.add_argument('--md5', dest='md5', default='', help='File md5 hash to query.')
    apg.add_argument('--sha1', dest='sha1', default='', help='File sha1 hash to query.')
    apg.add_argument('--sha256', dest='sha256', default='', help='File sha256 hash to query.')
    apg.add_argument('--sha512', dest='sha512', default='', help='File sha512 hash to query.')
    args = ap.parse_args()
    config = ConfigParser.ConfigParser()
    config.read('app.conf')
    conn = pymongo.MongoClient(config.get('mongo','dbUrl'))
    with open(config.get('filterOut','fIpv4Addresses'), 'r+') as fIpv4AddressesFH:
        fIpv4Addresses = [line.rstrip('\n') for line in fIpv4AddressesFH]
    with open(config.get('filterOut','fHostNames'), 'r+') as fHostNamesFH:
        fHostNames = [line.rstrip('\n') for line in fHostNamesFH]
    # 'a+' instead of 'w+': 'w+' truncates the file, losing previously seen entries
    with open(config.get('filterOut','fSeenEntries'), 'a+') as fSeenEntriesFH:
        fSeenEntriesFH.seek(0)
        fSeenEntries = [line.rstrip('\n') for line in fSeenEntriesFH]

    networkItems = []
    ipv4Addresses = []
    hostNames = []
    _l.info('Starting...')

    fSeenEntriesFH = open(config.get('filterOut','fSeenEntries'), 'a', 0)
   
    # split(',') already returns a one-element list when there is no comma
    db_collection_names = config.get('mongo','dbCollectionNames').split(',')
    cuckoo_servers = config.get('dbsList','cuckoo').split(',')

    for dbkey, dbs in enumerate(cuckoo_servers):
        db = conn[dbs]
        mongo_collection = getattr(db, db_collection_names[dbkey])
        _l.debug('Connected to data source.')

        # Get a list of file names and hashes from db
        if args.jobId:
            cs = mongo_collection.aggregate([{"$match": {"info.id": int(args.jobId)}},
                                                {"$group": {"_id": {"targetFileSha1": "$target.file.sha1",
                                                 "targetFileSha256": "$target.file.sha256",
                                                 "targetFileSha512": "$target.file.sha512",
                                                 "targetFileSsdeep": "$target.file.ssdeep",
                                                 "targetFileMd5": "$target.file.md5",
                                                 "targetFileSize": "$target.file.size",
                                                 "targetFileName": "$target.file.name"}}}])
        elif args.md5:
            cs = mongo_collection.aggregate([{"$match": {"target.file.md5": args.md5}},
                                                {"$group": {"_id": {"targetFileSha1": "$target.file.sha1",
                                                 "targetFileSha256": "$target.file.sha256",
                                                 "targetFileSha512": "$target.file.sha512",
                                                 "targetFileSsdeep": "$target.file.ssdeep",
                                                 "targetFileMd5": "$target.file.md5",
                                                 "targetFileSize": "$target.file.size",
                                                 "targetFileName": "$target.file.name"}}}])
        elif args.sha1:
            cs = mongo_collection.aggregate([{"$match": {"target.file.sha1": args.sha1}},
                                                {"$group": {"_id": {"targetFileSha1": "$target.file.sha1",
                                                 "targetFileSha256": "$target.file.sha256",
                                                 "targetFileSha512": "$target.file.sha512",
                                                 "targetFileSsdeep": "$target.file.ssdeep",
                                                 "targetFileMd5": "$target.file.md5",
                                                 "targetFileSize": "$target.file.size",
                                                 "targetFileName": "$target.file.name"}}}])
        elif args.sha256:
            cs = mongo_collection.aggregate([{"$match": {"target.file.sha256": args.sha256}},
                                                {"$group": {"_id": {"targetFileSha1": "$target.file.sha1",
                                                 "targetFileSha256": "$target.file.sha256",
                                                 "targetFileSha512": "$target.file.sha512",
                                                 "targetFileSsdeep": "$target.file.ssdeep",
                                                 "targetFileMd5": "$target.file.md5",
                                                 "targetFileSize": "$target.file.size",
                                                 "targetFileName": "$target.file.name"}}}])
        elif args.sha512:
            cs = mongo_collection.aggregate([{"$match": {"target.file.sha512": args.sha512}},
                                                {"$group": {"_id": {"targetFileSha1": "$target.file.sha1",
                                                 "targetFileSha256": "$target.file.sha256",
                                                 "targetFileSha512": "$target.file.sha512",
                                                 "targetFileSsdeep": "$target.file.ssdeep",
                                                 "targetFileMd5": "$target.file.md5",
                                                 "targetFileSize": "$target.file.size",
                                                 "targetFileName": "$target.file.name"}}}])
        else:
            cs = mongo_collection.aggregate([{"$group": {"_id": {"targetFileSha1": "$target.file.sha1",
                                                         "targetFileSha256": "$target.file.sha256",
                                                         "targetFileSha512": "$target.file.sha512",
                                                         "targetFileSsdeep": "$target.file.ssdeep",
                                                         "targetFileMd5": "$target.file.md5",
                                                         "targetFileSize": "$target.file.size",
                                                         "targetFileName": "$target.file.name"}}}])
        _l.debug('Executed initial aggregation query.')
        for i in cs['result']:
            try:
                # Get all network indicators: addresses and names
                networkItems[:] = []
                ipv4Addresses[:] = []
                hostNames[:] = []
                networkUdpSrc = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.udp.src')
                networkUdpDst = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.udp.dst')
                networkIcmpSrc = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.icmp.src')
                networkIcmpDst = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.icmp.dst')
                networkTcpSrc = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.tcp.src')
                networkTcpDst = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.tcp.dst')
                networkDnsAnswersData = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.dns.answers.data')
                networkDomainsIp = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.domains.ip')
                networkHttpHost = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.http.host')
                networkHosts = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.hosts')
                networkDnsRequest = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.dns.request')
                networkDomainsDomain = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('network.domains.domain')
    
                # Aggregate all found items and remove duplicates and empty
                networkItems += networkUdpSrc + networkUdpDst + networkIcmpSrc + \
                    networkIcmpDst + networkTcpSrc + networkTcpDst + \
                    networkDnsAnswersData + networkDomainsIp + networkHttpHost + \
                    networkHosts + networkDnsRequest + networkDomainsDomain
                networkItems = list(set(networkItems))
                networkItems = filter(None, networkItems)
                
                # Split into one list for addresses and one for host names
                ipv4Addresses = keepAddresses(networkItems[:])
                hostNames = keepHostNames(networkItems[:])
    
                # Delete addresses and host names if in whitelist files
                ipv4Addresses = delIfMatchedAddr(ipv4Addresses, fIpv4Addresses)
                hostNames = delIfMatchedHostName(hostNames, fHostNames)
    
                # Get file names
                targetFileName = mongo_collection.find(
                    {
                        "target.file.sha1": i['_id']['targetFileSha1'],
                        "target.file.sha256": i['_id']['targetFileSha256'],
                        "target.file.sha512": i['_id']['targetFileSha512'],
                        "target.file.ssdeep": i['_id']['targetFileSsdeep'],
                        "target.file.md5": i['_id']['targetFileMd5'],
                        "target.file.size": i['_id']['targetFileSize'],
                        "target.file.name": i['_id']['targetFileName']}).distinct('target.file.name')
               
                # Call the function to create the output, check if seen before first
                if str(i['_id']['targetFileSha1']) + ',' + \
                    str(i['_id']['targetFileSha256']) + ',' + \
                    str(i['_id']['targetFileSha512']) + ',' + \
                    str(i['_id']['targetFileSsdeep']) + ',' + \
                    str(i['_id']['targetFileMd5']) + ',' + \
                    str(i['_id']['targetFileSize']) not in str(fSeenEntries):
                        if ipv4Addresses or hostNames:
                            genStixDoc(config.get('output','outputDir'),
                                       str(i['_id']['targetFileSha1']),
                                       str(i['_id']['targetFileSha256']),
                                       str(i['_id']['targetFileSha512']),
                                       str(i['_id']['targetFileSsdeep']),
                                       str(i['_id']['targetFileMd5']),
                                       str(i['_id']['targetFileSize']),
                                       i['_id']['targetFileName'],
                                       ipv4Addresses,
                                       hostNames)
                            # Write to file so that we can read back in as filter later
                            fSeenEntriesFH.write(str(i['_id']['targetFileSha1']) + ',' + \
                                str(i['_id']['targetFileSha256']) + ',' + \
                                str(i['_id']['targetFileSha512']) + ',' + \
                                str(i['_id']['targetFileSsdeep']) + ',' + \
                                str(i['_id']['targetFileMd5']) + ',' + \
                                str(i['_id']['targetFileSize']) + '\n')
                            _l.debug('Updated SeenEntries file with: ' + \
                                str(i['_id']['targetFileSha256']) + ',' + \
                                str(i['_id']['targetFileSha512']) + ',' + \
                                str(i['_id']['targetFileSsdeep']) + ',' + \
                                str(i['_id']['targetFileMd5']) + ',' + \
                                str(i['_id']['targetFileSize']) + \
                                ' since content has been written to stix file.\n')
            except Exception as e:
                import traceback
                tb = traceback.format_exc()
                _l.error('Row failed due to: ' + str(e) + "\n\n" + str(tb) + "\n\n" + str(repr(i)))
        conn.disconnect()
    fSeenEntriesFH.close()  # actually close the handle; .closed was a no-op attribute access
    _l.info('Ended.')
Example #16
def run_qemu_io_blkdebug(test, params, env):
    """
    Run qemu-io blkdebug tests:
    1. Create image with given parameters
    2. Write the blkdebug config file
    3. Try to do operate in image with qemu-io and get the error message
    4. Get the error message from perror by error number set in config file
    5. Compare the error message

    @param test:   kvm test object
    @param params: Dictionary with the test parameters
    @param env:    Dictionary with test environment.
    """
    tmp_dir = params.get("tmp_dir", "/tmp")
    blkdebug_cfg = utils_misc.get_path(tmp_dir, params.get("blkdebug_cfg",
                                                            "blkdebug.cfg"))
    err_command = params.get("err_command")
    err_event = params.get("err_event")
    errn_list = re.split(r"\s+", params.get("errn_list").strip())
    re_std_msg = params.get("re_std_msg")
    test_timeout = int(params.get("test_timeout", "60"))
    pre_err_commands = params.get("pre_err_commands")
    image = params.get("images")
    blkdebug_default = params.get("blkdebug_default")

    error.context("Create image", logging.info)
    image_io = QemuImg(params.object_params(image), test.bindir, image)
    image_name = image_io.create(params.object_params(image))

    template_name = utils_misc.get_path(test.virtdir, blkdebug_default)
    template = ConfigParser.ConfigParser()
    template.read(template_name)

    for errn in errn_list:
        log_filename = utils_misc.get_path(test.outputdir,
                                           "qemu-io-log-%s" % errn)
        error.context("Write the blkdebug config file", logging.info)
        template.set("inject-error", "event", '"%s"' % err_event)
        template.set("inject-error", "errno", '"%s"' % errn)

        error.context("Write blkdebug config file", logging.info)
        blkdebug = None
        try:
            blkdebug = open(blkdebug_cfg, 'w')
            template.write(blkdebug)
        finally:
            if blkdebug is not None:
                blkdebug.close()

        error.context("Operate in qemu-io to trigger the error", logging.info)
        session = qemu_io.QemuIOShellSession(test, params, image_name,
                                             blkdebug_cfg=blkdebug_cfg,
                                             log_filename=log_filename)
        if pre_err_commands:
            for cmd in re.split(",", pre_err_commands.strip()):
                session.cmd_output(cmd, timeout=test_timeout)

        output = session.cmd_output(err_command, timeout=test_timeout)
        error.context("Get error message from command perror", logging.info)
        perror_cmd = "perror %s" % errn
        std_msg = utils.system_output(perror_cmd)
        std_msg = re.findall(re_std_msg, std_msg)
        if std_msg:
            std_msg = std_msg[0]
        else:
            std_msg = ""
            logging.warning("Can not find error message from perror")

        session.close()
        error.context("Compare the error message", logging.info)
        if std_msg:
            if std_msg in output:
                logging.info("Error message is correct in qemu-io")
            else:
                fail_log = "The error message is mismatch:"
                fail_log += "qemu-io reports: '%s'," % output
                fail_log += "perror reports: '%s'" % std_msg
                raise error.TestFail(fail_log)
        else:
            logging.warning("Can not find error message from perror."
                            " The output from qemu-io is %s" % output)
Example #17
    def __init__(self, ID=0):
        Cf = ConfigParser.ConfigParser()
        Cf.read(pyu.getlong('agent.ini', 'ini'))
        self.ag_opt = dict(Cf.items(ID))
        self.parse()
Example #18
0
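# plotDiagram assumes the interactive pylab namespace (from pylab import *):
# load() reads the whitespace-separated .dat file like numpy.loadtxt, and
# griddata/frange come from matplotlib.mlab in older matplotlib releases.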
def plotDiagram(name):
    filename = name + ".dat"

    X = load(filename)

    flowWEA = X[:, 0]
    flowNSA = X[:, 1]
    qWEA = X[:, 2]
    qNSA = X[:, 3]
    avgDelayWEA = X[:, 4]
    avgDelayNSA = X[:, 5]
    avgDelayA = X[:, 6]
    greenWEA = X[:, 7]
    greenNSA = X[:, 8]

    filename = name + '.ini'
    ini = ConfigParser()
    ini.read(filename)

    N = ini.getint("general", "N")
    cut = ini.getboolean("general", "cut")

    distrWE = ini.get("demand", "distrWE")
    distrNS = ini.get("demand", "distrNS")

    tlType = ini.get("TL", "tlType")

    vehphWEA = eval(ini.get("demand", "vehphWEA"))
    vehphNSA = eval(ini.get("demand", "vehphNSA"))
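    # eval() of config values is trusted-input only; ast.literal_eval would be
    # the safer choice for these (presumably list-literal) values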

    maxDelay = max(max(avgDelayWEA), max(avgDelayNSA))

    maxD = max(avgDelayA)
    c = maxD / (exp(1) - 1)
    V = [c * (exp(i) - 1) for i in frange(0, 1, 0.01)]
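    # V holds exponentially spaced contour levels up to maxD; as written, V and
    # maxDelay are unused (the contourf() calls below pass fixed ranges, and
    # the clim() calls that would use maxDelay are commented out)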

    [X, Y] = meshgrid(vehphWEA, vehphNSA)

    Z = griddata(flowWEA, flowNSA, avgDelayWEA, X, Y)

    figure(figsize=(12, 12))
    subplot(2, 2, 1)
    # contour(X, Y, Z, 100)
    contourf(X, Y, Z, range(0, 180, 5))

    colorbar()
    title('Average Delay WE')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    Z = griddata(flowWEA, flowNSA, avgDelayNSA, X, Y)

    subplot(2, 2, 2)
    # contour(X, Y, Z, 100)
    contourf(X, Y, Z, range(0, 180, 5))
    colorbar()
    title('Average Delay NS')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    Z = griddata(qWEA, qNSA, avgDelayWEA, X, Y)

    avgDelayWECut = [Z[i][i] for i in range(len(Z))]
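    # grid diagonal: the symmetric cut where flow q WE == flow q NS, used for
    # the CUT plot at the end of the function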

    subplot(2, 2, 3)
    # contour(X, Y, Z, 100)
    contourf(X, Y, Z, range(0, 180, 5))
    colorbar()
    title('Average Delay WE')
    xlabel('Flow q WE')
    ylabel('Flow q NS')

    Z = griddata(qWEA, qNSA, avgDelayNSA, X, Y)

    avgDelayNSCut = [Z[i][i] for i in range(len(Z))]

    subplot(2, 2, 4)
    # contour(X, Y, Z, 100)
    contourf(X, Y, Z, range(0, 180, 5))
    colorbar()
    title('Average Delay NS')
    xlabel('Flow q WE')
    ylabel('Flow q NS')

    suptitle("Average Delay (WE " +
             ("poisson" if distrWE == 'p' else "uniform") + ", NS " +
             ("poisson" if distrNS == 'p' else "uniform") + ", " + tlType +
             ")")

    savefig(name + "AvgDelayNSWE.png")
    close()

    Z = griddata(flowWEA, flowNSA, avgDelayA, X, Y)

    figure(figsize=(12, 6))
    subplot(1, 2, 1)
    # contour(X, Y, Z, 100)
    contourf(X, Y, Z, range(0, 180, 5))
    colorbar()
    title('Average Delay')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    Z = griddata(qWEA, qNSA, avgDelayA, X, Y)

    avgDelayCut = [Z[i][i] for i in range(len(Z))]

    subplot(1, 2, 2)
    # contour(X, Y, Z, 100)
    contourf(X, Y, Z, range(0, 180, 5))
    #    clim(0, maxDelay)
    colorbar()
    title('Average Delay')
    xlabel('Flow q WE')
    ylabel('Flow q NS')

    suptitle("Average Delay (WE " +
             ("poisson" if distrWE == 'p' else "uniform") + ", NS " +
             ("poisson" if distrNS == 'p' else "uniform") + ", " + tlType +
             ")")

    savefig(name + "AvgDelay.png")
    close()

    Z = griddata(flowWEA, flowNSA, qWEA, X, Y)

    figure(figsize=(12, 6))
    subplot(1, 2, 1)
    contourf(X, Y, Z, range(0, 1250, 50))
    colorbar()
    title('Flow q WE')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    Z = griddata(flowWEA, flowNSA, qNSA, X, Y)

    subplot(1, 2, 2)
    contourf(X, Y, Z, range(0, 1250, 50))
    colorbar()
    title('Flow q NS')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    suptitle("Input flow vs. flow q (WE " +
             ("poisson" if distrWE == 'p' else "uniform") + ", NS " +
             ("poisson" if distrNS == 'p' else "uniform") + ", " + tlType +
             ")")

    savefig(name + "flowvsq.png")

    close()

    maxGreen = max(max(greenWEA), max(greenNSA))

    Z = griddata(flowWEA, flowNSA, greenWEA, X, Y)

    figure(figsize=(12, 12))
    subplot(2, 2, 1)
    contourf(X, Y, Z, range(0, 100, 5))
    #   clim(0, maxGreen)
    colorbar()
    title('Green-time WE')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    Z = griddata(flowWEA, flowNSA, greenNSA, X, Y)

    subplot(2, 2, 2)
    contourf(X, Y, Z, range(0, 100, 5))
    #    clim(0, maxGreen)
    colorbar()
    title('Green-time NS')
    xlabel('Input flow WE')
    ylabel('Input flow NS')

    Z = griddata(qWEA, qNSA, greenWEA, X, Y)

    greenWECut = [Z[i][i] for i in range(len(Z))]

    subplot(2, 2, 3)
    contourf(X, Y, Z, range(0, 100, 5))
    #   clim(0, maxGreen)
    colorbar()
    title('Green-time WE')
    xlabel('Flow q WE')
    ylabel('Flow q NS')

    Z = griddata(qWEA, qNSA, greenNSA, X, Y)

    greenNSCut = [Z[i][i] for i in range(len(Z))]

    subplot(2, 2, 4)
    contourf(X, Y, Z, range(0, 100, 5))
    colorbar()
    title('Green-time NS')
    xlabel('Flow q WE')
    ylabel('Flow q NS')

    suptitle("Green-time (WE " + ("poisson" if distrWE == 'p' else "uniform") +
             ", NS " + ("poisson" if distrNS == 'p' else "uniform") + ", " +
             tlType + ")")

    savefig(name + "GreenTime.png")
    close()

    q = X[0]

    figure()
    title("CUT  (WE " + ("poisson" if distrWE == 'p' else "uniform") +
          ", NS " + ("poisson" if distrNS == 'p' else "uniform") + ", " +
          tlType + ")")
    xlabel('Flow q')
    plot(q[0:15], avgDelayWECut[0:15], q[0:15], avgDelayNSCut[0:15], q[0:15],
         avgDelayCut[0:15], q[0:15], greenWECut[0:15], q[0:15],
         greenNSCut[0:15])
    legend(('avgDelayWE', 'avgDelayNS', 'avgDelay', 'greenWE', 'greenNS'),
           loc='upper left')
    savefig(name + "CUT.png")
    close()
Example #19
0
USE_SETUPCFG = os.environ.get('USE_SETUPCFG')
# override use of setup.cfg with env var.
if USE_SETUPCFG is not None:
    USE_SETUPCFG = bool(int(USE_SETUPCFG))
else:
    USE_SETUPCFG = True
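# e.g. (assumed usage): running with USE_SETUPCFG=0 in the environment skips
# setup.cfg entirely and configures the build from environment variables alone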

setup_cfg = 'setup.cfg'
# contents of setup.cfg will override env vars, unless
# USE_SETUPCFG evaluates to False. Exception is use_ncconfig,
# which does not take precedence over the USE_NCCONFIG env var.
ncconfig = None
use_ncconfig = None
if USE_SETUPCFG and os.path.exists(setup_cfg):
    sys.stdout.write('reading from setup.cfg...\n')
    config = configparser.ConfigParser()
    config.read(setup_cfg)
    try:
        HDF5_dir = config.get("directories", "HDF5_dir")
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass
    try:
        HDF5_libdir = config.get("directories", "HDF5_libdir")
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass
    try:
        HDF5_incdir = config.get("directories", "HDF5_incdir")
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass
    try:
        netCDF4_dir = config.get("directories", "netCDF4_dir")
Example #20
0
def compareData(name1, name2, outname):
    filename = name1 + '.ini'
    ini = ConfigParser()
    ini.read(filename)

    distrWE = ini.get("demand", "distrWE")
    distr = ("poisson" if (distrWE == 'p') else "uniform")

    vehphWEA = eval(ini.get("demand", "vehphWEA"))
    vehphNSA = eval(ini.get("demand", "vehphNSA"))

    filename1 = name1 + ".dat"

    data1 = load(filename1)

    flowWEA1 = data1[:, 0]
    flowNSA1 = data1[:, 1]
    qWEA1 = data1[:, 2]
    qNSA1 = data1[:, 3]
    avgDelayWEA1 = data1[:, 4]
    avgDelayNSA1 = data1[:, 5]
    avgDelayA1 = data1[:, 6]
    greenWEA1 = data1[:, 7]
    greenNSA1 = data1[:, 8]

    filename2 = name2 + ".dat"

    data2 = load(filename2)

    flowWEA2 = data2[:, 0]
    flowNSA2 = data2[:, 1]
    qWEA2 = data2[:, 2]
    qNSA2 = data2[:, 3]
    avgDelayWEA2 = data2[:, 4]
    avgDelayNSA2 = data2[:, 5]
    avgDelayA2 = data2[:, 6]
    greenWEA2 = data2[:, 7]
    greenNSA2 = data2[:, 8]

    [X, Y] = meshgrid(vehphWEA, vehphNSA)

    figure(figsize=(6, 6))

    Z1 = array(griddata(flowWEA1, flowNSA1, avgDelayA1, X, Y))
    Z2 = array(griddata(flowWEA2, flowNSA2, avgDelayA2, X, Y))

    Z = Z1 / Z2
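    # element-wise ratio of the two runs' average delays; per the title below,
    # values above 1 mean the VA run has the larger delay than the FC run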

    subplot(1, 1, 1)
    contourf(X, Y, Z, concatenate((frange(0, 2, 0.1), frange(2, 3, 1))))
    xlabel('Input flow WE')
    ylabel('Input flow NS')
    colorbar()

    title('Average Delay VA vs. FC (ratio), ' + distr)
    savefig(outname + ".png")

    Z1 = array(griddata(flowWEA1, flowNSA1, avgDelayWEA1, X, Y))
    Z2 = array(griddata(flowWEA2, flowNSA2, avgDelayWEA2, X, Y))

    Z = Z1 / Z2

    figure(figsize=(12, 6))
    subplot(1, 2, 1)
    contourf(X, Y, Z, concatenate((frange(0, 2, 0.1), frange(2, 3, 1))))
    xlabel('Input flow WE')
    ylabel('Input flow NS')
    title('Average Delay WE')
    colorbar()

    Z1 = array(griddata(flowWEA1, flowNSA1, avgDelayNSA1, X, Y))
    Z2 = array(griddata(flowWEA2, flowNSA2, avgDelayNSA2, X, Y))

    Z = Z1 / Z2

    subplot(1, 2, 2)
    contourf(X, Y, Z, concatenate((frange(0, 2, 0.1), frange(2, 3, 1))))
    xlabel('Input flow WE')
    ylabel('Input flow NS')
    title('Average Delay NS')
    colorbar()

    suptitle('Average Delay VA vs. FC (ratio), ' + distr)
    savefig(outname + "WENS.png")
Example #21
0
import time
import gc
import logging

from sklearn import cross_validation, metrics
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import classification_report

from pca.PCAColumn import PCAColumn
from Utils.config import *
from Utils.string_utils import get_bool_value
from pca.PCAForXgb import PCAForXgb
import ConfigParser

ENV_CONFIG = ConfigParser.ConfigParser()
ENV_CONFIG.read("env.conf")
LEARNING_ALG = ENV_CONFIG.get('LEARNING', 'algorithm')
ACTIVATE_FS = get_bool_value(ENV_CONFIG.get('LEARNING', 'feature_selector'))
FS_PCT = int(ENV_CONFIG.get('LEARNING', 'feature_selector_pct'))

REMOVE_PCA_FEATURES = get_bool_value(
    ENV_CONFIG.get('LEARNING', 'remove_pca_processed_features'))
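# a minimal env.conf matching the reads above (section and option names taken
# from the code; the values are placeholder assumptions):
# [LEARNING]
# algorithm = xgboost
# feature_selector = true
# feature_selector_pct = 20
# remove_pca_processed_features = false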


class ExprWithPCA(PCAForXgb):
    def preprocess(self,
                   ori_file_data,
                   all_pca_added_df,
                   all_encoders,
                   is_training=False):
Example #22
0
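# ~/.gitlab.cfg is expected to look roughly like this (placeholder values):
# [default]
# host = https://gitlab.example.com
# token = <private-token>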
    def connect(self):
        self.config = ConfigParser.ConfigParser()
        self.config.read([os.path.expanduser('~/.gitlab.cfg')])
        host = self.config.get("default", "host")
        token = self.config.get("default", "token")
        return gitlab.Gitlab(host=host, token=token)
Example #23
0
    def getSections(self):
        config = ConfigParser.ConfigParser()
        config.read(self.__conFile)
        return config.sections()
Example #24
0
#bed leveling tool
import time
import sys, os
import serial
from subprocess import call
import numpy as np
import json
import ConfigParser
import logging
import re



config = ConfigParser.ConfigParser()
config.read('/var/www/lib/config.ini')

serialconfig = ConfigParser.ConfigParser()
serialconfig.read('/var/www/lib/serial.ini')

# check if the lock file exists
if os.path.isfile(config.get('task', 'lock_file')):
    print "printer busy"
    sys.exit()

#Args
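# expected invocation (an assumption based on the argument reads below), e.g.:
#   python bed_leveling.py /var/log/level.log /var/log/trace.log 50.0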
try:
	logfile = str(sys.argv[1])    # param for the log file
	log_trace = str(sys.argv[2])  # trace log file
	fix_d = float(sys.argv[3])    # height of the plane (smaller = higher)

except:
Example #25
0
        self.worksheet_v4.write(GLOBAL_LINE_v4, 6, line_data["lego_memory_v4"], self.style)


if __name__ == '__main__':
    # init mylogging
    logger = mylogging.init_log(logging.DEBUG)

    global GLOBAL_LINE_5117
    global GLOBAL_LINE_v3
    global GLOBAL_LINE_v4

    # init excel
    wow = LoadExcel()
    # init config_parser
    try:
        cf = ConfigParser.ConfigParser()
        cf.read("../conf/load_config.conf")
        threadsize_list=[1,2,6,10,12]
        #threadsize_list=[1]
        if len(sys.argv) == 2:
            #TODO
            model_list = sys.argv[1].split(",")
            for model in model_list:
                conf_name = "conf_%s" % model
                for thread_size in threadsize_list:
                    print "==========model: %s,thread_size: %s================" % (model, thread_size)
                    try:
                        db_name = cf.get(conf_name, "test_db") % thread_size
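                        # the test_db option embeds a thread-size placeholder;
                        # with ConfigParser interpolation active, a literal
                        # '%s' must be stored as '%%s' in load_config.conf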
                    except Exception as e:
                        print ("[error]: Pls Check The Modle:%s input wrong!" % model)
                        sys.exit(1)
Example #26
0
def main():
    parser = optparse.OptionParser(usage="usage: %prog [options]")
    parser.add_option("-S",
                      "--protocol",
                      dest="protocol",
                      default="http",
                      help="default is http, set to https if required")
    parser.add_option("-P",
                      "--port",
                      dest="port",
                      default="8080",
                      help="Set Ambari Protocol")
    parser.add_option("-u",
                      "--username",
                      dest="username",
                      default="admin",
                      help="Ambari Username")
    parser.add_option("-p",
                      "--password",
                      dest="password",
                      default="admin",
                      help="Ambari Password")
    parser.add_option("-H",
                      "--host",
                      dest="host",
                      default="localhost",
                      help="Ambari Host")
    parser.add_option(
        "-C",
        "--configfile",
        dest="configs",
        default="./configs",
        help="Config file containing key and truststore information")

    (options, args) = parser.parse_args()
    global username
    global password
    global port
    global protocol
    global host
    global clustername
    username = options.username
    password = options.password
    port = options.port
    protocol = options.protocol
    host = options.host
    clustername = ambariREST(
        protocol, host, port, username, password,
        "api/v1/clusters")["items"][0]["Clusters"]["cluster_name"]
    installedservices = [
        line["ServiceInfo"]["service_name"] for line in ambariREST(
            protocol, host, port, username, password, "api/v1/clusters/" +
            clustername + "/services")["items"]
    ]
    definitions = loaddefinitions()
    ambari = ambariProps(protocol, host, port, username, password, clustername)
    updater = propertiesupdater(definitions)
    Config = ConfigParser.ConfigParser()
    Config.read(options.configs)
    os.popen('PYTHONHTTPSVERIFY=0')
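    # note: os.popen runs this assignment in a child shell, so it does not
    # change this process's environment; os.environ['PYTHONHTTPSVERIFY'] = '0'
    # would be needed for that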
    global changeprops
    changeprops = {
        "KEYSTORELOC":
        Config.get("Configs", "KeyStoreLocation") + '/server.jks',
        "KEYSTORERANGER":
        Config.get("Configs", "KeyStoreLocation") + '/ranger-plugin.jks',
        "KEYPASS":
        Config.get("Configs", "KeyStorePassword"),
        "TRUSTSTORELOC":
        Config.get("Configs", "TrustStoreLocation") + '/truststore.jks',
        "TRUSTSTOREPASS":
        Config.get("Configs", "TrustStorePassword"),
        "RANGERCOMMONNAME":
        'ranger.' + Config.get("Configs", "Domain"),
        "RANGERURL":
        replaceurl(ambari.get("admin-properties", "policymgr_external_url"),
                   6182),
        "TIMELINEURL":
        ambari.get("yarn-site",
                   "yarn.timeline-service.webapp.address").split(':')[0] +
        ':8190',
        "HISTORYURL":
        replaceurl(
            'http://' +
            ambari.get("mapred-site", "mapreduce.jobhistory.webapp.address"),
            19889),
        "KMSURL":
        str(
            ambari.get("core-site",
                       "hadoop.security.key.provider.path").replace(
                           ':9292', ':9393')).replace('//http@', '//https@'),
        "ATLASURL":
        replaceurl(ambari.get("application-properties", "atlas.rest.address"),
                   21443)
    }
    for service in installedservices:
        if service in definitions.keys():
            updater.service(service)
        else:
            continue
Example #27
0
    def user_configuration(self, configFile=None):

        # get a logger
        logger = logging.getLogger("configuration")

        # load and parse the provided configFile, if provided
        if not configFile:
            logger.warning(
                'no user configuration file provided; using only built-in default settings'
            )
            return

        # load the config file
        try:
            configparser = ConfigParser.ConfigParser()
            configparser.readfp(open(configFile))
            logger.debug(
                'successfully read and parsed user configuration file %s' %
                configFile)
        except:
            logger.fatal('error reading user configuration file %s' %
                         configFile)
            raise

        #work_dir must be provided before initialising other directories
        self.work_dir = None

        if self.work_dir is None:
            try:
                self.work_dir = configparser.get('Paths', 'work')

            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
                if self.work_dir is None:
                    logger.critical('Paths:work has no value!')
                    raise Exception

        # look for those items that are user-configurable, and get their values
        # sptk_bindir= ....

        # a list instead of a dict because OrderedDict is not available until 2.7
        # and I don't want to import theano here just for that one class
        # each entry is a tuple of (variable name, default value, section in config file, option name in config file)
        #
        # the type of the default value is important and controls the type that the corresponding
        # variable will have
        #
        # to set a default value of 'undefined' use an empty string
        # or the special value 'impossible', as appropriate
        #
        impossible_int = int(-99999)
        impossible_float = float(-99999.0)

        user_options = [
            ('work_dir', self.work_dir, 'Paths', 'work'),
            ('data_dir', '', 'Paths', 'data'),
            ('plot_dir', '', 'Paths', 'plot'),
            ('plot', False, 'Utility', 'plot'),
            ('profile', False, 'Utility', 'profile'),
            ('file_id_scp', os.path.join(self.work_dir,
                                         'data/file_id_list.scp'), 'Paths',
             'file_id_list'),
            ('test_id_scp', os.path.join(self.work_dir,
                                         'data/test_id_list.scp'), 'Paths',
             'test_id_list'),
            ('GV_dir', os.path.join(self.work_dir,
                                    'data/GV'), 'Paths', 'GV_dir'),
            ('in_stepw_dir', os.path.join(self.work_dir, 'data/stepw'),
             'Paths', 'in_stepw_dir'),
            ('in_mgc_dir', os.path.join(self.work_dir,
                                        'data/mgc'), 'Paths', 'in_mgc_dir'),
            ('in_lf0_dir', os.path.join(self.work_dir,
                                        'data/lf0'), 'Paths', 'in_lf0_dir'),
            ('in_bap_dir', os.path.join(self.work_dir,
                                        'data/bap'), 'Paths', 'in_bap_dir'),
            ('in_sp_dir', os.path.join(self.work_dir,
                                       'data/sp'), 'Paths', 'in_sp_dir'),
            ('in_seglf0_dir', os.path.join(self.work_dir, 'data/lf03'),
             'Paths', 'in_seglf0_dir'),

            ## for glottHMM
            ('in_F0_dir', os.path.join(self.work_dir,
                                       'data/F0'), 'Paths', 'in_F0_dir'),
            ('in_Gain_dir', os.path.join(self.work_dir,
                                         'data/Gain'), 'Paths', 'in_Gain_dir'),
            ('in_HNR_dir', os.path.join(self.work_dir,
                                        'data/HNR'), 'Paths', 'in_HNR_dir'),
            ('in_LSF_dir', os.path.join(self.work_dir,
                                        'data/LSF'), 'Paths', 'in_LSF_dir'),
            ('in_LSFsource_dir', os.path.join(self.work_dir, 'data/LSFsource'),
             'Paths', 'in_LSFsource_dir'),

            ## for joint duration
            ('in_seq_dur_dir', os.path.join(self.work_dir, 'data/S2S_dur'),
             'Paths', 'in_seq_dur_dir'),
            ('in_dur_dir', os.path.join(self.work_dir,
                                        'data/dur'), 'Paths', 'in_dur_dir'),
            ('nn_norm_temp_dir',
             os.path.join(self.work_dir,
                          'data/step_hidden9'), 'Paths', 'nn_norm_temp_dir'),
            ('process_labels_in_work_dir', False, 'Labels',
             'process_labels_in_work_dir'),
            ('label_style', 'HTS', 'Labels', 'label_style'),
            ('label_type', 'state_align', 'Labels', 'label_type'),
            ('in_label_align_dir',
             os.path.join(self.work_dir,
                          'data/label_state_align'), 'Labels', 'label_align'),
            ('question_file_name',
             os.path.join(self.work_dir, 'data/questions.hed'), 'Labels',
             'question_file_name'),
            ('silence_pattern', ['*-#+*'], 'Labels', 'silence_pattern'),
            ('subphone_feats', 'full', 'Labels', 'subphone_feats'),
            ('xpath_file_name',
             os.path.join(self.work_dir, 'data/xml_labels/xpaths.txt'),
             'Labels', 'xpath_file_name'),
            ('label_config_file', 'configuration/examplelabelconfigfile.py',
             'Labels', 'label_config'),
            ('add_frame_features', True, 'Labels', 'add_frame_features'),
            ('fill_missing_values', False, 'Labels', 'fill_missing_values'),
            ('xpath_label_align_dir',
             os.path.join(self.work_dir, 'data/label_state_align'), 'Labels',
             'xpath_label_align'),
            ('enforce_silence', False, 'Labels', 'enforce_silence'),
            ('remove_silence_using_binary_labels', False, 'Labels',
             'remove_silence_using_binary_labels'),
            ('precompile_xpaths', True, 'Labels', 'precompile_xpaths'),
            ('iterate_over_frames', True, 'Labels', 'iterate_over_frames'),
            ('appended_input_dim', 0, 'Labels', 'appended_input_dim'),
            ('buffer_size', 200000, 'Data', 'buffer_size'),
            ('train_file_number', impossible_int, 'Data', 'train_file_number'),
            ('valid_file_number', impossible_int, 'Data', 'valid_file_number'),
            ('test_file_number', impossible_int, 'Data', 'test_file_number'),
            ('log_path', os.path.join(self.work_dir,
                                      'log'), 'Paths', 'log_path'),
            ('log_file', '', 'Paths', 'log_file'),
            ('log_config_file', 'configuration/exampleloggingconfigfile.conf',
             'Paths', 'log_config_file'),
            ('sptk_bindir', 'tools/bin/SPTK-3.9', 'Paths', 'sptk'),
            ('straight_bindir', 'tools/bin/straight', 'Paths', 'straight'),
            ('world_bindir', 'tools/bin/WORLD', 'Paths', 'world'),
            ('network_type', 'RNN', 'Architecture', 'network_type'),
            ('model_type', 'DNN', 'Architecture', 'model_type'),
            ('hidden_layer_type',
             ['TANH', 'TANH', 'TANH', 'TANH', 'TANH',
              'TANH'], 'Architecture', 'hidden_layer_type'),
            ('output_layer_type', 'LINEAR', 'Architecture',
             'output_layer_type'),
            ('sequential_training', False, 'Architecture',
             'sequential_training'),
            ('dropout_rate', 0.0, 'Architecture', 'dropout_rate'),

            ## some config variables for token projection DNN
            ('scheme', 'stagewise', 'Architecture', 'scheme'),
            ('index_to_project', 0, 'Architecture', 'index_to_project'),
            ('projection_insize', 10000, 'Architecture', 'projection_insize'),
            ('projection_outsize', 10, 'Architecture', 'projection_outsize'),
            ('initial_projection_distrib', 'gaussian', 'Architecture',
             'initial_projection_distrib'),
            ('projection_weights_output_dir', 'some_path', 'Architecture',
             'projection_weights_output_dir'),
            ('layers_with_projection_input', [0], 'Architecture',
             'layers_with_projection_input'),
            ('projection_learning_rate_scaling', 1.0, 'Architecture',
             'projection_learning_rate_scaling'),
            ('learning_rate', 0.0002, 'Architecture', 'learning_rate'),
            ('l2_reg', 0.00001, 'Architecture', 'L2_regularization'),
            ('l1_reg', 0.0, 'Architecture', 'L1_regularization'),
            ('batch_size', 16, 'Architecture', 'batch_size'),
            ('training_epochs', 25, 'Architecture', 'training_epochs'),
            ('hidden_activation', 'tanh', 'Architecture', 'hidden_activation'),
            ('output_activation', 'linear', 'Architecture',
             'output_activation'),
            ('hidden_layer_size', [1024, 1024, 1024, 1024, 1024,
                                   1024], 'Architecture', 'hidden_layer_size'),
            ('private_hidden_sizes', [1024], 'Architecture',
             'private_hidden_sizes'),
            ('stream_weights', [1.0], 'Architecture', 'stream_weights'),
            ('private_l2_reg', 0.00001, 'Architecture', 'private_l2_reg'),
            ('warmup_epoch', 5, 'Architecture', 'warmup_epoch'),
            ('warmup_momentum', 0.3, 'Architecture', 'warmup_momentum'),
            ('momentum', 0.9, 'Architecture', 'momentum'),
            ('mdn_component', 1, 'Architecture', 'mdn_component'),
            ('var_floor', 0.01, 'Architecture', 'var_floor'),
            ('beta_opt', False, 'Architecture', 'beta_opt'),
            ('eff_sample_size', 0.8, 'Architecture', 'eff_sample_size'),
            ('mean_log_det', -100.0, 'Architecture', 'mean_log_det'),
            ('start_from_trained_model', '_', 'Architecture',
             'start_from_trained_model'),
            ('use_rprop', 0, 'Architecture', 'use_rprop'),
            ('mgc_dim', 60, 'Outputs', 'mgc'),
            ('dmgc_dim', 60 * 3, 'Outputs', 'dmgc'),
            ('vuv_dim', 1, 'Outputs', 'vuv'),
            ('lf0_dim', 1, 'Outputs', 'lf0'),
            ('dlf0_dim', 1 * 3, 'Outputs', 'dlf0'),
            ('bap_dim', 25, 'Outputs', 'bap'),
            ('dbap_dim', 25 * 3, 'Outputs', 'dbap'),
            ('cmp_dim', (60 * 3) + 1 + (1 * 3) + (25 * 3), 'Outputs', 'cmp'),
            ('stepw_dim', 55, 'Outputs', 'stepw_dim'),
            ('temp_sp_dim', 1025, 'Outputs', 'temp_sp_dim'),
            ('seglf0_dim', 7, 'Outputs', 'seglf0_dim'),
            ('delta_win', [-0.5, 0.0, 0.5], 'Outputs', 'delta_win'),
            ('acc_win', [1.0, -2.0, 1.0], 'Outputs', 'acc_win'),
            ('do_MLPG', True, 'Outputs', 'do_MLPG'),

            ## for GlottHMM
            ('F0_dim', 1, 'Outputs', 'F0'),
            ('dF0_dim', 1 * 3, 'Outputs', 'dF0'),
            ('Gain_dim', 1, 'Outputs', 'Gain'),
            ('dGain_dim', 1 * 3, 'Outputs', 'dGain'),
            ('HNR_dim', 5, 'Outputs', 'HNR'),
            ('dHNR_dim', 5 * 3, 'Outputs', 'dHNR'),
            ('LSF_dim', 30, 'Outputs', 'LSF'),
            ('dLSF_dim', 30 * 3, 'Outputs', 'dLSF'),
            ('LSFsource_dim', 10, 'Outputs', 'LSFsource'),
            ('dLSFsource_dim', 10 * 3, 'Outputs', 'dLSFsource'),

            ## for joint dur:-
            ('seq_dur_dim', 1, 'Outputs', 'seq_dur'),
            ('remove_silence_from_dur', True, 'Outputs',
             'remove_silence_from_dur'),
            ('dur_dim', 5, 'Outputs', 'dur'),
            ('dur_feature_type', 'numerical', 'Outputs', 'dur_feature_type'),
            ('output_feature_normalisation', 'MVN', 'Outputs',
             'output_feature_normalisation'),
            ('multistream_switch', False, 'Streams', 'multistream_switch'),
            #            ('use_private_hidden'  , False, 'Streams', 'use_private_hidden'),
            ('output_features', ['mgc', 'lf0', 'vuv',
                                 'bap'], 'Streams', 'output_features'),
            ('gen_wav_features', ['mgc', 'bap',
                                  'lf0'], 'Streams', 'gen_wav_features'),

            #            ('stream_mgc_hidden_size'   ,  192 , 'Streams', 'stream_mgc_hidden_size'),
            #            ('stream_lf0_hidden_size'   ,  32  , 'Streams', 'stream_lf0_hidden_size'),
            #            ('stream_vuv_hidden_size'   ,  32  , 'Streams', 'stream_vuv_hidden_size'),
            #            ('stream_bap_hidden_size'   ,  128 , 'Streams', 'stream_bap_hidden_size'),
            #            ('stream_stepw_hidden_size' ,  64  , 'Streams', 'stream_stepw_hidden_size'),
            #            ('stream_seglf0_hidden_size',  64  , 'Streams', 'stream_seglf0_hidden_size'),
            #            ('stream_cmp_hidden_size'   ,  256 , 'Streams', 'stream_cmp_hidden_size'),  #when multi-stream is disabled, use this to indicate the final hidden layer size
            #if this is also not provided, use the top common hidden layer size

            ## Glott HMM -- dummy values -- haven't used private streams:--
            #            ('stream_F0_hidden_size'   ,  192 , 'Streams', 'stream_F0_hidden_size'),
            #            ('stream_Gain_hidden_size'   ,  192 , 'Streams', 'stream_Gain_hidden_size'),
            #            ('stream_HNR_hidden_size'   ,  192 , 'Streams', 'stream_HNR_hidden_size'),
            #            ('stream_LSF_hidden_size'   ,  192 , 'Streams', 'stream_LSF_hidden_size'),
            #            ('stream_LSFsource_hidden_size'   ,  192 , 'Streams', 'stream_LSFsource_hidden_size'),

            ## joint dur -- dummy values -- haven't used private streams:--
            #            ('stream_dur_hidden_size'   ,  192 , 'Streams', 'stream_dur_hidden_size'),

            #            ('stream_sp_hidden_size'    , 1024, 'Streams', 'stream_sp_hidden_size'),

            #            ('stream_weight_mgc'   , 1.0, 'Streams', 'stream_weight_mgc'),
            #            ('stream_weight_lf0'   , 3.0, 'Streams', 'stream_weight_lf0'),
            #            ('stream_weight_vuv'   , 1.0, 'Streams', 'stream_weight_vuv'),
            #            ('stream_weight_bap'   , 1.0, 'Streams', 'stream_weight_bap'),
            #            ('stream_weight_stepw' , 0.0, 'Streams', 'stream_weight_stepw'),
            #            ('stream_weight_seglf0', 1.0, 'Streams', 'stream_weight_seglf0'),
            #            ('stream_weight_sp'    , 1.0, 'Streams', 'stream_weight_sp'),

            ## Glott HMM - unused?
            #            ('stream_weight_F0'   , 1.0, 'Streams', 'stream_weight_F0'),
            #            ('stream_weight_Gain'   , 1.0, 'Streams', 'stream_weight_Gain'),
            #            ('stream_weight_HNR'   , 1.0, 'Streams', 'stream_weight_HNR'),
            #            ('stream_weight_LSF'   , 1.0, 'Streams', 'stream_weight_LSF'),
            #            ('stream_weight_LSFsource'   , 1.0, 'Streams', 'stream_weight_LSFsource'),

            ## dur - unused?
            #            ('stream_weight_dur'   , 1.0, 'Streams', 'stream_weight_dur'),
            #            ('stream_lf0_lr'       , 0.5, 'Streams', 'stream_lf0_lr'),
            #            ('stream_vuv_lr'       , 0.5, 'Streams', 'stream_vuv_lr'),
            ('vocoder_type', 'STRAIGHT', 'Waveform', 'vocoder_type'),
            ('sr', 48000, 'Waveform', 'samplerate'),
            ('fl', 4096, 'Waveform', 'framelength'),
            ('shift', 1000 * 240 / 48000, 'Waveform', 'frameshift'),
            ('sp_dim', (4096 / 2) + 1, 'Waveform', 'sp_dim'),
            # fw_alpha: 'Bark' or 'ERB' allowing deduction of alpha, or explicity float value (e.g. 0.77)
            ('fw_alpha', 0.77, 'Waveform', 'fw_alpha'),
            ('pf_coef', 1.4, 'Waveform', 'postfilter_coef'),
            ('co_coef', 2047, 'Waveform', 'minimum_phase_order'),
            ('use_cep_ap', True, 'Waveform', 'use_cep_ap'),
            ('do_post_filtering', True, 'Waveform', 'do_post_filtering'),
            ('apply_GV', False, 'Waveform', 'apply_GV'),
            ('test_synth_dir', 'test_synthesis/wav', 'Waveform',
             'test_synth_dir'),
            ('DurationModel', False, 'Processes', 'DurationModel'),
            ('AcousticModel', False, 'Processes', 'AcousticModel'),
            ('GenTestList', False, 'Processes', 'GenTestList'),
            ('NORMLAB', False, 'Processes', 'NORMLAB'),
            ('MAKEDUR', False, 'Processes', 'MAKEDUR'),
            ('MAKECMP', False, 'Processes', 'MAKECMP'),
            ('NORMCMP', False, 'Processes', 'NORMCMP'),
            ('TRAINDNN', False, 'Processes', 'TRAINDNN'),
            ('DNNGEN', False, 'Processes', 'DNNGEN'),
            ('GENWAV', False, 'Processes', 'GENWAV'),
            ('CALMCD', False, 'Processes', 'CALMCD'),
            ('NORMSTEP', False, 'Processes', 'NORMSTEP'),
            ('GENBNFEA', False, 'Processes', 'GENBNFEA'),
            ('mgc_ext', '.mgc', 'Extensions', 'mgc_ext'),
            ('bap_ext', '.bap', 'Extensions', 'bap_ext'),
            ('lf0_ext', '.lf0', 'Extensions', 'lf0_ext'),
            ('cmp_ext', '.cmp', 'Extensions', 'cmp_ext'),
            ('lab_ext', '.lab', 'Extensions', 'lab_ext'),
            ('utt_ext', '.utt', 'Extensions', 'utt_ext'),
            ('stepw_ext', '.stepw', 'Extensions', 'stepw_ext'),
            ('sp_ext', '.sp', 'Extensions', 'sp_ext'),

            ## GlottHMM
            ('F0_ext', '.F0', 'Extensions', 'F0_ext'),
            ('Gain_ext', '.Gain', 'Extensions', 'Gain_ext'),
            ('HNR_ext', '.HNR', 'Extensions', 'HNR_ext'),
            ('LSF_ext', '.LSF', 'Extensions', 'LSF_ext'),
            ('LSFsource_ext', '.LSFsource', 'Extensions', 'LSFsource_ext'),

            ## joint dur
            ('dur_ext', '.dur', 'Extensions', 'dur_ext'),
        ]

        # this uses exec(...) which is potentially dangerous since arbitrary code could be executed
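        # a safer sketch (an alternative idea, not the original behaviour):
        #   import ast
        #   parsed = value if type(default) == str else ast.literal_eval(str(value))
        #   setattr(self, variable, type(default)(parsed))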
        for (variable, default, section, option) in user_options:
            value = None

            try:
                # first, look for a user-set value for this variable in the config file
                value = configparser.get(section, option)
                user_or_default = 'user'

            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
                # use default value, if there is one
                if (default == None) or \
                   (default == '')   or \
                   ((type(default) == int) and (default == impossible_int)) or \
                   ((type(default) == float) and (default == impossible_float))  :
                    logger.critical('%20s has no value!' %
                                    (section + ":" + option))
                    raise Exception
                else:
                    value = default
                    user_or_default = 'default'

            if type(default) == str:
                exec('self.%s = "%s"' % (variable, value))
            elif type(default) == int:
                exec('self.%s = int(%s)' % (variable, value))
            elif type(default) == float:
                exec('self.%s = float(%s)' % (variable, value))
            elif type(default) == bool:
                exec('self.%s = bool(%s)' % (variable, value))
            elif type(default) == list:
                exec('self.%s = list(%s)' % (variable, value))
            else:
                logger.critical(
                    'Variable %s has default value of unsupported type %s',
                    variable, type(default))
                raise Exception(
                    'Internal error in configuration settings: unsupported default type'
                )

            logger.info('%20s has %7s value %s' %
                        (section + ":" + option, user_or_default, value))

        self.combined_feature_name = ''
        for feature_name in self.output_features:
            self.combined_feature_name += '_'
            self.combined_feature_name += feature_name

        self.combined_model_name = self.model_type
        for hidden_type in self.hidden_layer_type:
            self.combined_model_name += '_' + hidden_type

        self.combined_model_name += '_' + self.output_layer_type
Example #28
0
def parse(filename):
    global config
    configname = filename
    config = ConfigParser.ConfigParser()
    config.read([filename, os.path.expanduser('~/.' + filename)])
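    # ConfigParser.read() silently skips files it cannot open, and options in
    # later files override earlier ones, so ~/.<filename> wins over ./<filename>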
Example #29
0

import time
import os
from oeqa.runtime.wifi import wifi
import string
try:
    import ConfigParser
except ImportError:
    import configparser as ConfigParser
from oeqa.oetest import oeRuntimeTest
from oeqa.utils.helper import shell_cmd_timeout
from oeqa.utils.decorators import tag

ssid_config = ConfigParser.ConfigParser()
config_path = os.path.join(os.path.dirname(__file__), "files/config.ini")
ssid_config.readfp(open(config_path))
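# files/config.ini is expected to hold the AP credentials; this layout is an
# assumption, e.g.:
# [ssid]
# name = test-ap
# passwd = secret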

@tag(TestType="FVT")
class CommWiFiConect(oeRuntimeTest):
    """
    @class CommWiFiConect
    """
    def setUp(self):
        ''' initialize wifi class
        @fn setUp
        @param self
        @return
        '''
        self.wifi = wifi.WiFiFunction(self.target)


def conf():
    config = ConfigParser.ConfigParser()
    config.read('manifest.ini')
    return config