Example 1
    def __init__(self):
        wx.Frame.__init__(self,
                          None,
                          -1,
                          u'Qiniu File Manager (' + VERSION + ')',
                          size=(800, 640))

        self.conf = Config()
        ak, sk = self.conf.getKeys()
        self.__auth = qiniu.Auth(ak, sk)

        self.SetIcon(libs.icons.desktop().GetIcon())
        #self.SetWindowStyle((self.GetWindowStyle() | wx.STAY_ON_TOP))

        self.createSplitter()
        self.createMenuBar(self.menuData())
        self.createToolBar(self.toolbarData())
        self.createStatusBar([-1, 100, 140, 70])
        self.createTaskBarIcon(self.taskMenuData())
        self.Bind(wx.EVT_CLOSE, self.onHideEvent)
        self.Center()

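        # note: the early return below means only the first configured bucket gets loaded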
        for bucket in self.conf.getBuckets():
            self._bucketPanel.setBucketName(bucket)
            return
Example 2
    def __init__(self, args):
        if not os.path.exists(args.path):
            print('The specified path does not exist.')
            exit(1)
        if not os.path.isdir(args.path):
            print('The specified path is not a directory.')
            exit(1)

        print('Creating .s4 directory')
        configdirpath = os.path.join(args.path, '.s4')
        os.mkdir(configdirpath, mode=0o700)

        print('Creating .s4/config.toml')
        configfilepath = os.path.join(configdirpath, 'config.toml')
        config = Config()
        config.save(configfilepath)

        print('Collecting user details')
        response = input('What is your "AWS Access Key ID": ')
        config.config['aws']['credentials']['aws_access_key_id'] = response
        response = input('What is your "AWS Access Key Secret": ')
        config.config['aws']['credentials']['aws_secret_access_key'] = response
        response = input('Name of the sync destination S3 bucket: ')
        config.config['aws']['bucket']['name'] = response
        # TODO: perform validation on the relative path
        response = input('Relative path inside the bucket to sync to: ')
        config.config['aws']['bucket']['path'] = response

        print('Saving .s4/config.toml')
        config.save(configfilepath)
Example 3
def main():
    Config(True)
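    # resolve each sub-module to an absolute path one level above this file's directory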
    root = [
        os.path.abspath(os.path.realpath('%s/../../%s' % (__file__, x)))
        for x in sub_modules
    ]
    pytest.main(root + ['--exitfirst', '--cov', '--cov-report', 'html'])
Example 4
def sql_filter(sql, db_type):
    """
    Check whether the given SQL statement passes the configured filters.
    :param sql: the SQL statement
    :param db_type: the database type
    :return: a result dict
    """
    ret = {
        "status": True,
        "message": "",
    }
    conf = Config()
    filter_classes = conf.get("%s_filter" % db_type, "filter_class")
    # Get every filter_class configured in the config file
    if filter_classes:
        filter_classes = filter_classes.split(",")
        imp_module = importlib.import_module(
            'db_query.backends.%s.sql_filter' % db_type)
        for filter_class in filter_classes:
            # Loop over all filters; return as soon as one of them fails
            try:
                db_filter_class = getattr(imp_module, filter_class)
                db_filter = db_filter_class()
                if not db_filter.sql_passes(sql):
                    ret["message"] = db_filter.filter_description()
                    ret["status"] = False
                    break
            except Exception as e:
                ret["message"] = str(e)
                ret["status"] = False
                break
    return ret
Example 5
    def __init__(self, parent, auth):
        wx.Panel.__init__(self, parent)
        self.__frame = self.GetParent().GetParent()

        self.__auth = auth
        self.__bucketManger = qiniu.BucketManager(self.__auth)
        self.__conf = Config()

        self.__bucketName = ""
        self.__download_url = None
        self.__prefix = ""
        self.__marker = None
        self.__limit = LIMIT

        self.__initImages()

        self.__boxSizer = wx.BoxSizer()
        self.__dataList = self.__initList()
        self.__boxSizer.Add(self.__dataList, 1, wx.EXPAND)

        self.__initPopMenu()

        self.SetSizer(self.__boxSizer)

        # init column sorter
        wx.lib.mixins.listctrl.ColumnSorterMixin.__init__(self, LIMIT)
Example 6
    def __init__(self, parent, buckets, onConfigSuccess):
        wx.Panel.__init__(self, parent, style=wx.BORDER_SIMPLE)
        self.frame = parent
        self.onConfigSuccess = onConfigSuccess
        self.buckets = buckets
        self.conf = Config()

        self.startConfigBucket()
Example 7
    def __init__(self, override_cfg_file: Optional[str] = None) -> None:
        self.config = Config(override_cfg_file)
        self.database = Database(self.config.db_path)
        self.video_id_filter: List[int] = []
        self.channel_filter: List[str] = []
        self.date_begin_filter = 0.0
        self.date_end_filter = (0.0, False)
        self.include_watched_filter = False
Example 8
    def __init__(self, parent):
        wx.Dialog.__init__(self, parent)
        conf = Config()
        boxSizer = wx.BoxSizer(wx.HORIZONTAL)
        b = BucketConfigPanel(self,
                              conf.getBuckets().keys(), self.onOkBtn)
        boxSizer.Add(b, 1, wx.EXPAND)
        self.Layout()
        self.Center()
Example 9
def set_tokens():
    config = Config(getcwd() + '/tasks/app.config')
    dicts = config.get_section_options_value('users')
    user_token_url = config.get_config_value('urls', 'URL_USER_LOGIN')
    r = Redis(host='localhost', port=6379, db=1)
    d = Datas(user_token_url)

    for k, v in dicts.items():
        token = d.get_usertoken(k, v)
        r.set(k, token)
Example 10
    def __init__(self, parent, onLoginSuccess):
        wx.Panel.__init__(self, parent, style=wx.BORDER_SIMPLE)
        self.frame = parent
        self.onLoginSuccess = onLoginSuccess

        self.conf = Config()
        accessKey, secretKey = self.conf.getKeys()

        loginTitle = wx.StaticText(self, -1, u" Login ")
        loginTitle.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.BOLD))

        accessKeyLabel = wx.StaticText(self, -1, "AccessKey:")
        self.accessKey = wx.TextCtrl(self, -1, accessKey)

        secretKeyLabel = wx.StaticText(self, -1, "SecretKey:")
        self.secretKey = wx.TextCtrl(self, -1, secretKey, style=wx.PASSWORD)

        self.rememberMe = wx.CheckBox(self, -1, u"Remember me")
        if accessKey != "" and secretKey != "":
            self.rememberMe.SetValue(True)

        self.loginBtn = wx.Button(self, -1, u"Login")
        self.loginBtn.Bind(wx.EVT_BUTTON, self.OnLogin)

        self.exitBtn = wx.Button(self, -1, u"Exit")
        self.exitBtn.Bind(wx.EVT_BUTTON, self.OnExit)

        # title
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        mainSizer.Add((0, 10))
        mainSizer.Add(loginTitle, 0, wx.CENTER | wx.TOP | wx.BOTTOM, 5)
        mainSizer.Add(wx.StaticLine(self), 0, wx.EXPAND | wx.TOP | wx.BOTTOM, 5)

        # ak and sk
        keySizer = wx.FlexGridSizer(cols=2, hgap=7, vgap=7)
        keySizer.AddGrowableCol(1)
        keySizer.Add(accessKeyLabel, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL)
        keySizer.Add(self.accessKey, 0, wx.EXPAND)
        keySizer.Add(secretKeyLabel, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL)
        keySizer.Add(self.secretKey, 0, wx.EXPAND)
        mainSizer.Add(keySizer, 0, wx.EXPAND | wx.ALL, 5)

        mainSizer.Add(self.rememberMe, 0, wx.ALL | wx.ALIGN_RIGHT, 5)
        mainSizer.Add((10, 10))  # some empty space

        btnSizer = wx.BoxSizer(wx.HORIZONTAL)
        btnSizer.Add((30, 20))
        btnSizer.Add(self.loginBtn,1)
        btnSizer.Add((30, 20))
        btnSizer.Add(self.exitBtn,1)
        btnSizer.Add((30, 20))

        mainSizer.Add(btnSizer, 0, wx.EXPAND | wx.BOTTOM, 10)

        self.SetSizer(mainSizer)
Example 11
    def test_conf_info(self):
        from libs.config import Config
        conf = Config()
        db_type_list = conf.items('db_type')
        mysql_version_list = conf.items('mysql_version')
        oracle_version = conf.items('oracle_version')
        print(db_type_list)
        print(mysql_version_list)
        print(oracle_version)
        self.assertEqual(len(db_type_list) > 0, True)
        self.assertEqual(len(mysql_version_list) > 0, True)
        self.assertEqual(len(oracle_version) > 0, True)
Example 12
def read_config():
    config = Config('argument parser for classification training')
    config.add_argument('--topk', type=int, default=5)
    config.add_argument('--backbone', type=str, default='resnet101')
    config.add_argument('--groups', type=int, default=1)
    config.add_argument('--conv',
                        type=str,
                        default='conv',
                        choices=['conv', 'seperable', 'deform'])
    config.add_argument('--dilation', type=int, nargs='+', default=1)

    return config.parse_args()
Example 13
    def __init__(self):
        __config = Config()

        self.__host = __config.get('MySQL', 'Host')
        self.__user = __config.get('MySQL', 'User')
        self.__password = __config.get('MySQL', 'Pass')
        self.__database = __config.get('MySQL', 'Base')

        try:
            self.__conn = MySQLdb.connect(self.__host, self.__user,
                                          self.__password, self.__database)
        except MySQLdb.Error as e:
            print "Error %d: %s" % (e.args[0], e.args[1])
            sys.exit(1)
Example 14
def read_config():
    config = Config('argument parser for object detection training')
    config.add_argument('--backbone', type=str, default='resnet101')
    config.add_argument('--groups', type=int, default=1)
    config.add_argument('--dilation', type=int, nargs='+', default=1)
    config.add_argument('--conv',
                        type=str,
                        default='conv',
                        choices=['conv', 'seperable', 'deform'])
    config.add_argument('--fpn_layer', type=int, default=4)
    config.add_argument('--num_anchors', type=int, default=9)
    config.add_argument('--no_fix_backbone_bn',
                        action='store_true',
                        default=False)
    config.add_argument('--use_bn', action='store_true', default=False)
    config.add_argument('--use_residual', action='store_true', default=False)
    config.add_argument('--use_postprocessing',
                        action='store_true',
                        default=False)
    config.add_argument('--cls_thresh', type=float, default=0.05)
    config.add_argument('--nms_thresh', type=float, default=0.5)
    config.add_argument('--bbox_min_size',
                        type=int,
                        default=4,
                        help='minimum bbox size for ground truth')
    config.add_argument('--rpn_pos_overlap',
                        type=float,
                        default=0.5,
                        help='rpn positive overlap threshold')
    config.add_argument('--rpn_low_neg_overlap',
                        type=float,
                        default=0.3,
                        help='the lower bound of rpn negative overlap')
    config.add_argument('--rpn_high_neg_overlap',
                        type=float,
                        default=0.4,
                        help='the higher bound of rpn negative overlap')
    config.add_argument('--crowd_overlap',
                        type=float,
                        default=0.3,
                        help='remove anchors if they ovelap with crowds')
    config.add_argument('--border_ratio',
                        type=float,
                        default=0.8,
                        help='filter border anchors')
    config.add_argument('--use_mask',
                        action='store_true',
                        help='whether to use mask targets in dataloader')
    return config.parse_args()
Example 15
def read_config():
    config = Config('argument parser for semantic segmentation training')
    config.add_argument('--backbone', type=str, default='resnet101')
    config.add_argument('--groups', type=int, default=1)
    config.add_argument('--conv',
                        type=str,
                        default='conv',
                        choices=['conv', 'seperable', 'deform'])
    config.add_argument('--fpn_layer', type=int, default=3)
    config.add_argument('--dilation', type=int, nargs='+', default=1)
    config.add_argument('--use_postprocessing',
                        action='store_true',
                        default=False)
    config.add_argument('--use_aug', action='store_true', default=False)
    return config.parse_args()
Example 16
def get_projects():
    # print("获取projects")
    # todo 修改为 缓存方式的

    try:
        conf = Config()
        url = conf.get('project_info', 'url')
        url = quote(url, safe='?|/|=|&|:')
        res = requests.get(url, timeout=5)
        ret = res.json()['data']
        ret_tuple = tuple(
            map(lambda x: (x['project']['en_name'], x['project']['ch_name']),
                ret))
    except Exception as e:
        print('Connection failed')
        ret_tuple = []
    return ret_tuple
Example 17
    def __init__(self):
        super(MainWindow, self).__init__()

        # set up UI from QtDesigner
        self.ui = Ui_mainwindow()
        self.ui.setupUi(self)

        # init window at center
        self.move(QApplication.desktop().screen().rect().center() -
                  self.rect().center())
        self.show()

        # User
        self.user_state = None
        self.shows_window = None
        self.settings = None

        settings.config = Config(settings._CONFIG_FILE,
                                 default_config=settings._DEFAULTS)
        if settings.config.has_property('default_user'):
            self.set_user_state(
                UserContent(
                    settings.config['default_user'],
                    user_dir=settings.config['user_dir']
                    if settings.config.has_property('user_dir') else None,
                    cache_dir=settings.config['cache_dir']
                    if settings.config.has_property('cache_dir') else None,
                    storage_dir=settings.config['storage_dir']
                    if settings.config.has_property('storage_dir') else None,
                    apikey=settings._TVDB_API_KEY))
        else:  # create login window, this will use the function self.set_user_state to set the user_state
            self.setEnabled(False)
            self.loginwindow = LoginWindow(main_window=self)
            self.loginwindow.move(
                self.pos() + self.rect().center() -
                self.loginwindow.rect().center())  # position login window
            self.loginwindow.show()

        self.ui.shows_button.clicked.connect(self.display_shows)
        self.ui.config_button.clicked.connect(self.display_settings)
Example 18
    def __init__(self, args):
        self.args = args

        self.config = Config()
        self.config.load('s4config.toml')

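        # the saved credentials dict is unpacked directly into the boto3 session arguments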
        session = boto3.session.Session(**self.config['aws']['credentials'])
        self.s3 = session.resource('s3')

        # Check if bucket exists
        check = self.check_bucket_access(self.config['aws']['bucket']['name'])
        if check == BucketAccess.NotFound:
            print('Configured bucket does not exist.')
            exit(1)
        elif check == BucketAccess.Forbidden:
            print('Error: do not have permission to access configured bucket')
            print('The following are some known causes of this error:')
            print(' - User does not have access to S3')
            print(' - The bucket is owned by another account')
            exit(1)

        # Sync files
        self.sync()
Example 19
def main():
    Config(test=False)
    app.run(debug=True)
Example 20
import os
import sys
from threading import Thread

from libs.requirements import TestRequirements

DIR = os.path.dirname(os.path.realpath(__file__))
if sys.version_info < (3, 6):
    raise RuntimeError("This application requires Python 3.6+")

TestRequirements(DIR + "/requirements.txt").test_requirements()

# pylint: disable=wrong-import-position
import web.app
from libs.config import Config
from mylogger import logger

# noqa: MC0001
if __name__ == "__main__":
    conf = Config()
    conf.load_app()
    args = conf.args
    t1 = Thread(target=web.app.start_app,
                args=[
                    "My car info", args.base_path, logger.level < 20,
                    args.listen,
                    int(args.port)
                ])
    t1.setDaemon(True)
    t1.start()
    t1.join()
Example 21
import os
import sys

from libs.config import Config
from libs.fastqc import PreProcessing
from libs.mapping import NCFilter, RefMapping
from libs.quantifier import Quantifier

if __name__ == '__main__':
    cfg = sys.argv[1]
    config = Config(cfg)

    pp = PreProcessing(config)
    pp.process()

    ncf = NCFilter(config)
    ncf.process()

    ref = RefMapping(config)
    ref.process()

    quanti = Quantifier(config)
    quanti.process()
Example 22
    def do_reload(self):
        self.log(logging.INFO, "Reloading")
        config = Config(config_file)
        self.load(config.get('general', 'routes', {}))
        self.mqtt.subscribe(self._actions.keys())
Example 23
            self.log(logging.INFO, "Requesting Node Discovery")
            self.xbee.xbee.at(command='ND')

        while True:
            try:
                self.mqtt.loop()
            except Exception as e:
                logging.exception("Error while looping MQTT (%s)" % e)

if __name__ == "__main__":

    def resolve_path(path):
        return path if path[0] == '/' else os.path.join(os.path.dirname(os.path.realpath(__file__)), path)

    config_file = resolve_path('config/xbee2mqtt.yaml')
    config = Config(config_file)

    handler = logging.StreamHandler()
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)

    logger = logging.getLogger()
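    # config.get(section, key, default) presumably falls back to the default when the key is missing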
    logger.setLevel(config.get('daemon', 'logging_level', logging.INFO))
    logger.addHandler(handler)

    mqtt = MosquittoWrapper(config.get('mqtt', 'client_id', None))
    mqtt.host = config.get('mqtt', 'host', 'localhost')
    mqtt.port = config.get('mqtt', 'port', 1883)
    mqtt.username = config.get('mqtt', 'username', None)
    mqtt.password = config.get('mqtt', 'password', None)
    mqtt.keepalive = config.get('mqtt', 'keepalive', 60)
Example 24
def readme(ctx, directory: str, template_dir: str, template: str, filename: str, configfile: str, overwrite: bool, backup: bool, namespace: str, repo: str, token: str):
    """Command that will create a project readme.md for a repository ."""
    try:
        # Define the dictionary object that will be passed to the readme template.
        ReadmeObj = {}
        # Set the directory path based on either the passed directory arg, or the click context.
        path = SetPath(directory, ctx)

        # Call the config parser to parse a config file if one exists within the target directory.
        ReadmeConfig = Config(path, configfile)
        # print(json.dumps(ReadmeConfig.config, indent=4, sort_keys=True))
        ReadmeObj.update(git=ReadmeConfig.config.get('GitConfig', {}))
        ReadmeObj.update(readme=ReadmeConfig.config.get('ReadMeConfig', {}))
        
        # Gather the project variables to construct the object that the template will use to generate the readme document.
        # Set the type of provider class to call
        if ctx.obj.get('provider').lower() == 'terraform' or ctx.obj.get('provider') == 'tf':
            Variables = TFDoc(path).BuildVarList()
            log.debug("Variable list retrieved from provided file path...")
            ReadmeObj.update(variables=Variables)
            # Add the required and optional images
            ReadMeObjVar = ReadmeObj.get('variables')
            ReadMeObjVar.update(required_image=ReadmeConfig.config.get('VariablesConfig').get('Required').get('Image'))
            ReadMeObjVar.update(optional_image=ReadmeConfig.config.get('VariablesConfig').get('Optional').get('Image'))
            
            # Gather the project outputs to use to construct the object that the template will use to generate the readme document.
            Outputs = TFDoc(path).BuildOutputList()
            log.debug("Output list retrieved from provided file path...")
            ReadmeObj.update(outputs=Outputs)

            # Graph the things
            GraphImage = TFDoc(path).BuildGraph()
            if GraphImage != "None":
                ReadmeObj.update(tfdiagram=GraphImage)

        # Update the variable objects with the data from the config pull
        ReadmeConfig.UpdateVars(ReadmeObj)
        log.info("ReadmeObj has been updated with variable data from the parsed config.")
        # print(json.dumps(ReadmeObj, indent=4, sort_keys=True))

        # Add the Build Tree
        BuildTree = DirTree(path)
        log.debug("Dirtree created:\n{}".format(BuildTree))
        ReadmeObj.update(tree=BuildTree)
        
        # Last attempt to fetch GitRepo Data and add it to the ReadMeObj before we generate the documentation.
        # If the namespace and repository were provided, then assign the values, if not check for .git/config file and parse.
        # Set variables to hold the Namespace and Repo
        GitNameSpace = None
        GitRepo = None  
        
        # Try and use the variables first
        if repo is not None and namespace is not None:
            GitNameSpace = namespace
            GitRepo = repo
            log.debug("Configuring Git NameSpace and Repo from command variables...")
            log.debug("GitNameSpace set to value: {}".format(namespace))
            log.debug("GitRepo set to value: {}".format(repo))
        # Try and fetch the repository URL from the configured directory.
        elif isinstance(ReadmeObj.get('git'), dict):
            GitNameSpace = ReadmeObj.get('git').get('NameSpace')
            GitRepo = ReadmeObj.get('git').get('Name')
            log.debug("Configuring Git NameSpace and Repo from the project config file...")
            log.debug("GitNameSpace set to value: {}".format(ReadmeObj.get('git').get('NameSpace')))
            log.debug("GitRepo set to value: {}".format(ReadmeObj.get('git').get('Name')))
        elif os.path.exists("{}/.git/config".format(path)):
            GitNameSpace, GitRepo = ParseRepoUrl(path)
            log.debug("Configuring Git NameSpace and Repo from the project .git/config file...")
            log.debug("GitNameSpace set to value: {}".format(GitNameSpace))
            log.debug("GitRepo set to value: {}".format(GitRepo))
        # Attempt to make the call to the repository to fetch repo and release data.
        log.debug("Attempting to make request from Github...")
        if GitNameSpace is not None and GitRepo is not None:
            RequestObj = Github(GitNameSpace, GitRepo, token).GetGitHubData()
            log.debug("Git Repository Request State: {}".format(RequestObj))
            if RequestObj.get('state') != "fail":
                ReadmeObj.update(repo=RequestObj)
                log.debug("Template Object has been updated to include the repository response object.")
        log.info("Generation of template dictionary object completed: {}".format(json.dumps(ReadmeObj, indent=4, sort_keys=True)))
        # Load, render and write the Jinja Template
        log.debug("Attempting to build project config file...")
        DocTemplate = Jinja(TemplateDir=template_dir, Template=template, OutputDir=path, OutputFile=filename)
        DocTemplate.LoadTemplate()
        DocTemplate.RenderTemplate(ReadmeObj)
        DocTemplate.WriteTemplate(Overwrite=overwrite, Backup=backup)
        # Add the Dir Tree Variable to the Template Dictionary:
        cprint("Template file successfully written to: {}".format(os.path.join(path, filename)))
    except Exception as e:
        cprint(" ERROR ENCOUNTERED: ", 'grey', 'on_red')
        cprint("Error: Failed to complete config file creation", 'red')
        cprint(str(e), 'red')
        log.error("Error: Failed to complete config file creation")
        log.error(str(e))
        raise
        sys.exit()  # NOTE: unreachable, the bare raise above re-raises the exception first
Example 25
def read_config():
    config = Config('argument parser for gan training')
    config.add_argument('--input_nc',
                        type=int,
                        default=3,
                        help='# of input image channels')
    config.add_argument('--output_nc',
                        type=int,
                        default=3,
                        help='# of output image channels')
    config.add_argument('--ngf',
                        type=int,
                        default=64,
                        help='# of gen filters in first conv layer')
    config.add_argument('--ndf',
                        type=int,
                        default=64,
                        help='# of discrim filters in first conv layer')
    config.add_argument('--netD_arch',
                        type=str,
                        default='nlayers',
                        help='selects model to use for netD')
    config.add_argument('--netG_arch',
                        type=str,
                        default='resnet_9blocks',
                        help='selects model to use for netG')
    config.add_argument('--n_layers_D',
                        type=int,
                        default=3,
                        help='only used if netD_arch==n_layers')
    config.add_argument('--dataset_mode',
                        type=str,
                        default='unaligned',
                        help='chooses how datasets are loaded. [unaligned '
                        '| aligned | single]')
    config.add_argument(
        '--model',
        type=str,
        default='cycle_gan',
        help='chooses which model to use. cycle_gan, pix2pix, test')
    config.add_argument('--reverse_AB',
                        action='store_true',
                        help='AtoB or BtoA')
    config.add_argument('--norm',
                        type=str,
                        default='instance',
                        help='instance normalization or batch normalization')
    config.add_argument('--serial_batches',
                        action='store_true',
                        help='if true, takes images in order to make batches, '
                        'otherwise takes them randomly')
    config.add_argument('--no_dropout',
                        action='store_true',
                        help='no dropout for the generator')
    config.add_argument(
        '--max_dataset_size',
        type=int,
        default=float("inf"),
        help='Maximum number of samples allowed per dataset. '
        'If the dataset directory contains more than max_dataset_size, '
        'only a subset is loaded.')
    config.add_argument(
        '--resize_or_crop',
        type=str,
        default='resize_and_crop',
        help='scaling and cropping of images at load time '
        '[resize_and_crop|crop|scale_width|scale_width_and_crop]')
    config.add_argument(
        '--no_flip',
        action='store_true',
        help='if specified, do not flip the images for data augmentation')
    config.add_argument(
        '--init_type',
        type=str,
        default='normal',
        help='network initialization [normal|xavier|kaiming|orthogonal]')
    config.add_argument('--display_step',
                        type=int,
                        default=100,
                        help='frequency of showing training results on screen')
    config.add_argument('--niter',
                        type=int,
                        default=100,
                        help='# of iter at starting learning rate')
    config.add_argument(
        '--niter_decay',
        type=int,
        default=100,
        help='# of iter to linearly decay learning rate to zero')
    config.add_argument(
        '--no_lsgan',
        action='store_true',
        help='do *not* use least square GAN, if false, use vanilla GAN')
    config.add_argument(
        '--pool_size',
        type=int,
        default=50,
        help='the size of image buffer that stores previously generated images'
    )
    config.add_argument('--lambda_A',
                        type=float,
                        default=10.0,
                        help='weight for cycle loss (A -> B -> A)')
    config.add_argument('--lambda_B',
                        type=float,
                        default=10.0,
                        help='weight for cycle loss (B -> A -> B)')
    config.add_argument(
        '--lr_decay_iters',
        type=int,
        default=50,
        help='multiply by a gamma every lr_decay_iters iterations')
    config.add_argument(
        '--identity',
        type=float,
        default=0.5,
        help=
        'use identity mapping. Setting identity other than 1 has an effect of '
        'scaling the weight of the identity mapping loss. For example, if the weight '
        'of the identity loss should be 10 times smaller than the weight of the '
        'reconstruction loss, please set optidentity = 0.1')
    return config.parse_args()
Example 26
def main():
    config = Config(BASE_DIR + '/schedd_configuration/', type='schedd')
    sqlite = Sqlite(BASE_DIR + '/storage/hsm.db', config.XMLconfiguration)

    settings = path.abspath(
        path.join(path.dirname(__file__), '..', 'settings.ini'))

    es = Es(settings)

    schedd_metrics = es.get_schedd_metrics()
    sqlite.scheddhosts_availability(schedd_metrics)
    #
    submissionhosts = sqlite.get_data(type='schedd')

    for host in submissionhosts:
        sls_doc = SlsDocument()

        if host != 'none':
            availability = submissionhosts[host]['availability']
            notificated = submissionhosts[host]['notificated']
            contacts = submissionhosts[host]['contacts']
            text = submissionhosts[host]['errorsdesc']
            errors = sqlite.get_schedd_history_logs(submissionhost=host)

            if availability == 0 or availability == 10 or availability == 50:
                if len(errors) > 0:
                    mailtext = ''
                    for error in errors:
                        if error['notificated'] == 0:
                            mailtext = mailtext + error['fulltext']
                            sqlite.update_schedd_entry(
                                'SCHEDDHISTORYLOG',
                                'notificated',
                                1,
                                host,
                                checkmetrictime=error['checkmetrictime'])
                            sqlite.update_schedd_entry(
                                'SCHEDDHISTORYLOG',
                                'notificationtime',
                                str(datetime.utcnow()),
                                host,
                                checkmetrictime=error['checkmetrictime'])
                    if mailtext != '':
                        email = Notifications(
                            text=mailtext,
                            subject='Service issues on submissionhost: {0}'.
                            format(host),
                            to=contacts)
                        email.send_notification_email()
                        sqlite.update_schedd_entry('SUBMISSIONHOSTS',
                                                   'notificated', 1, host)
                        email = {}
            elif availability == 100 and notificated == 1:
                sqlite.update_schedd_entry('SUBMISSIONHOSTS', 'notificated', 0,
                                           host)

            id = 'PandaHarvesterCondor'

            sls_doc.set_id('%s_%s' % (id, (str(host).split('.'))[0]))
            sls_doc.set_status(availability)
            sls_doc.set_avail_desc(host)
            sls_doc.set_avail_info(text)
            sls_doc.set_webpage("")
            sls_doc.set_contact("")

            try:
                sls_doc.send_document()
            except Exception as ex:
                _logger.error(ex)
Example 27
def setupConfig():
    config = Config()
    config.setStorage(ConfigJson('config.json'))
    if os.path.exists('config.json'):
        config.load()
    return config
Example 28
def test_config_value():
	c = Config('app.config')
	assert c.get_config_value('urls','URL_USER_LOGIN')
Example 29
from dash import callback_context, html, dcc
from dash.exceptions import PreventUpdate
from flask import request

from libs.psa.setup.app_decoder import firstLaunchConfig
from libs.config import Config
from mylogger import LOG_FILE, logger
from otp.otp import new_otp_session
from web.app import dash_app
import dash_bootstrap_components as dbc
from dash.dependencies import Output, Input, State

config = Config()
login_config_layout = dbc.Row(
    dbc.Col(
        md=12,
        lg=2,
        className="m-3",
        children=[
            dbc.Row(html.H2('Config')),
            dbc.Row(
                className="ms-2",
                children=[
                    dbc.Form([
                        html.Div([
                            dbc.Label("Car Brand", html_for="psa-app"),
                            dcc.Dropdown(
                                id="psa-app",
                                options=[{
                                    "label": "Peugeot",
                                    "value": "com.psa.mym.mypeugeot"
Example 30
def test_config_options():
    c = Config('app.config')
    return c.get_section_options_value('users')