Code example #1
def main(argv):
    parser = argparse.ArgumentParser(
        description='Visual multilingual information denoising bottleneck')
    parser.add_argument('CONFIG', type=str)
    args = parser.parse_args(argv)

    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True

    config = ConfigFactory.parse_file(args.CONFIG)
    if not config.get('dset_dir', None):
        config['dset_dir'] = "/ws/ifp-53_2/hasegawa/lwang114/data/zerospeech2021-dataset/phonetic"

    seed = config['seed']  # ConfigTree uses bracket access, not attribute access
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    np.random.seed(seed)

    np.set_printoptions(precision=4)
    torch.set_printoptions(precision=4)

    print()
    print('[CONFIGS]')
    print(config)
    print()

    net = Solver(config)
    save_embedding = config.get('save_embedding', False)
    if config['mode'] == 'train':
        net.train(save_embedding=save_embedding)
    elif config['mode'] == 'test':
        net.load_checkpoint()
        net.test(save_embedding=save_embedding)
    else:
        return 0
Code example #2
def load_config(file_dir, config_file='config.conf'):
    config = ConfigFactory.parse_file(os.path.join(file_dir, f'{config_file}'))
    if 'airflow' in config:
        if 'start_date' in config['airflow']:
            config['airflow']['start_date'] = pd.to_datetime(
                config['airflow']['start_date']).to_pydatetime()
    return config
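The coercion above turns HOCON's string value into the datetime object Airflow expects. A minimal self-contained sketch of the same logic, using parse_string so no config file is needed (the airflow block and its value are made up):

import pandas as pd
from pyhocon import ConfigFactory

config = ConfigFactory.parse_string('airflow { start_date = "2021-01-01" }')
if 'airflow' in config and 'start_date' in config['airflow']:
    # pd.to_datetime returns a pandas Timestamp; to_pydatetime() converts
    # it into a plain datetime.datetime
    config['airflow']['start_date'] = pd.to_datetime(
        config['airflow']['start_date']).to_pydatetime()
print(type(config['airflow']['start_date']))  # <class 'datetime.datetime'>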
Code example #3
def main():
    config_path = os.path.join('web', 'rhasspy.conf')
    logging.info('Loading configuration from %s' % config_path)

    config = ConfigFactory.parse_file(config_path)
    train_speech_recognizer(config)
    train_intent_recognizer(config)
Code example #4
File: GetConfig.py Project: hotbless/patchtemp
 def get_config(self):
     try:
         conf = ConfigFactory.parse_file('./conf/config.conf')
     except Exception as err:
         raise RuntimeError("Configuration failed!") from err
     else:
         return conf
Code example #5
File: tool.py Project: peoplepattern/pyhocon
    def convert(input_file=None, output_file=None, format='json'):
        """Convert to json, properties or yaml

        :param format: json, properties or yaml
        :type format: basestring
        :return: json, properties or yaml string representation
        """

        if input_file is None:
            content = sys.stdin.read()
            config = ConfigFactory.parse_string(content)
        else:
            config = ConfigFactory.parse_file(input_file)

        if format.lower() == 'json':
            res = HOCONConverter.to_json(config)
        elif format.lower() == 'properties':
            res = HOCONConverter.to_properties(config)
        elif format.lower() == 'yaml':
            res = HOCONConverter.to_yaml(config)
        else:
            raise Exception("Format must be 'json', 'properties' or 'yaml'")

        if output_file is None:
            print(res)
        else:
            with open(output_file, "w") as fd:
                fd.write(res)
Code example #6
File: __init__.py Project: N2BBrasil/tap-gsheets
def main():

    # parse arguments. get config file path.
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', help='config file', required=True)
    parser.add_argument('-o', '--overrides',
                        type=json.loads,
                        help='a JSON string with configuration overrides',
                        required=False,
                        default="{}"
                        )
    args = parser.parse_args()

    # the configuration file can be provided in json as much as in hocon
    # ConfigFactory will pick up the format from the file extension
    config = ConfigFactory.parse_file(args.config)

    # we like to keep the config as a dict from here on
    config = config.as_plain_ordered_dict()

    # now we override file config with command line provided config
    config.update(args.overrides)

    # go on processing
    sync(config)
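A condensed sketch of the override flow above, with hypothetical keys: as_plain_ordered_dict() yields an ordinary dict, so a JSON override string can simply be merged in with dict.update:

import json
from pyhocon import ConfigFactory

config = ConfigFactory.parse_string('sheet_name = "expenses"\npage_size = 50')
config = config.as_plain_ordered_dict()           # plain OrderedDict from here on
config.update(json.loads('{"page_size": 100}'))   # command-line override wins
print(config['sheet_name'], config['page_size'])  # expenses 100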
Code example #7
File: simulatools.py Project: himelbrand/simulatools
def single_run(policy,
               trace,
               size=4,
               changes={},
               name=None,
               save=True,
               reuse=False,
               verbose=False,
               readonly=False):
    name = name if name else policy
    policy = Policy[policy]
    trace = Trace[trace]
    if 0 < size < 9:
        size = trace.typical_caches()[size - 1]
    conf_path = caffeine_root + 'simulator{0}src{0}main{0}resources{0}'.format(
        os.sep)
    conf_file = conf_path + 'application.conf'
    if not os.path.exists(output_csvs_path):
        os.makedirs(output_csvs_path)
    run_simulator = './gradlew simulator:run -x caffeine:compileJava -x caffeine:compileCodeGenJava'
    #   run_simulator = './gradlew simulator:run'
    if os.path.exists(conf_file):
        conf = ConfigFactory.parse_file(conf_file)
    else:
        conf = ConfigFactory.parse_string("""
                                          caffeine {
                                            simulator {
                                            }
                                          }
                                          """)
    simulator = conf['caffeine']['simulator']
    simulator.put('files.paths',
                  [resources_path + trace.format() + os.sep + trace.file()])

    simulator.put('files.format', trace.value['format'])
    simulator.put('maximum-size', size)
    simulator.put('policies', [policy.value])
    simulator.put('admission', [Admission.ALWAYS.value])
    simulator.put('report.format', 'csv')
    simulator.put(
        'report.output',
        output_csvs_path + '{}-{}-{}.csv'.format(trace.name, size, name))

    for k, v in changes.items():
        simulator.put(k, v)

    with open(conf_file, 'w') as f:
        f.write(HOCONConverter.to_hocon(conf))
    if (not reuse or not os.path.isfile(
            simulator['report']['output'])) and not readonly:
        call(run_simulator,
             shell=True,
             cwd=caffeine_root,
             stdout=subprocess.DEVNULL if not verbose else None)
    with open(simulator['report']['output'], 'r') as csvfile:
        reader = csv.DictReader(csvfile)
        results = {line['Policy']: float(line['Hit rate']) for line in reader}
    if not save:
        os.remove(simulator['report']['output'])
    return results if len(results) != 1 else next(iter(results.values()))
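The put/serialize round trip above reduces to a few lines. A sketch with toy keys rather than the simulator's real schema:

from pyhocon import ConfigFactory, HOCONConverter

conf = ConfigFactory.parse_string('caffeine { simulator { } }')
simulator = conf['caffeine']['simulator']  # a live reference into the tree
simulator.put('maximum-size', 512)
simulator.put('report.format', 'csv')      # a dotted key creates a nested tree
print(HOCONConverter.to_hocon(conf))       # serialize the modified tree back out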
Code example #8
 def from_file(cls, path, soup=None, savae_config=(10, 0.0002, 0.3)):
     config_tree = ConfigFactory.parse_file(path)
     if soup:
         config_tree['seed'] = soup
     config_tree['num_iter'], config_tree['eps'], config_tree[
         'momentum'] = savae_config
     return cls(config_tree)
Code example #9
 def from_conf(cls, conf_path):
     conf = ConfigFactory.parse_file(str(conf_path))
     data_path = Path(
         os.path.abspath(get_conf(conf, group=cls.GROUP, key="data_path")))
     if get_conf(conf, group=cls.GROUP, key="output_dir") is None:
         output_dir = data_path.parent.joinpath("output")
     else:
         output_dir = Path(get_conf(conf, group=cls.GROUP,
                                    key="output_dir"))
     return cls(data_path=data_path,
                output_dir=output_dir,
                n_evals=get_conf(conf,
                                 group=cls.GROUP,
                                 key="n_evals",
                                 default=50),
                n_bootstraps=get_conf(conf,
                                      group=cls.GROUP,
                                      key="n_bootstraps",
                                      default=100),
                n_folds=get_conf(conf,
                                 group=cls.GROUP,
                                 key="n_folds",
                                 default=6),
                search_method=get_conf(conf,
                                       group=cls.GROUP,
                                       key="search_method",
                                       default="particle swarm"))
Code example #11
def get_custom_settings(args):
    custom_settings_file = vars(args).get('custom_settings_file')
    if custom_settings_file and os.path.exists(custom_settings_file):
        print('Loading custom settings {}'.format(custom_settings_file))
        return ConfigFactory.parse_file(custom_settings_file)
    else:
        return None
Code example #12
    def __init__(self, conf_path):
        super(Option, self).__init__()
        self.conf = ConfigFactory.parse_file(conf_path)
        #  ------------ General options ----------------------------------------
        self.save_path = self.conf['save_path']
        self.dataPath = self.conf['dataPath']  # path for loading data set
        self.dataset = self.conf[
            'dataset']  # options: imagenet | cifar10 | cifar100 | imagenet100 | mnist
        self.nGPU = self.conf['nGPU']  # number of GPUs to use by default
        self.GPU = self.conf['GPU']  # default gpu to use, options: range(nGPU)
        self.visible_devices = self.conf['visible_devices']

        # ------------- Data options -------------------------------------------
        self.nThreads = self.conf['nThreads']  # number of data loader threads

        # ---------- Optimization options --------------------------------------
        self.nEpochs = self.conf[
            'nEpochs']  # number of total epochs to train 400
        self.batchSize = self.conf['batchSize']  # mini-batch size 128
        self.momentum = self.conf['momentum']  # momentum 0.9
        self.weightDecay = float(self.conf['weightDecay'])  # weight decay 1e-4
        self.ori_opt_type = self.conf['ori_opt_type']
        self.quan_opt_type = self.conf['quan_opt_type']

        # lr master for optimizer 1 (mask vector d)
        self.ori_lr = self.conf['ori_lr']  # initial learning rate
        self.quan_lr = self.conf['quan_lr']
        self.lrPolicy = self.conf[
            'lrPolicy']  # options: multi_step | linear | exp | const | step
        self.power = self.conf['power']  # power for inv policy (lr_policy)
        self.step = self.conf[
            'step']  # step for linear or exp learning rate policy
        self.decayRate = self.conf['decayRate']  # lr decay rate
        self.endlr = self.conf['endlr']

        # ---------- Model options ---------------------------------------------
        self.netType = self.conf[
            'netType']  # options: ResNet | PreResNet | GreedyNet | NIN | LeNet5 | LeNet500300 | DenseNet_Cifar | AlexNet
        self.experimentID = self.conf['experimentID']
        self.depth = self.conf['depth']  # resnet depth: (n-2)%6==0
        self.nClasses = self.conf[
            'nClasses']  # number of classes in the dataset
        self.wideFactor = self.conf[
            'wideFactor']  # wide factor for wide-resnet
        self.drawNetwork = self.conf['drawNetwork']

        # ---------- Quantization options ---------------------------------------------
        self.quantization_k = self.conf['quantization_k']
        self.alpha = float(self.conf['alpha'])
        self.beta = float(self.conf['beta'])
        self.gamma = float(self.conf['gamma'])

        # ---------- Resume or Retrain options ---------------------------------------------
        self.resume = None if len(self.conf['resume']) == 0 else self.conf[
            'resume']  # "./checkpoint_064.pth"
        self.ori_retrain = None if len(
            self.conf['ori_retrain']) == 0 else self.conf['ori_retrain']
        self.quan_retrain = None if len(
            self.conf['quan_retrain']) == 0 else self.conf['quan_retrain']
Code example #13
 def parse_m5o_file(self, file_path):
     try:
         parsed = ConfigFactory.parse_file(file_path)
         parsed["_file_path"] = str(file_path)
         return parsed
     except Exception as e:
         raise MeltanoAnalysisFileParserError(str(e),
                                              str(file_path.parts[-1]))
Code example #14
def load(file: str, clazz):
    try:
        conf = ConfigFactory.parse_file(file)
        return __parse(conf, clazz, "")
    except pyparsing.ParseSyntaxException as e:
        raise MalformedConfigException(
            f'parsing failure line {e.lineno} character {e.col}, got "{e.line}"'
        )
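To see the error path this handler is built for, feed pyhocon a malformed string. The concrete exception class can vary across pyhocon/pyparsing versions, so this sketch catches pyparsing's common base class:

import pyparsing
from pyhocon import ConfigFactory

try:
    ConfigFactory.parse_string('foo { bar =')  # unterminated block
except pyparsing.ParseBaseException as e:      # parent of ParseSyntaxException
    print('parsing failure line {} character {}, got "{}"'.format(
        e.lineno, e.col, e.line))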
Code example #15
def agentset():

    agentset_conf = ConfigFactory.parse_file('conf/agentset.conf')
    agentset_prim = agentset_conf.get_list('primitives')

    html = load_html(agentset_prim)

    return render_template('agentset.html', task=Markup(html))
Code example #16
File: configs.py Project: chrisjmyoon/loca
 def __init__(self, conf_path="configs/eval.hocon"):
     self.eval_conf = ConfigFactory.parse_file(conf_path)
     self.plot = self.eval_conf['plot']
     self.num_epochs = self.eval_conf['num_epochs']
     self.lr = self.eval_conf['lr']
     self.weight_decay = self.eval_conf['weight_decay']
     self.batch_size = self.eval_conf['batch_size']
     self.show_plots = self.eval_conf['show_plots']
Code example #17
File: run-q1-22.py Project: chapter09/tpch-spark
def gen_conf(scale, app_name, query):
    conf = ConfigFactory.parse_file('../conf/application.conf')
    conf.put("all.query-num", query)
    conf.put("all.data-scale", scale)
    conf.put("all.app-suffix", app_name)

    with open('../conf/application-run.conf', 'w') as f:
        f.write(HOCONConverter.convert(conf, 'hocon'))
Code example #18
File: test_apic.py Project: moodysanalytics/apic
def test_get_config_item():
    config = ConfigFactory.parse_file(
        'default_configuration\\application.conf')
    actual = apic.get_config_item(config, 'non_existing_item')
    assert actual is None

    actual = apic.get_config_item(config, 'sso_service_base_url')
    assert actual == 'https://sso.moodysanalytics.com'
Code example #19
File: configs.py Project: chrisjmyoon/loca
 def add_dataset(self, dataset_path, name):
     data_conf_obj = dict()
     data_conf = ConfigFactory.parse_file(dataset_path)
     data_conf_obj['data_path'] = os.path.expanduser(data_conf['data_path'])
     data_conf_obj['label1'] = os.path.expanduser(data_conf['label1'])
     data_conf_obj['label2'] = os.path.expanduser(data_conf['label2'])
     data_conf_obj['meta_path'] = os.path.expanduser(data_conf['meta_path'])
     self.datasets[name] = data_conf_obj
Code example #20
    def load(cls, config_name="reference.conf"):
        frame = inspect.stack()[1]
        module = inspect.getmodule(frame[0])
        loader = pkgutil.get_loader(module.__name__)
        base = Path(loader.path).parent

        config = ConfigFactory.parse_file(str(base / config_name))
        return cls(config)
Code example #21
File: option.py Project: qdmy/dcp
    def __init__(self, conf_path):
        super(Option, self).__init__()
        self.conf = ConfigFactory.parse_file(conf_path)

        # ------------- general options ----------------------------------------
        self.save_path = self.conf['save_path']  # log path
        self.data_path = self.conf['data_path']  # path for loading data set
        self.dataset = self.conf['dataset']  # options: imagenet | cifar10
        self.seed = self.conf['seed']  # manually set RNG seed
        self.gpu = self.conf['gpu']  # GPU id to use, e.g. "0,1,2,3"
        self.n_gpus = len(
            self.gpu.split(','))  # number of GPUs to use by default

        # ------------- data options -------------------------------------------
        self.n_threads = self.conf[
            'n_threads']  # number of threads used for data loading
        self.n_classes = self.conf[
            'n_classes']  # number of classes in the dataset
        self.multi_frame = self.conf['multi_frame']
        # ------------- discrimination-aware options ---------------------------
        self.n_losses = self.conf['n_losses']  # number of additional losses

        # self.pruning_rate = self.conf['pruning_rate']  # pruning rate
        # change by liuxu
        rates = self.conf['pruning_rate'].split(',')
        self.pruning_rate = [1 - float(i) for i in rates]

        self.softmax_weight = self.conf[
            'softmax_weight']  # weight of the softmax loss
        self.mse_weight = self.conf[
            'mse_weight']  # weight of the mean square loss
        self.max_samples = self.conf[
            'max_samples']  # maximum sample size used for channel selection, -1 means using whole data set
        self.warmstart = self.conf[
            'warmstart']  # whether to use warmstart in subproblem

        # ------------- common optimization options ----------------------------
        self.batch_size = self.conf['batch_size']  # mini-batch size
        self.momentum = self.conf['momentum']  # momentum
        self.weight_decay = self.conf['weight_decay']  # weight decay
        self.lr = 0.1
        self.layer_wise_lr = self.conf[
            'layer_wise_lr']  # initial learning rate

        # ------------- model options ------------------------------------------
        self.net_type = self.conf[
            'net_type']  # options: resnet | preresnet | vgg
        self.experiment_id = self.conf[
            'experiment_id']  # identifier for experiment
        self.depth = self.conf['depth']  # resnet depth: (n-2)%6==0

        # ---------- resume or pretrained options ---------------------------------
        # path to model to pretrained with, load model state_dict only
        self.pretrained = None if len(
            self.conf['pretrained']) == 0 else self.conf['pretrained']
        # path to directory containing checkpoint, load state_dicts of model and optimizer, as well as training epoch
        self.resume = None if len(
            self.conf['resume']) == 0 else self.conf['resume']
Code example #22
File: hpc.py Project: yyxql/bcbio-nextgen
def _load_custom_config(run_config):
    """Load custom configuration input HOCON file for cromwell.
    """
    from pyhocon import ConfigFactory, HOCONConverter, ConfigTree
    conf = ConfigFactory.parse_file(run_config)
    out = {}
    if "database" in conf:
        out["database"] = HOCONConverter.to_hocon(ConfigTree({"database": conf.get_config("database")}))
    return out
Code example #23
    def __init__(self, config='./config'):
        reader = ConfigFactory.parse_file(config).get('reader')

        self.input_path = '/dev/input/event{}'.format(reader)
        self.device = evdev.InputDevice(self.input_path)
        self.device.grab()

        self.prefix = "KEY_"
        self.enter = "ENTER"
Code example #24
def test_factory_persist_local():
    source_path = f"{BASE_PATH}/Brodmann/BIrodmann/test/unit_test_files/factory_persist_local/source/groupby_limit.csv"
    conf_file = f"{BASE_PATH}/Brodmann/BIrodmann/test/unit_test_files/factory_persist_local/conf/conf.conf"
    conf = ConfigFactory.parse_file(conf_file)
    source_df = pd.read_csv(source_path)
    FactoryPersistDF(source_df, "local", conf,
                     logging.getLogger(__name__)).build().persist()
    target_df = pd.read_csv(conf.get_string("App.Reports.persist.path"))
    assert_frame_equal(target_df, source_df)
Code example #25
def load_default_config(path: Union[Path, str] = None):
    """Load custom configuration from specified file. Modifies global constants!"""
    # pylint: disable=global-statement
    global DEFAULT_CONFIG
    if path:
        DEFAULT_CONFIG = ConfigTree.merge_configs(
            DEFAULT_CONFIG, ConfigFactory.parse_file(str(path)))
    else:
        DEFAULT_CONFIG = default_config(RESOURCES_PATH, "graph.conf")
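A toy illustration of ConfigTree.merge_configs as used above: keys from the second tree override the first, and nested trees are merged rather than replaced (values here are made up; note that merge_configs returns the merged tree and may mutate its first argument):

from pyhocon import ConfigFactory, ConfigTree

base = ConfigFactory.parse_string('graph { depth = 2, weighted = true }')
override = ConfigFactory.parse_string('graph { depth = 5 }')
merged = ConfigTree.merge_configs(base, override)
print(merged['graph.depth'], merged['graph.weighted'])  # 5 True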
Code example #26
 def open(self):
     raw_input = ConfigFactory.parse_file(
         os.path.abspath('../config/formed_signal_input.conf'))
     with ProcessPoolExecutor(
             max_workers=ProjParaShop.PARALLEL_SIZE) as executor:
         print(
             list(
                 executor.map(SignalFeature4Fog().process,
                              raw_input["FORMED_SIGNAL_MSG"])))
Code example #27
File: timelapse.py Project: RonanFinley/STEMPlace
    def read_config(path):
        from pyhocon import ConfigFactory as HoconConfigFactory

        config = HoconConfigFactory.parse_file(path.absolute())

        global canvas_width, canvas_height, hex_palette, default_color_idx
        canvas_width = config.get('board.width')
        canvas_height = config.get('board.height')
        hex_palette = config.get('board.palette')
        default_color_idx = config.get('board.defaultColor')
Code example #28
File: NN_spark.py Project: shaz13/gsoc_relationship
 def __init__(self):
     conf = ConfigFactory.parse_file("../config/ml.conf")
     self.mode = conf["NN.mode"]
     self.input_file = conf["NN.input_file"]
     self.model = conf["NN.model"]
     self.output = conf["NN.output"]
     self.batch_size = conf["NN.batch_size"]
     self.epochs = conf["NN.epochs"]
     self.cluster_size = conf["NN.cluster_size"]
     self.steps = conf["NN.steps"]
Code example #29
File: config.py Project: wormhole-digital/ctl
    def provide(config_file_path=None) -> "Config":
        if config_file_path is None:
            if "NOX__CONFIG_FILE_PATH" in os.environ:
                config_file_path = os.environ["NOX__CONFIG_FILE_PATH"]
            else:
                config_file_path = "nox.conf"

        config = ConfigFactory.parse_file(config_file_path)

        return Config(config)
Code example #30
File: hutils.py Project: jiajunxiong/hshg
def read_conf(filename):
    conf = ConfigFactory.parse_file(filename)
    config_list = conf.get_config("config_list")
    for exchange in config_list:
        config = {}
        exchange_conf = config_list.get_config(exchange)
        config["api_key"] = exchange_conf["api_key"]
        config["api_secret"] = exchange_conf["api_secret"]
        config_list[exchange] = config
    return config_list
Code example #31
File: run.py Project: chapter09/tpch-spark
def gen_conf(t1, t2, scale, app_name, query):
    conf = ConfigFactory.parse_file('../conf/application.conf')
    conf.put("Q23.table-list", [t1, t2])
    conf.put("all.data-scale", scale)
    conf.put("all.hdfs", 'hdfs://%s:8020/'%HDFS)
    conf.put("all.app-suffix", app_name)
    conf.put("Q23.query", QUERYS[query])

    with open('../conf/application-run.conf', 'w') as f:
        f.write(HOCONConverter.convert(conf, 'hocon'))
Code example #32
def load_config(file_path):
    """
    Wrapper over pyhocon that returns a python dict rather
    than pyhocon's structure, and also corrects None values
    :param file_path: path of the config file to parse
    :return: config dict
    """
    conf = ConfigFactory.parse_file(file_path)
    _load_config(conf)
    return dict(conf)
Code example #33
 def parse(hocon_file):
     """
     This function takes a hocon file
     and returns a dictionary.
     :param hocon_file: Path to hocon file.
     :returns conf: Dictionary version of passed hocon file
     """
     conf = ConfigFactory.parse_file(hocon_file)
     conf = json.loads(HOCONConverter.convert(conf, 'json'))
     return conf
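The JSON round trip above is a simple way to get plain dicts (and lists) all the way down rather than ConfigTree instances; a compact sketch with a toy config:

import json
from pyhocon import ConfigFactory, HOCONConverter

conf = ConfigFactory.parse_string('a { b = 1 }')
plain = json.loads(HOCONConverter.convert(conf, 'json'))
print(type(plain), plain)  # <class 'dict'> {'a': {'b': 1}}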
Code example #34
File: main.py Project: chimpler/catdb
def main():
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument('-d', '--database', help='database', required=True, action='store')
    parent_parser.add_argument('-s', '--schema', help='schema', required=False, action='store', default=None)
    parent_parser.add_argument('-t', '--table', help='table filter (using % as a wildcard)', required=False,
                               action='store')
    parent_parser.add_argument('-dr', '--dry-run', dest='dry_run', help='dry run', required=False, action='store_true')

    argparser = argparse.ArgumentParser(description='export')
    subparsers = argparser.add_subparsers(help='sub-command help', dest='subparser_name')
    ddl_parser = subparsers.add_parser('ddl', help='ddl', parents=[parent_parser])
    ddl_parser.add_argument('-e', '--export', dest='export_file', help='export', required=False, action='store')
    ddl_parser.add_argument('-i', '--import', dest='import_file', help='import', required=False)
    data_parser = subparsers.add_parser('data', help='data', parents=[parent_parser])
    data_parser.add_argument('-e', '--export', dest='export_file', help='export', required=False, action='store')
    data_parser.add_argument('-i', '--import', dest='import_file', help='import', required=False)
    subparsers.add_parser('list', help='list', parents=[parent_parser])

    args = argparser.parse_args()
    home_dir = os.environ['HOME']
    config_path = os.path.join(home_dir, '.catdb')
    if not os.path.exists(config_path):
        sys.stderr.write(
            'File {config_path} not found. Go to https://github.com/chimpler/catdb for more details\n'.format(
                config_path=config_path))
        sys.exit(1)

    config = ConfigFactory.parse_file(config_path)
    db_config = config['databases.' + args.database]

    db = DbManager.get_db(db_config['type'], db_config)
    if args.subparser_name == 'list':
        print('\n'.join(db.list_tables(args.table, args.schema)))
    elif args.subparser_name == 'ddl':
        if args.export_file:
            ddl_str = json.dumps(db.get_ddl(args.table, args.schema), sort_keys=True,
                                 indent=config['ddl-format.indent'], separators=(',', ': '))
            with open_output_file(args.export_file) as fd:
                fd.write(ddl_str)
        elif args.import_file:
            with open_input_file(args.import_file) as fd:
                ddl = json.loads(fd.read())
                table_statement = db.create_database_statement(ddl, args.database, args.schema)
                if args.dry_run:
                    print(table_statement)
                else:
                    db.execute(table_statement)
    elif args.subparser_name == 'data':
        if args.export_file:
            db.export_to_file(args.export_file, args.table, args.schema, config['data-format.delimiter'],
                              config['data-format.null'])

        elif args.import_file:
            db.import_from_file(args.import_file, args.table, args.schema, config['data-format.delimiter'],
                                config['data-format.null'])
Code example #35
File: log.py Project: lvyiwei1/StylePTB
    def from_file(cls, path, fmt='hocon'):
        if fmt == 'hocon':
            config_tree = ConfigFactory.parse_file(path)
        elif fmt == 'json':
            with open(path, 'r') as f:
                d = json.load(f)
            config_tree = ConfigFactory.from_dict(d)
        else:
            raise ValueError('Invalid format: {}'.format(fmt))

        return cls(config_tree)
Code example #36
File: __init__.py Project: simple2source/magEx
 def __init__(self, routers=None, **kwargs):
     self.extensions = {}
     self.kwargs = kwargs
     default_config_file = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'application.conf')
     config_file = self.kwargs.pop('config', default_config_file)
     if os.path.exists(config_file):
         self.config = ConfigFactory.parse_file(config_file)
     else:
         self.config = ConfigTree()
     if routers is None:
         routers = []
     self.routers = routers
     self.filters = []
Code example #37
File: main.py Project: fabiofumarola/scraper
    def __init__(self, args):
        name_conf_file = "./config/%s.conf" % args[1]
        conf = ConfigFactory.parse_file(name_conf_file)
        kafka_servers = conf.get('scraper.kafka.servers')
        self.kafka = KafkaListing(kafka_servers)
        self.producer = self.kafka.producer()
        redis_conf = conf.get('scraper.redis')
        self.visited_pages = redis.StrictRedis(host=redis_conf['host'], port=redis_conf['port'],
                                               db=redis_conf['db'])

        job_conf = conf.get('scraper.job')
        self.timeout = int(job_conf['timeout'])
        self.job_name = job_conf['name']
        self.mode = job_conf['mode']
        print('starting job %s' % job_conf)
Code example #38
def load_config(config_file, fallback_config_files):
    """
    Load configuration from a HOCON configuration file, with an optional fallback chain

    @param config_file:           the primary configuration file
    @param fallback_config_files: an optional list of fallback configuration files

    @rtype:                       ConfigTree
    @return:                      configuration
    """

    config = ConfigFactory.parse_file(config_file)

    if fallback_config_files:
        for fallback_config_file in fallback_config_files:
            if isfile(fallback_config_file):
                config = config.with_fallback(fallback_config_file)
            else:
                print('Warn: "%s" not found or not a file' % fallback_config_file)

    return config
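with_fallback accepts either a parsed ConfigTree or, as above, a file path, and values already present in the primary config take precedence. A string-based sketch of that precedence with hypothetical keys:

from pyhocon import ConfigFactory

primary = ConfigFactory.parse_string('db { host = "prod.example.com" }')
fallback = ConfigFactory.parse_string('db { host = "localhost", port = 5432 }')
merged = primary.with_fallback(fallback)
print(merged['db.host'], merged['db.port'])  # prod.example.com 5432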
Code example #39
File: converter.py Project: chunyang-wen/pyhocon
    def convert_from_file(cls, input_file=None, output_file=None, output_format='json', indent=2, compact=False):
        """Convert to json, properties or yaml

        :param input_file: input file, if not specified stdin
        :param output_file: output file, if not specified stdout
        :param output_format: json, properties or yaml
        :return: json, properties or yaml string representation
        """

        if input_file is None:
            content = sys.stdin.read()
            config = ConfigFactory.parse_string(content)
        else:
            config = ConfigFactory.parse_file(input_file)

        res = cls.convert(config, output_format, indent, compact)
        if output_file is None:
            print(res)
        else:
            with open(output_file, "w") as fd:
                fd.write(res)
Code example #40
            print(directory_name + " was not a directory. Skipping.")
            continue

        directory_path = os.path.join(opts.template_directory, directory_name)
        template_path = os.path.join(directory_path, "template.json")
        config_path = os.path.join(directory_path, "template.conf")
        if not os.path.isfile(template_path):
            print("%s is not a template directory (template file is missing)" % directory_path)
            continue

        if not os.path.isfile(config_path):
            print("%s is not a template directory (config file is missing)" % directory_path)
            continue

        template = open(template_path).read()
        config = ConfigFactory.parse_file(config_path)
        if "parameters" not in config:
            config["parameters"] = {}

        template_variables = get_template_variables(template)
        complete_parameters = update_parameters(config["parameters"], template_variables,
                                                template_path, opts.parameter_type)
        parameters_changed = False
        if config["parameters"] != complete_parameters:
            parameters_changed = True
            config["parameters"] = complete_parameters

        template_without_dashes, parameters_without_dashes = convert_dashes_to_underscores(template,
                                                                                           config["parameters"],
                                                                                           template_path)
Code example #41
 def test_include_dict_from_samples(self):
     config = ConfigFactory.parse_file("samples/animals.conf")
     assert config.get('cat.garfield.say') == 'meow'
     assert config.get('dog.mutt.hates.garfield.say') == 'meow'
Code example #42
File: config.py Project: magyarchan/lulzbot
 def __init__(self, config = "development.conf"):
     self.conf = ConfigFactory.parse_file(config)
Code example #43
File: helpers.py Project: pchandar/irtk
def get_test_config():
    return ConfigFactory.parse_file(pkg_resources.resource_filename('tests', 'test_data/test.conf'))
Code example #44
File: cfg.py Project: nathanielksmith/prosaic
def read_config(cfgpath):
    return ConfigFactory.parse_file(cfgpath)
Code example #45
File: log.py Project: siddk/lang2program
 def from_file(cls, path):
     config_tree = ConfigFactory.parse_file(path)
     return cls(config_tree)
Code example #46
 def __init__(self, defaults, config_path):
     file_config = ConfigFactory.parse_file(config_path)
     default_config = ConfigFactory.parse_string(defaults)
     self._config = file_config.with_fallback(default_config)
Code example #47
            f.write("    nGramGranularity = 5,\n")
            f.write("    addNGramFeatures = true,\n")
            f.write("    inputFile    = \"file:///scratch/network/alexeys/bills/lexs/bills_combined_3_COILNJ_new.json\",\n")
            if cat != "":
                f.write("    outputFile   = \"/user/alexeys/valid_pairs_\""+cat+",\n")
                f.write("    outputParquetFile = \"/user/alexeys/bills_combined_\""+cat+",\n")
            else:
                f.write("    outputFile   = \"/user/alexeys/valid_pairs\",\n")
                f.write("    outputParquetFile = \"/user/alexeys/bills_combined\",\n")
            f.write("    nPartitions  = 40,\n")
            f.write("    numTextFeatures = 1048576,\n")
            f.write("    kval = 40,\n")
            f.write("    numConcepts = 1500\n")
            f.write("}\n")
    for cat in strictCats:
        createOneConfig(conf, cat)


if __name__ == '__main__':
    base_config_path = 'src/main/resources/workflow1_makeCartesian.conf'
    conf = ConfigFactory.parse_file(base_config_path)

    nFolders = 14
    nPartitions = 120
    nCPartitions = 3000
    measureName = "maxasymjaccard"
    inputParquetFile = sys.argv[1] #/user/alexeys/bills_combined
    inputPairsFile = sys.argv[2] #/user/alexeys/valid_pairs
    outputMainFile = sys.argv[3] #/user/alexeys/output_sample
    createConfigsA(conf, nPartitions, nCPartitions, measureName, inputParquetFile, inputPairsFile, outputMainFile, nFolders)
Code example #48
File: config.py Project: ariloulaleelay/wrflow
 def __init__(self, default_path, config_path):
     default_config = ConfigFactory.parse_file(default_path)
     file_config = ConfigFactory.parse_file(config_path)
     self._config = file_config.with_fallback(default_config)
Code example #49
def prepareAndImportConf(options):
    logging.info('Parsing base config ...')

    conf = ConfigFactory.parse_file(DEFAULT_BASE_CONF_NAME)

    logging.info('Parsing base config ... Successful')

    logging.info('Assigning parameters ...')

    name = options.env
    region = options.region
    subscriptionId = options.subId
    tenantId = options.tenantId
    clientId = options.clientId
    clientSecret = options.clientSecret

    username = options.username
    keyFileName = DEFAULT_BASE_DIR + "/" + username + "/" + options.keyFileName
    generateKeyToFile(keyFileName, username)

    networkSecurityGroupResourceGroup = options.networkSecurityGroupResourceGroup
    networkSecurityGroup = options.networkSecurityGroup
    virtualNetworkResourceGroup = options.virtualNetworkResourceGroup
    virtualNetwork = options.virtualNetwork
    subnetName = options.subnetName
    computeResourceGroup = options.computeResourceGroup
    hostFqdnSuffix = options.hostFqdnSuffix

    dbHostOrIP = options.dbHostOrIP
    dbUsername = options.dbUsername
    dbPassword = options.dbPassword

    masterType = options.masterType.upper()
    workerType = options.workerType.upper()
    edgeType = options.edgeType.upper()

    dirUsername = options.dirUsername
    dirPassword = options.dirPassword

    logging.info('Assigning parameters ... Successful')

    logging.info('Modifying config ...')

    conf.put('name', name)
    conf.put('provider.region', region)
    conf.put('provider.subscriptionId', subscriptionId)
    conf.put('provider.tenantId', tenantId)
    conf.put('provider.clientId', clientId)
    conf.put('provider.clientSecret', clientSecret)

    conf.put('ssh.username', username)
    conf.put('ssh.privateKey', keyFileName)

    setInstanceParameters(conf, 'instances.master', masterType, networkSecurityGroupResourceGroup,
                          networkSecurityGroup,
                          virtualNetworkResourceGroup, virtualNetwork, subnetName,
                          computeResourceGroup, hostFqdnSuffix)
    setInstanceParameters(conf, 'instances.worker', workerType, networkSecurityGroupResourceGroup,
                          networkSecurityGroup,
                          virtualNetworkResourceGroup, virtualNetwork, subnetName,
                          computeResourceGroup, hostFqdnSuffix)
    setInstanceParameters(conf, 'instances.edge', edgeType, networkSecurityGroupResourceGroup,
                          networkSecurityGroup,
                          virtualNetworkResourceGroup, virtualNetwork, subnetName,
                          computeResourceGroup, hostFqdnSuffix)
    setInstanceParameters(conf, 'cloudera-manager.instance', edgeType,
                          networkSecurityGroupResourceGroup, networkSecurityGroup,
                          virtualNetworkResourceGroup, virtualNetwork, subnetName,
                          computeResourceGroup, hostFqdnSuffix)
    setInstanceParameters(conf, 'cluster.masters.instance', masterType,
                          networkSecurityGroupResourceGroup, networkSecurityGroup,
                          virtualNetworkResourceGroup, virtualNetwork, subnetName,
                          computeResourceGroup, hostFqdnSuffix)
    setInstanceParameters(conf, 'cluster.workers.instance', workerType,
                          networkSecurityGroupResourceGroup, networkSecurityGroup,
                          virtualNetworkResourceGroup, virtualNetwork, subnetName,
                          computeResourceGroup, hostFqdnSuffix)

    conf.put('databaseServers.mysqlprod1.host', dbHostOrIP)
    conf.put('databaseServers.mysqlprod1.user', dbUsername)
    conf.put('databaseServers.mysqlprod1.password', dbPassword)

    logging.info('Modifying config ... Successful')

    confLocation = DEFAULT_BASE_DIR + "/" + username + "/" + DEFAULT_CONF_NAME

    logging.info('Writing modified config to %s ...' % confLocation)

    with open(confLocation, "w") as text_file:
        text_file.write(tool.HOCONConverter.to_hocon(conf))

    logging.info('Writing modified config to %s ... Successful' % confLocation)

    logging.info('Importing config to Cloudera Director server ...')

    command = "python setup-default.py --admin-username '%s' --admin-password '%s' '%s'" % (
        dirUsername, dirPassword, confLocation)
    execAndLog(command)

    logging.info('Importing config to Cloudera Director server ... Successful')
Code example #50
def parse_config():
    print("Parsing config file")
    return ConfigFactory.parse_file('../conf/application.conf')
Code example #51
File: config.py Project: kostyaev/ann-search
from pyhocon import ConfigFactory

conf = ConfigFactory.parse_file("application.conf")

index_dir = conf.get("index_dir")
Code example #52
File: start-controller.py Project: chlin501/psrs
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pyhocon import ConfigFactory
import argparse
import os
import subprocess

def get_args():
  parser = argparse.ArgumentParser(description='Find controller.')   
  parser.add_argument("-c" , "--conf", dest="conf_path", help="get controller from the config file")
  return parser.parse_args()

def start(host, port, log_dir):
  if not os.path.exists(log_dir):
    os.makedirs(log_dir)  
  cmd = 'nohup sbt "runMain psrs.Controller --host {0} --port {1}" > {2}/controller_{0}_{1}.log 2>&1 &'.format(host, port, log_dir)
  subprocess.Popen(cmd, shell=True) 

if __name__ == '__main__':
  args = get_args()
  if args.conf_path is not None:
    conf = ConfigFactory.parse_file(args.conf_path)
    host = conf['akka.remote.netty.tcp.hostname'] 
    port = conf['akka.remote.netty.tcp.port'] 
    log_dir = conf['psrs.log-dir'] 
    start(host, port, log_dir)
  else:
    print "error: application.conf is not supplied!"
Code example #53
File: app.py Project: datagrok/tildetown-scripts
def post_helpdesk():
    desc = request.form['desc']
    captcha = request.form['hmm']

    if captcha == 'scrop':
        status = "success" if send_email(request.form) else "fail"
    else:
        status = "fail"

    # should we bother restoring other fields besides desc?
    return redirect('/helpdesk?status={}&desc={}'.format(status, desc))

if __name__ == '__main__':
    app.config['DEBUG'] = True
    # tension between this and cfg function...

    conf = ConfigFactory.parse_file('cfg.conf')

    logfile = conf.get('logfile', '/tmp/cgi.log')
    logging.basicConfig(filename=logfile, level=logging.DEBUG)

    app.config['DATA_DIR'] = conf['guestbook_dir']
    app.config['TRELLO_EMAIL'] = conf['trello_email']
    app.config['MAILGUN_URL'] = conf['mailgun_url']
    app.config['MAILGUN_KEY'] = conf['mailgun_key']

    logging.debug("Running with data_dir=", app.config['DATA_DIR'])
    logging.debug(app.config)

    app.run()
Code example #54
File: custom_settings.py Project: fsat/conductr-cli
def load_from_file(args):
    custom_settings_file = vars(args).get('custom_settings_file')
    if custom_settings_file and os.path.exists(custom_settings_file):
        return ConfigFactory.parse_file(custom_settings_file)
    else:
        return None
Code example #55
    conn.request("GET", url + "/api/search/usage?notUsedSince=" + str(date) + "&repos=" + repo, headers=headers)

    res = conn.getresponse()

    data = res.read()

    json_object = json.loads(data)
    
    if 'results' in json_object:    
        for value in json_object["results"]:
            if not_exclude(value["uri"], exclude):
                updated = value["uri"].replace("api/storage/", "")
                substr = "/artifactory/"
                pos = updated.index(substr)
                path = updated[pos:len(updated)]
                print(path)
                clean_path(conn, path, headers)


config = ConfigFactory.parse_file("cleaner.conf")

artifactory_url = urlparse(config.get("artifactory.url"))
port = artifactory_url.port if artifactory_url.port else 80 if artifactory_url.scheme == 'http' else 443

conn = httplib.HTTPConnection(artifactory_url.hostname, port) if artifactory_url.scheme == 'http' else httplib.HTTPSConnection(artifactory_url.hostname, port)

user_pass = b64encode(config.get("artifactory.auth")).decode("ascii")
headers = { 'Authorization' : 'Basic %s' %  user_pass }

for value in config.get("repos"):
    outdated(conn, artifactory_url.path, headers, value.get('name'), value.get_int('interval'), config.get("exclude"))
Code example #56
from lib import helpers

helpers.printCurrentTime("start ./amazon_statistics.py")

import logging
logging.basicConfig(
    format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)

# CONFIG

from pyhocon import ConfigFactory

config = ConfigFactory.parse_file('./application.conf')
amazon_dump_dir = config.get_string('amazon-dump.dir')
movie_reviews_file = config.get_string('amazon-dump.files.reviews.movies')

text_min_len = config.get_int('amazon-dump.statistics.review.text.min-len')
statistic_measures_cache_dir = config.get_string(
    'amazon-dump.statistics.measures.cache-dir')
reviews_count_file = config.get_string(
    'amazon-dump.statistics.measures.reviews-count.cache-file')
number_of_reviews_by_asin_file = config.get_string(
    'amazon-dump.statistics.measures.number-of-reviews-by-asin.cache-file')
number_of_reviews_by_person_file = config.get_string(
    'amazon-dump.statistics.measures.number-of-reviews-by-person.cache-file')
bow_by_asin_file = config.get_string(
    'amazon-dump.statistics.measures.bow-by-asin.cache-file')

import pickle
import os
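The typed accessors used throughout this script (get_string, get_int, and friends) raise if a key is missing unless a default is supplied; a small sketch with made-up keys:

from pyhocon import ConfigFactory

c = ConfigFactory.parse_string('stats { dir = "/tmp/dump", min-len = 10 }')
print(c.get_string('stats.dir'))      # /tmp/dump
print(c.get_int('stats.min-len'))     # 10
print(c.get_int('stats.missing', 0))  # 0, via the default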
Code example #57
#! /usr/bin/env python

# Legacy support for sentences table in DeepDive.
# The script reads the table from the database and stores it in the new column format.

from pyhocon import ConfigFactory
import json
import psycopg2
import psycopg2.extras
import sys
import pipe

conf = ConfigFactory.parse_file("../view.conf")

docs = conf.get("view.docs")


def find_token_offsets(s):
    # split on whitespace
    pos = [-1] + [i for i, ltr in enumerate(s) if ltr == " "] + [len(s)]
    offsets = [[pos[i] + 1, pos[i + 1]] for i in range(0, len(pos) - 1)]
    return offsets


def write_docs():
    # write extractions to json file
    dbconf = conf.get("view.db.default")
    conn_string = "host='%s' dbname='%s' user='******' password='******'" % (
        dbconf.get("host"),
        dbconf.get("dbname"),
        dbconf.get("user"),
Code example #58
#!/usr/bin/env python

ES_HOST = {"host" : "localhost", "port" : 9200}
INDEX_NAME = 'view'
TYPE_ANNOTATORS_NAME = 'annotators'
TYPE_ANNOTATIONS_NAME = 'annotations'
N = 1000

from pyhocon import ConfigFactory
from elasticsearch import Elasticsearch
import json
import sys

conf = ConfigFactory.parse_file('../view.conf')

conf_annotations = conf.get_list('view.annotations')

es = Elasticsearch(hosts = [ES_HOST])

# create a small table that only contains the names of all available extractors
def index_annotators():
  es.delete_by_query(index = INDEX_NAME, doc_type = TYPE_ANNOTATORS_NAME, body = {
      "query": {
        "match_all": {}
      }
  })
  for ann in conf_annotations:
    es.index(index = INDEX_NAME, doc_type = TYPE_ANNOTATORS_NAME, body = {
      "name" : ann.get('name')
    }, refresh = False)
  es.indices.refresh(index = INDEX_NAME)
Code example #59
File: common.py Project: DataKind-BLR/ichangemycity
import pymongo
from pyhocon import ConfigFactory
from math import sin, cos, sqrt, atan2, radians
from text import TextUtils
from datetime import datetime

# load the configuration
conf = ConfigFactory.parse_file("conf/app.conf")

def mongoClient():
	# TODO make singleton / add other properties
	return  pymongo.MongoClient()	

class Properties(object):
	# fields used for computing distances
	textFields = set(conf["data.textFields"])
	# fields derived from textFields
	tokenizedTextFields = set(map(lambda _ : "tokenized_" + _, conf["data.textFields"]))

class LatLong(object):
	"""
	Represents a (latitude, longitude) tuple.
	"""
	@staticmethod
	def distance(l1, l2):
		# Source: http://stackoverflow.com/questions/4913349/haversine-formula-in-python-bearing-and-distance-between-two-gps-points
		# Radius of earth in kilometers
		R = 6373.0		 
		# convert to radians
		lat1 = radians(l1.latitude)
		lon1 = radians(l1.longitude)