Example #1
def prepare_data():
  """ Method to get configuration data from source files. 
      Outputs a dictionary of categories as keys and lists of config_options as values
  """

  # These variables should point to the configuration files
  su2_basedir = os.environ['SU2_HOME']
  config_cpp = os.path.join(su2_basedir,'Common/src/config_structure.cpp')
  config_hpp = os.path.join(su2_basedir,'Common/include/option_structure.hpp')

  # Check that files exist
  if not os.path.isfile(config_cpp):
    sys.exit('Could not find cpp file, please check that su2_basedir is set correctly in config_gui.py')
  if not os.path.isfile(config_hpp):
    sys.exit('Could not find hpp file, please check that su2_basedir is set correctly in config_gui.py')
 
  # Run the parser
  option_list = parse_config(config_cpp, config_hpp)

  # Organize data into dictionary with categories as keys
  option_data = {}
  for opt in option_list:
    if opt.option_category not in option_data:
      option_data[opt.option_category] = []
    option_data[opt.option_category].append(opt)

  return option_data
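
The returned mapping can be consumed directly; a minimal sketch, assuming each config_option carries an option_name attribute (an assumption, the attribute is not shown in this excerpt):

# Sketch: list every option grouped by category (option_name is an assumed attribute).
option_data = prepare_data()
for category, options in option_data.items():
    print(category)
    for opt in options:
        print('  ' + opt.option_name)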
Example #2
def main(config_file="config.yml"):
    """
    Main function that loads config, sets up logging, and runs evaluation

    Args:
        config_file (str): path to config file (for logging)

    Returns:
        None
    """
    logger.info("Evaluating")
    # load config
    logger.info(f"Loading config {config_file}")
    config = parse_config(config_file)
    logger.info(f"Config: \n{config}")
    # log experiment
    params = {"param0": np.random.rand()}
    metrics = {"metric0": np.random.rand()}
    artifacts = {"config_file": config_file}
    log_experiment(
        params,
        metrics,
        artifacts,
        config["experiment_name"],
        os.environ["MLFLOW_TRACKING_URI"],
        os.environ["MLFLOW_ARTIFACT_LOCATION"],
    )
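
parse_config itself is not shown in these excerpts; for the YAML-based examples a minimal sketch could be as simple as wrapping yaml.safe_load (an assumption, not the project's actual implementation):

# Minimal YAML-backed parse_config sketch; requires PyYAML.
import yaml

def parse_config(config_file):
    """Read a YAML config file and return it as a dict."""
    with open(config_file) as f:
        return yaml.safe_load(f)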
Example #3
def prepare_data():
    """ Method to get configuration data from source files. 
      Outputs a dictionary of categories as keys and lists of config_options as values
  """

    # These variables should point to the configuration files
    su2_basedir = os.environ['SU2_HOME']
    config_cpp = os.path.join(su2_basedir, 'Common/src/config_structure.cpp')
    config_hpp = os.path.join(su2_basedir,
                              'Common/include/option_structure.hpp')

    # Check that files exist
    if not os.path.isfile(config_cpp):
        sys.exit(
            'Could not find cpp file, please check that su2_basedir is set correctly in config_gui.py'
        )
    if not os.path.isfile(config_hpp):
        sys.exit(
            'Could not find hpp file, please check that su2_basedir is set correctly in config_gui.py'
        )

    # Run the parser
    option_list = parse_config(config_cpp, config_hpp)

    # Organize data into dictionary with categories as keys
    option_data = {}
    for opt in option_list:
        if opt.option_category not in option_data:
            option_data[opt.option_category] = []
        option_data[opt.option_category].append(opt)

    return option_data
Example #4
def main(config_file="config.yml"):
    """
    Main function that loads config, sets up logging, and runs training

    Args:
        config_file (str): path to config file (for logging)

    Returns:
        None
    """
    logger.info("Training")
    logger.info(f"Loading config {config_file}")
    config = parse_config(config_file)
    logger.info(f"Config: \n{config}")
Example #5
def etl(config_file):
    """
    ETL function that loads raw data, converts to df, and writes to processed dir.
    Args:
        config_file (str): path to config file
    Returns:
        None
    """
    # Parse relevant data from config file
    config = parse_config(config_file)
    raw_data_dir = Path(config["global"]["raw_data_dir"])
    processed_data_dir = Path(config["global"]["processed_data_dir"])

    # Load dataset and convert to pandas dataframe
    logger.info(f"Extract data from CA housing dataset in {raw_data_dir}.")
    #data = datasets.fetch_california_housing(data_home=raw_data_dir)
    #y = pd.DataFrame(data.target, columns=["MedianHouseValue"])
    #X = pd.DataFrame(data.data, columns=data.feature_names)

    # Save as parquet file in processed dir
    logger.info(f"Write processed data to parquet file in {processed_data_dir}.")
    processed_data_dir.mkdir(parents=True, exist_ok=True)
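
The extract and write steps are commented out or omitted in this excerpt; a standalone sketch of what they might look like, assuming scikit-learn, pandas, and a parquet engine such as pyarrow are installed (directory and file names are illustrative):

# Sketch of the elided extract/write steps; paths and file names are illustrative.
from pathlib import Path
import pandas as pd
from sklearn import datasets

raw_data_dir = Path("data/raw")
processed_data_dir = Path("data/processed")
processed_data_dir.mkdir(parents=True, exist_ok=True)

data = datasets.fetch_california_housing(data_home=str(raw_data_dir))
X = pd.DataFrame(data.data, columns=data.feature_names)
y = pd.DataFrame(data.target, columns=["MedianHouseValue"])
X.to_parquet(processed_data_dir / "X.parquet")  # requires pyarrow or fastparquet
y.to_parquet(processed_data_dir / "y.parquet")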
Example #6
    print('Current level of nice:', nice_level)
    nice_increment = 19 - nice_level
    nice_level = os.nice(nice_increment)
    print('Current level of nice:', nice_level)

    tile = "DES0453-4457"
    tile = "DES2327-5248"
    tile = "DES2359+0043"
    # File where you want the input coords saved
    coord_file = tile + '_' + "test_input.txt"
    rcore = "5"

    # Folder where you want the output cats saved
    outdir = ""
    config_file = 'wise2des.cfg'
    cfg = parse_config(config_file=config_file, debug=True)
    release = cfg.get("des", "release")
    datapath = cfg.get("des", "datapath")

    tile = None
    single = False
    args = parse_args(version=None)

    checkplots = args.checkplots
    saveplots = args.saveplots
    nworkers = args.nworkers
    nskip = args.skip
    modulo = args.modulo
    overwrite = args.overwrite
    remainder = args.remainder
    remainder = min(remainder, modulo - 1)
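
The cfg.get(section, option) calls above match the configparser API; a minimal sketch of an INI-backed parse_config under that assumption (the debug flag handling is also an assumption):

# Sketch of an INI-backed parse_config built on configparser.
import configparser

def parse_config(config_file, debug=False):
    cfg = configparser.ConfigParser()
    found = cfg.read(config_file)
    if debug:
        print('Read config file(s):', found)
    return cfg

# Usage mirroring the example above (expects wise2des.cfg with a [des] section):
# cfg = parse_config('wise2des.cfg', debug=True)
# release = cfg.get('des', 'release')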
Example #7
from TOSSIM import *
from parse_config import *
import sys

small_nodes = 3
small_t, small_r = parse_config("small_linkgain.out", small_nodes, "small.out")
small_t.addChannel("SimpleMessageC", sys.stdout)
#small_t.addChannel("Receive", sys.stdout)
for i in xrange(0, small_nodes):
    n = small_t.getNode(i)
    n.bootAtTime(1002 * i + 1)
for i in xrange(0, 1000):
    small_t.runNextEvent()
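
Here parse_config is expected to return a TOSSIM simulator and its radio model built from the link-gain file; a rough sketch under that assumption, using the standard TOSSIM Python bindings (the role of the third argument is not clear from the excerpt and is ignored here):

# Sketch of a gain-file loader for TOSSIM; assumes 'gain <src> <dst> <dB>' lines.
from TOSSIM import Tossim

def parse_config(gain_file, num_nodes, out_file):
    t = Tossim([])
    r = t.radio()
    with open(gain_file) as f:
        for line in f:
            parts = line.split()
            if parts and parts[0] == 'gain':
                r.add(int(parts[1]), int(parts[2]), float(parts[3]))
    return t, r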

Example #8
def container(pytestconfig):
    settings = parse_config(pytestconfig.option.ini_file)
    container = create_container(settings)
    container.configure()
    return container
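
This reads like a pytest fixture; a minimal sketch of how it might be wired together in conftest.py, assuming the --ini_file option is registered there and that parse_config and create_container are importable from the project (the module path below is hypothetical):

# conftest.py sketch; myproject is a hypothetical module path.
import pytest
from myproject import parse_config, create_container

def pytest_addoption(parser):
    parser.addoption('--ini_file', action='store', default='settings.ini')

@pytest.fixture
def container(pytestconfig):
    settings = parse_config(pytestconfig.option.ini_file)
    container = create_container(settings)
    container.configure()
    return container

# A test can then request the fixture by name:
def test_container_is_configured(container):
    assert container is not None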