Example #1
 def test_mdao_config_artifacts(self):
     os.chdir(_this_dir)
     run_mdao.run('mdao_config_artifacts.json')
     with open('output.csv') as output:
         csvreader = iter(csv.reader(output))
         headers = next(csvreader)
         for row in csvreader:
             guid = row[headers.index('GUID')]
             self.assertTrue(os.path.isfile(os.path.join('artifacts', guid, 'bin.out')))
Example #2
 def test_mdao_config_file_testbench(self):
     os.chdir(_this_dir)
     run_mdao.run('mdao_config_file_testbench.json')
     with open('output.csv') as output:
         csvreader = iter(csv.reader(output))
         headers = next(csvreader)
         for row in csvreader:
             checksum_file = row[headers.index('checksum')]
             checksum_expected = str(int(hashlib.md5('sample data').hexdigest(), 16))
             self.assertEqual(checksum_file, checksum_expected)
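The expected checksum above hashes a native str, which only works on Python 2; Python 3's hashlib requires a bytes argument. A minimal sketch of the equivalent Python 3 safe computation (it yields the same value for this ASCII input):

    import hashlib

    # hashlib.md5 needs bytes on Python 3; the ASCII literal produces the same
    # digest as the Python 2 str version used in the test above.
    checksum_expected = str(int(hashlib.md5(b'sample data').hexdigest(), 16))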
Example #3
 def test_run_csv(self):
     import csv
     with open(os.path.join(_this_dir, 'run_csv.csv'), 'rU') as csv_input_file:
         with open(os.path.join(_this_dir, 'run_csv_input.csv'), 'wb') as csv_desvar_file:
             writer = csv.writer(csv_desvar_file)
             reader = csv.reader(csv_input_file)
             for row in reader:
                 writer.writerow(row[0:2])
     with run_regression(os.path.join(_this_dir, 'run_csv.csv')):
         driver = run_mdao.drivers.CsvDriver(_this_dir, 'run_csv_input.csv')
         run_mdao.run('mdao_config_basic_CyPhy.json', override_driver=driver)
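This variant relies on Python 2 specific file modes ('rU' for universal newlines on the input and 'wb' for the csv writer). Example #5 below is the same test rewritten to run under both Python 2 and 3, using io.open with newline='' and an explicit conversion of each field to the native str type.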
Example #4
def test_main():
    tb_json_names = glob.glob(os.path.join(_this_dir, '*/testbench_manifest.json'))
    tb_jsons = [(tb_json_name, open(tb_json_name, 'r').read()) for tb_json_name in tb_json_names]
    try:
        for input_filename in glob.glob(_this_dir + '/mdao_config*json'):
            output_filename = input_filename + '.output.csv'
            with run_regression(output_filename):
                run_mdao.run(input_filename)

        with run_regression(os.path.join(_this_dir, 'single_run.csv')):
            run_mdao.run_one('mdao_config_constant.json', (('designVariable.Naca_Code', 4040), ))

        with run_regression(os.path.join(_this_dir, 'run_failure.csv')):
            run_mdao.run_one('mdao_config_basic_CyPhy.json', (('designVariable.y', 0), ('designVariable.x', 'Ia')))

    finally:
        for tb_json_name, contents in tb_jsons:
            with open(tb_json_name, 'w') as tb_json:
                tb_json.write(contents)
Example #5
 def test_run_csv(self):
     import csv
     import io
     with io.open(os.path.join(_this_dir, 'run_csv.csv'),
                  'r',
                  encoding='utf8',
                  newline='') as csv_input_file:
         mode = 'w'
         if six.PY2:
             mode += 'b'
             open_kwargs = {}
         else:
             open_kwargs = {'newline': ''}
         with io.open(os.path.join(_this_dir, 'run_csv_input.csv'), mode,
                      **open_kwargs) as csv_desvar_file:
             writer = csv.writer(csv_desvar_file)
             reader = csv.reader(csv_input_file)
             for row in reader:
                 writer.writerow(map(type(''), row[1:3]))
     with run_regression(os.path.join(_this_dir, 'run_csv.csv')):
         driver = run_mdao.drivers.CsvDriver(_this_dir, 'run_csv_input.csv')
         run_mdao.run('mdao_config_basic_CyPhy.json',
                      override_driver=driver)
Example #6
 def _test_mdao_config(self, input_filename, output_filename):
     with run_regression(output_filename):
         run_mdao.run(input_filename)
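The tests above all lean on a run_regression(...) context manager that is not included in these snippets. A rough sketch of what such a helper could look like, assuming it lets the body produce the output CSV and then diffs it against a previously recorded baseline; only the helper's name and usage come from the tests, the '.regression' suffix and the row-level comparison are assumptions:

    import contextlib
    import csv

    @contextlib.contextmanager
    def run_regression(output_csv):
        # Let the body run (it writes output_csv), then compare the result
        # against a stored baseline; the baseline naming is hypothetical.
        yield
        with open(output_csv) as actual_file, \
                open(output_csv + '.regression') as expected_file:
            actual = sorted(csv.reader(actual_file))
            expected = sorted(csv.reader(expected_file))
            assert actual == expected, 'regression mismatch for {}'.format(output_csv)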
Example #7
            mdao_component.solve_nonlinear(
                {
                    run_mdao._get_param_name(name, component.get('type')):
                    parameter['value']
                    for name, parameter in six.iteritems(
                        component.get('parameters', {}))
                }, unknowns, resids)
            if component.get('type',
                             'TestBenchComponent') == 'TestBenchComponent':
                if mdao_component.ret_code != 0:
                    raise Exception('Component execution failed')

            def serialize(val):
                if isinstance(val, numpy.ndarray):
                    return val.tolist()
                return val

            sys.stdout.write(
                json.dumps({
                    name: serialize(val)
                    for name, val in six.iteritems(unknowns)
                }))

    else:
        run_kwargs = {}
        if args.desvar_input:
            original_dir = os.path.dirname(os.path.abspath(args.filename))
            run_kwargs['override_driver'] = CsvDriver(original_dir,
                                                      args.desvar_input)

        run_kwargs['append_csv'] = args.append_csv
        run_kwargs['profile'] = args.profile

        run_mdao.run(args.filename, **run_kwargs)
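This snippet is the tail of a command line entry point (compare Example #9); the argparse declarations that populate args.desvar_input, args.append_csv and args.profile are not shown. A sketch of how they might be declared follows; the positional argument mirrors Example #9, but the option spellings are assumptions inferred from the attribute names:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('filename', nargs='?', default='mdao_config.json')
    # The flag names below are guesses derived from args.desvar_input,
    # args.append_csv and args.profile; only the attributes appear above.
    parser.add_argument('--desvar-input', dest='desvar_input', default=None)
    parser.add_argument('--append-csv', dest='append_csv', action='store_true')
    parser.add_argument('--profile', action='store_true')
    args = parser.parse_args()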
Example #8
    os.chdir(os.path.dirname(input_filename))
    mdao_config = json.load(open(input_filename))
    zbuff = StringIO.StringIO()
    with zipfile.ZipFile(zbuff, 'w') as zf:
        # this might be too fancy, we can zip everything in 'root_dir'
        # FIXME will not work for FMU components
        for component in six.itervalues(mdao_config['components']):
            try:
                component_dir = component['details']['directory']
            except KeyError:
                continue
            for root, dirs, files in os.walk(component_dir):
                for file in files:
                    zf.write(os.path.join(root, file))
        zf.write(input_filename, 'mdao_config.json')
    zipkey = 'run_mdao_config_' + hashlib.sha224(zbuff.getvalue()).hexdigest()
    redis_conn.set(zipkey, zbuff.getvalue())
    import run_mdao.drivers
    import run_mdao.celery_tasks

    def _run_one(self, problem, run):
        task = run_mdao.celery_tasks.run_one.delay(zipkey, run)
        task.get(timeout=TASK_TIMEOUT_S)

        self.iter_count += 1
        if self.use_restart:
            self.restart.record_iteration()

    run_mdao.drivers.PredeterminedRunsDriver.run_one = _run_one
    run_mdao.run(input_filename)
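Here PredeterminedRunsDriver.run_one is monkey-patched so each run is handed to a Celery task along with the Redis key of the zipped workspace. The worker side, run_mdao.celery_tasks.run_one, is not shown; a speculative sketch under the assumption that it pulls the archive back out of Redis, unpacks it into a scratch directory and executes the run there (everything except the Redis, zipfile and run_mdao.run_one calls is made up for illustration):

    import io
    import os
    import tempfile
    import zipfile

    def run_one_worker(redis_conn, zipkey, run):
        # Hypothetical counterpart to run_mdao.celery_tasks.run_one: restore the
        # workspace that the driver stored in Redis, then execute the single run.
        workdir = tempfile.mkdtemp(prefix='run_mdao_')
        with zipfile.ZipFile(io.BytesIO(redis_conn.get(zipkey))) as zf:
            zf.extractall(workdir)
        os.chdir(workdir)
        import run_mdao
        # The exact format of 'run' (design-variable pairs vs. dict) is assumed.
        run_mdao.run_one('mdao_config.json', run)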
Example #9
    parser.add_argument('filename', nargs='?', default='mdao_config.json')
    parser.add_argument('--one-component', help='component name')

    args = parser.parse_args()

    if args.one_component:
        filename = args.filename
        with open(filename, 'r') as mdao_config_json:
            mdao_config = json.load(mdao_config_json)
        component = mdao_config['components'][args.one_component]
        if component.get('type') == 'IndepVarComp':
            raise Exception("Doesn't make sense to run IndepVarComp")
        with run_mdao.with_problem(mdao_config, os.path.dirname(os.path.abspath(filename))) as top:
            # print repr({name: parameter['value'] for name, parameter in six.iteritems(component.get('parameters', {}))})
            mdao_component = {c.name: c for c in top.root.components()}[args.one_component]
            unknowns = {}
            resids = {}
            mdao_component.solve_nonlinear({run_mdao._get_param_name(name, component.get('type')): parameter['value'] for name, parameter in six.iteritems(component.get('parameters', {}))}, unknowns, resids)
            if component.get('type', 'TestBenchComponent') == 'TestBenchComponent':
                if mdao_component.ret_code != 0:
                    raise Exception('Component execution failed')

            def serialize(val):
                if isinstance(val, numpy.ndarray):
                    return val.tolist()
                return val
            sys.stdout.write(json.dumps({name: serialize(val) for name, val in six.iteritems(unknowns)}))

    else:
        run_mdao.run(args.filename)
Example #10
    def _testTestBench(self, context, master, config):
        result = master.RunInTransactionWithConfigLight(config)[0]
        if not result.Success:
            self.fail(result.Exception)
        print('Output directory is {}'.format(result.OutputDirectory))

        project = context.Project
        project.BeginTransactionInNewTerr()
        try:
            kind = context.Meta.Name
        finally:
            project.AbortTransaction()

        if kind == 'ParametricExploration':
            import run_mdao
            import openmdao.api
            originalDir = os.getcwd()
            test = self

            class ConstraintCheckingRecorder(openmdao.api.BaseRecorder):
                def record_metadata(self, group):
                    pass

                def record_derivatives(self, derivs, metadata):
                    pass

                def record_iteration(self, *args, **kwargs):
                    test._checkParametricExplorationMetrics(context, self.root)

                def close(self):
                    pass

                def startup(self, root):
                    super(ConstraintCheckingRecorder, self).startup(root)
                    self.root = root

            os.chdir(result.OutputDirectory)
            try:
                mdao_top = run_mdao.run(
                    'mdao_config.json',
                    additional_recorders=[ConstraintCheckingRecorder()])
            finally:
                os.chdir(originalDir)
            self._checkParametricExplorationMetrics(context, mdao_top.root)
        else:
            try:
                subprocess.check_call(
                    (os.path.join(meta_path,
                                  r'bin\Python27\Scripts\python.exe'), '-m',
                     'testbenchexecutor', '--detailed-errors',
                     'testbench_manifest.json'),
                    cwd=result.OutputDirectory)
            except:
                failed_txt = os.path.join(result.OutputDirectory,
                                          '_FAILED.txt')
                if os.path.isfile(failed_txt):
                    print(open(failed_txt, 'r').read())
                raise

            with open(
                    os.path.join(result.OutputDirectory,
                                 'testbench_manifest.json')) as manifest_file:
                manifest = json.load(manifest_file)

            self.assertEqual(manifest['Status'], 'OK')
            self._checkTestBenchMetrics(context, manifest,
                                        result.OutputDirectory)