def feat_import2():
    """ NDP load: a model in lib2 referring to a model defined in lib1. """
    # Three models across two libraries; model2/model3 both pull in
    # lib1.model1 via the backtick cross-library reference syntax.
    data = {
        'lib1.mcdplib/model1.mcdp': "mcdp {}",
        'lib2.mcdplib/model2.mcdp': "`lib1.model1",
        'lib2.mcdplib/model3.mcdp': """\
mcdp {
    a = instance `lib1.model1
}
""",
    }
    root = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(root)
    # lib1's model loads on its own, with a fresh context.
    lib1 = librarian.load_library('lib1')
    _model1 = lib1.load_ndp('model1', context=Context())
    # lib2's models load through a hook-enabled context so that the
    # `lib1.* references can be resolved.
    lib2 = librarian.load_library('lib2')
    context = lib1._generate_context_with_hooks()
    _model2 = lib2.load_ndp('model2', context)
    _model3 = lib2.load_ndp('model3', context)
def go():
    """Sweep the power interval, generate one .mcdp file per value,
    and collect solve statistics.

    Returns a dict with keys 'intervals' (the swept values, in mW)
    and 'results' (one solve_stats result per interval, each with
    the parsed ndp attached under 'ndp').
    """
    librarian = Librarian()
    librarian.find_libraries('../..')
    library = librarian.load_library('droneD_complete_templates')
    library.use_cache_dir('_cached/drone_unc2')
    context = library._generate_context_with_hooks()

    intervals = [0, 0.01, 0.1, 1.0, 5.0, 10.0, 50, 100.0, 250, 500, 1000]
    res = {'intervals': intervals, 'results': []}
    for i, interval_mw in enumerate(intervals):
        code = get_ndp_code(interval_mw=interval_mw)
        ndp = parse_ndp(code, context=context)
        # '.' is not filesystem-friendly in the basename
        basename = ('drone_unc2_%02d_%s_mw' % (i, interval_mw)).replace('.', '_')
        fn = os.path.join('generated', 'drone_unc2', basename + '.mcdp')
        dn = os.path.dirname(fn)
        if not os.path.exists(dn):
            os.makedirs(dn)
        with open(fn, 'w') as f:
            f.write(code)
        print('Generated %s' % fn)
        result = solve_stats(ndp)
        result['ndp'] = ndp
        res['results'].append(result)
    return res
def feat_import5():
    """ Warnings in imports.

    model0 triggers a parser warning; model1 imports it, and we check
    that exactly one warning propagates to the loading context.
    """
    data = {
        'lib1.mcdplib/model0.mcdp': """
mcdp {
    provides f [Nat]
    f <= Nat: 2
}
""",
        'lib1.mcdplib/model1.mcdp': """
mcdp {
    a = instance `model0
}
""",
    }
    d = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(d)
    lib = librarian.load_library('lib1')
    context = lib._generate_context_with_hooks()
    _model2 = lib.load_ndp('model1', context)
    # Use call syntax (valid on Python 2 and 3, and consistent with the
    # other functions in this codebase) instead of the py2-only statement.
    for w in context.warnings:
        print(w.format_user())
    assert_equal(len(context.warnings), 1)
def go():
    """Run the drone_unc2 interval sweep.

    For each interval value (mW): emit the generated model source to
    'generated/drone_unc2/', parse it, and record solve statistics.
    Returns {'intervals': [...], 'results': [...]}.
    """
    librarian = Librarian()
    librarian.find_libraries('../..')
    library = librarian.load_library('droneD_complete_templates')
    library.use_cache_dir('_cached/drone_unc2')
    context = library._generate_context_with_hooks()

    res = {}
    res['intervals'] = [
        0, 0.01, 0.1, 1.0, 5.0, 10.0, 50, 100.0, 250, 500, 1000
    ]
    res['results'] = []
    for index, interval_mw in enumerate(res['intervals']):
        source = get_ndp_code(interval_mw=interval_mw)
        ndp = parse_ndp(source, context=context)
        # sanitize dots so the value can be used in a filename
        basename = ('drone_unc2_%02d_%s_mw' % (index, interval_mw)).replace(
            '.', '_')
        fn = os.path.join('generated', 'drone_unc2', basename + '.mcdp')
        out_dir = os.path.dirname(fn)
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        with open(fn, 'w') as f:
            f.write(source)
        print('Generated %s' % fn)
        result = solve_stats(ndp)
        result['ndp'] = ndp
        res['results'].append(result)
    return res
def process(s):
    """Parse the model source *s* and solve the endurance sweep twice:
    once with an upper approximation, once with a lower one.

    Returns dict(dataL=..., dataU=...) with the two result sets.
    """
    librarian = Librarian()
    librarian.find_libraries('../..')
    library = librarian.load_library('droneD_complete_templates')
    library.use_cache_dir('_cached/drone_unc1')
    ndp = library.parse_ndp(s)

    # Only endurance varies; the rest are held fixed.
    combinations = {
        "endurance": (np.linspace(1, 1.5, 10), "hour"),
        "extra_payload": (100, "g"),
        "num_missions": (1000, "[]"),
        "velocity": (1.0, "m/s"),
        'extra_power': (0.5, 'W'),
    }
    result_like = dict(total_cost="USD", total_mass='kg')
    dataU = solve_combinations(
        ndp, combinations, result_like, upper=1, lower=None)
    dataL = solve_combinations(
        ndp, combinations, result_like, upper=None, lower=1)
    return dict(dataL=dataL, dataU=dataU)
def find_dependencies(config_dirs, maindir, seeds):
    """ returns res, with res['fd'] ~ FindDependencies """
    librarian = Librarian()
    for config_dir in config_dirs:
        librarian.find_libraries(config_dir)

    default_library = librarian.get_library_by_dir(maindir)
    fd = FindDependencies(default_library)

    # With no explicit seeds, enumerate every model of every library.
    if seeds is None:
        seeds = []
        for libname in list(librarian.get_libraries()):
            library = librarian.load_library(libname)
            for name in library.list_spec(SPEC_MODELS):
                seeds.append('%s.%s' % (libname, name))
    else:
        pass

    fd.search(seeds)
    return {'fd': fd}
def feat_import5():
    """ Warnings in imports.

    Loading model1 (which imports model0) must surface exactly one
    warning on the shared context.
    """
    data = {
        'lib1.mcdplib/model0.mcdp': """
mcdp {
    provides f [Nat]
    f <= Nat: 2
}
""",
        'lib1.mcdplib/model1.mcdp': """
mcdp {
    a = instance `model0
}
""",
    }
    d = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(d)
    lib = librarian.load_library('lib1')
    context = lib._generate_context_with_hooks()
    _model2 = lib.load_ndp('model1', context)
    # print(...) call syntax works on both Python 2 and 3, unlike the
    # original py2-only print statement.
    for w in context.warnings:
        print(w.format_user())
    assert_equal(len(context.warnings), 1)
def find_dependencies(config_dirs, maindir, seeds):
    """ returns res, with res['fd'] ~ FindDependencies """
    librarian = Librarian()
    for directory in config_dirs:
        librarian.find_libraries(directory)

    default_library = librarian.get_library_by_dir(maindir)
    fd = FindDependencies(default_library)

    if seeds is None:
        # No seeds given: seed with every model from every library,
        # qualified as '<library>.<model>'.
        seeds = [
            '%s.%s' % (libname, name)
            for libname in list(librarian.get_libraries())
            for name in librarian.load_library(libname).list_ndps()
        ]
    else:
        pass

    fd.search(seeds)
    res = {}
    res['fd'] = fd
    return res
def feat_import1():
    """ Poset load: a poset in lib2 refers to one defined in lib1. """
    data = {
        'lib1.mcdplib/poset1.mcdp_poset': "product(mass:g, energy:J)",
        'lib2.mcdplib/poset2.mcdp_poset': "`lib1.poset1",
    }
    root = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(root)

    # Sanity-check the librarian's index structure.
    libraries = librarian.get_libraries()
    assert 'lib1' in libraries
    assert 'lib2' in libraries
    assert 'path' in libraries['lib1']
    assert 'library' in libraries['lib1']

    lib1 = librarian.load_library('lib1')
    context = lib1._generate_context_with_hooks()
    _poset1 = lib1.load_poset('poset1', context)
    # poset2 resolves `lib1.poset1 through the hooked context.
    lib2 = librarian.load_library('lib2')
    _poset2 = lib2.load_poset('poset2', context)
def feat_import1():
    """ Poset load: cross-library poset reference via backtick syntax. """
    data = {
        'lib1.mcdplib/poset1.mcdp_poset': "product(mass:g, energy:J)",
        'lib2.mcdplib/poset2.mcdp_poset': "`lib1.poset1",
    }
    d = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(d)
    libraries = librarian.get_libraries()
    # Both libraries must have been discovered, each with its metadata.
    for libname in ('lib1', 'lib2'):
        assert libname in libraries
    assert 'path' in libraries['lib1']
    assert 'library' in libraries['lib1']

    lib1 = librarian.load_library('lib1')
    context = lib1._generate_context_with_hooks()
    _poset1 = lib1.load_poset('poset1', context)
    lib2 = librarian.load_library('lib2')
    _poset2 = lib2.load_poset('poset2', context)
def feat_import3():
    """ NDP load: chained references, lib2 -> lib1.model1 -> model0. """
    data = {
        'lib1.mcdplib/model0.mcdp': "mcdp {}",
        'lib1.mcdplib/model1.mcdp': "`model0",
        'lib2.mcdplib/model2.mcdp': "`lib1.model1",
    }
    root = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(root)
    lib2 = librarian.load_library('lib2')
    # Loading model2 must transitively resolve model1 and model0.
    context = lib2._generate_context_with_hooks()
    _model2 = lib2.load_ndp('model2', context)
def feat_import4():
    """Template specialization across libraries: lib2 specializes a
    template that itself is imported from lib1."""
    data = {
        'lib1.mcdplib/model0.mcdp': "mcdp {}",
        'lib1.mcdplib/template1.mcdp_template':
            "template [] mcdp { a = instance `model0 }",
        'lib2.mcdplib/template2.mcdp_template': "`lib1.template1",
        'lib2.mcdplib/model2.mcdp': "specialize [] `template2",
    }
    root = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(root)
    lib2 = librarian.load_library('lib2')
    context = lib2._generate_context_with_hooks()
    _model2 = lib2.load_ndp('model2', context)
def feat_import3():
    """ NDP load: model2 in lib2 follows a two-step reference chain
    (`lib1.model1, which itself is `model0). """
    data = {
        'lib1.mcdplib/model0.mcdp': "mcdp {}",
        'lib1.mcdplib/model1.mcdp': "`model0",
        'lib2.mcdplib/model2.mcdp': "`lib1.model1",
    }
    d = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(d)
    library2 = librarian.load_library('lib2')
    ctx = library2._generate_context_with_hooks()
    _model2 = library2.load_ndp('model2', ctx)
def feat_import4():
    """Cross-library template import: template2 re-exports lib1's
    template1, and model2 specializes it."""
    data = {
        'lib1.mcdplib/model0.mcdp': "mcdp {}",
        'lib1.mcdplib/template1.mcdp_template':
            "template [] mcdp { a = instance `model0 }",
        'lib2.mcdplib/template2.mcdp_template': "`lib1.template1",
        'lib2.mcdplib/model2.mcdp': "specialize [] `template2",
    }
    d = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(d)
    library2 = librarian.load_library('lib2')
    ctx = library2._generate_context_with_hooks()
    _model2 = library2.load_ndp('model2', ctx)
def feat_import2():
    """ NDP load: model3 uses the 'new' keyword to instantiate a model
    from another library. """
    data = {
        'lib1.mcdplib/model1.mcdp': "mcdp {}",
        'lib2.mcdplib/model2.mcdp': "`lib1.model1",
        'lib2.mcdplib/model3.mcdp': """\
mcdp {
    a = new lib1.model1
}
""",
    }
    root = create_hierarchy(data)
    librarian = Librarian()
    librarian.find_libraries(root)
    # model1 loads on its own with a plain context...
    lib1 = librarian.load_library('lib1')
    _model1 = lib1.load_ndp('model1', context=Context())
    # ...while lib2's models need the hook-enabled context to resolve
    # the cross-library names.
    lib2 = librarian.load_library('lib2')
    context = lib1._generate_context_with_hooks()
    _model2 = lib2.load_ndp('model2', context)
    _model3 = lib2.load_ndp('model3', context)
def go():
    """Solve the droneC model over a diagonal sweep of endurance and
    payload, then render the result plots to 'out/droneC_c1.html'.
    """
    model_name = 'droneC'

    # Build one query per (endurance, payload) pair along the diagonal.
    n = 10
    endurance_values = np.linspace(1, 20, n)
    payload_values = np.linspace(5, 50, n)
    queries = []
    for endurance_i, payload_i in zip(endurance_values, payload_values):
        queries.append({
            "num_missions": (1000, "[]"),
            "extra_power": (5, "W"),
            "extra_payload": (payload_i, "g"),
            "endurance": (endurance_i, "minutes"),
        })

    result_like = dict(total_cost="CHF", total_mass='kg')
    what_to_plot_res = result_like
    what_to_plot_fun = dict(extra_payload="g", endurance="minutes")

    librarian = Librarian()
    librarian.find_libraries('..')
    lib = librarian.load_library('droneC_cost_v1')
    ndp = lib.load_ndp(model_name)
    data = solve_queries(ndp, queries, result_like)

    r = Report()
    plot_all_directions(r,
                        queries=data['queries'],
                        results=data['results'],
                        what_to_plot_res=what_to_plot_res,
                        what_to_plot_fun=what_to_plot_fun)
    fn = 'out/droneC_c1.html'
    print('writing to %r' % fn)
    r.to_html(fn)
def process(s):
    """Solve the parsed model over a sweep of endurance values, once
    per approximation direction.

    Returns a dict with 'dataL' (lower=1) and 'dataU' (upper=1).
    """
    librarian = Librarian()
    librarian.find_libraries('../..')
    library = librarian.load_library('droneD_complete_templates')
    library.use_cache_dir('_cached/drone_unc1')
    ndp = library.parse_ndp(s)

    combinations = {
        "endurance": (np.linspace(1, 1.5, 10), "hour"),  # the only swept axis
        "extra_payload": (100, "g"),
        "num_missions": (1000, "[]"),
        "velocity": (1.0, "m/s"),
        'extra_power': (0.5, 'W'),
    }
    result_like = dict(total_cost="USD", total_mass='kg')

    dataU = solve_combinations(ndp, combinations, result_like,
                               upper=1, lower=None)
    dataL = solve_combinations(ndp, combinations, result_like,
                               upper=None, lower=1)
    return dict(dataL=dataL, dataU=dataU)
def go(algo):
    """Benchmark solve_stats with the given *algo* over a range of
    problem sizes n.

    Returns {'n': [...], 'results': [...]} with one result (carrying
    its ndp under 'ndp') per size.
    """
    librarian = Librarian()
    librarian.find_libraries('../..')
    library = librarian.load_library('droneD_complete_templates')
    library.use_cache_dir('_cached/drone_unc3')
    context = library._generate_context_with_hooks()

    sizes = [
        1, 5, 10, 15, 25, 50, 61, 75, 92, 100, 125, 150, 160, 175, 182,
        200, 300, 400, 500, 600, 1000, 1500
    ]
    # Alternative size sets used in earlier runs:
    # sizes = [1, 5, 10, 15, 25, 50, 61, 100]
    # sizes = [3000]

    res = {'n': sizes, 'results': []}
    for n in sizes:
        ndp = create_ndp(context)
        result = solve_stats(ndp=ndp, n=n, algo=algo)
        result['ndp'] = ndp
        res['results'].append(result)
    return res