def randomHourMinutes_calls(root_path=ROOT):
    """Build the shell calls for a one-hour random-trip SUMO run on the Eich net.

    Parameters
    ----------
    root_path : str
        Project root containing the ``data/sumo`` and ``temp`` directories.

    Returns
    -------
    list[list]
        Two argv lists: first the ``randomTrips.py`` trip generator,
        then the ``sumo`` simulation call.
    """
    calls = []
    net_file = '%s/data/sumo/eich.net.xml' % root_path
    rou_file = '%s/data/sumo/eich.rou.xml' % root_path
    add_file = '%s/data/sumo/eich.add.xml' % root_path
    trip_file = '%s/temp/eich.trip.xml' % root_path
    # NOTE: an unused local ('%s/temp/eich.out.xml') was removed here; nothing
    # in this function referenced it.
    trip_generator = '%s/tools/trip/randomTrips.py' % SUMO_HOME
    # Trip generation: random trips over one simulated hour.
    args = dict_to_list({
        'python': trip_generator,
        '-e': SECONDS_IN_HOUR,
        '-n': net_file,
        '-o': trip_file,
        '-r': rou_file,
    })
    calls.append(args)
    # Simulation call; -W suppresses SUMO warnings, teleporting is disabled.
    args = {
        '--net-file': net_file,
        '--route-files': rou_file,
        '--additional-files': add_file,
        '--begin': 0,
        '--end': SECONDS_IN_HOUR,
        '--time-to-teleport': 0,
    }
    calls.append(['sumo', '-W'] + dict_to_list(args))
    return calls
def find_all_equilibria(self):
    """Attempt to find all Nash equilibria for a game.

    Returns a list of dicts; keys are symbols, values are probabilities
    (numbers or symbols).
    """
    result = []
    # The trailing payoff axis indexes players, so the leading axes give each
    # player's action count.
    action_shape = self.payoffs.shape[:-1]
    possible_actions = [
        list(range(player_actions)) for player_actions in action_shape
    ]
    # NOTE(review): dominated strategies should ideally be eliminated first;
    # that step is skipped for now (as the original author noted).
    for acombo in iter_subset_combos(possible_actions):
        if is_pure(acombo):
            # Pure profile: each player plays their single action with
            # probability 1.
            profile = [[[player_action[0], 1]] for player_action in acombo]
            if not self.is_dominated(profile):
                profile_dict = [
                    list_to_dict(player_profile)
                    for player_profile in profile
                ]
                result.append(profile_dict)
        else:
            # Mixed support: solve the indifference conditions for this
            # action combination, then keep only undominated solutions.
            combo_solutions = self._get_indifference_probs(acombo)
            for asol in combo_solutions:
                carnate = self._carnate_profile(asol)
                listy = [dict_to_list(elm) for elm in carnate]
                if not self.is_dominated(listy):
                    result.append(asol)
    return result
def __str__(self):
    """Render the command roughly as originally parsed.

    Joins the head token with the flattened argument list, space-separated.
    """
    # TODO Make strings map to bracketed expressions so that the str/parse
    # relationship holds.
    tokens = [self.head] + dict_to_list(self.args)
    return ' '.join(str(token) for token in tokens)
def altruism_calls(out_path=DEFAULT_OUT_PATH):
    """Build the NetLogo headless calls for the Altruism sample experiment.

    Returns argv lists: the headless experiment run writing a table to
    *out_path*, followed by a ``cat`` of that table.
    """
    run_args = dict_to_list({
        '--model': '%s/models/Sample Models/Biology/Evolution/Altruism.nlogo' % NETLOGO_HOME,
        '--setup-file': '%s/data/altruism/experiment.xml' % ROOT,
        '--experiment': 'TestExperiment',
        '--table': out_path,
    })
    return [
        ['%s/netlogo-headless.sh' % NETLOGO_HOME] + run_args,
        ['cat', out_path],
    ]
def output_markdown(doc, stream):
    """Write a Markdown report of a parsed pfSense configuration to *stream*.

    Emits one section per configuration area (system, interfaces, VLANs,
    bridges, gateways, routes, DHCP, aliases, NAT, filter rules, DNSmasq,
    OpenVPN, syslog, tunables). Sections absent from *doc* are skipped via
    hasattr_r checks.
    """
    stream.write("# pfSense\n")
    stream.write("Version {}\n".format(doc.pfsense.version))
    stream.write("\n")
    # The system section is emitted unconditionally.
    stream.write("## System\n")
    info = obj_to_dict(doc.pfsense.system,
                       ('hostname', 'domain', 'timeservers', 'timezone',
                        'language', 'dnsserver'))
    output_markdown_table(stream, ('Option', 'Value'), info.items())
    stream.write("\n")
    if hasattr_r(doc.pfsense, 'interfaces'):
        stream.write("## Interfaces\n")
        # Sort by interface name for deterministic output.
        interfaces = sorted(doc.pfsense.interfaces.data.items(),
                            key=lambda interface: interface[0])
        interfaces = [
            [interface_name] + dict_to_list(interface_data,
                                            ('enable', 'descr', 'if',
                                             'ipaddr', 'subnet'))
            for interface_name, interface_data in interfaces
        ]
        output_markdown_table(stream,
                              ('Name', 'Enabled', 'Description', 'Interface',
                               'Address', 'Subnet'), interfaces)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'vlans.vlan'):
        stream.write("## VLANs\n")
        vlans = [
            obj_to_list(vlan, ('vlanif', 'tag', 'if', 'descr'))
            for vlan in doc.pfsense.vlans.vlan
        ]
        output_markdown_table(stream,
                              ('Name', 'Tag', 'Interface', 'Description'),
                              vlans)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'bridges.bridged'):
        stream.write("## Bridges\n")
        bridges = [
            obj_to_list(bridge, ('bridgeif', 'members', 'descr'))
            for bridge in doc.pfsense.bridges.bridged
        ]
        output_markdown_table(stream, ('Name', 'Members', 'Description'),
                              bridges)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'gateways.gateway_item'):
        stream.write("## Gateways\n")
        gateways = [
            obj_to_list(gateway,
                        ('defaultgw', 'name', 'interface', 'gateway',
                         'weight', 'ipprotocol', 'descr'))
            for gateway in doc.pfsense.gateways.gateway_item
        ]
        output_markdown_table(stream,
                              ('Default', 'Name', 'Interface', 'Gateway',
                               'Weight', 'IP', 'Description'), gateways)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'staticroutes.route'):
        stream.write("## Static routes\n")
        routes = [
            obj_to_list(route, ('network', 'gateway', 'descr'))
            for route in doc.pfsense.staticroutes.route
        ]
        output_markdown_table(stream, ('Network', 'Gateway', 'Description'),
                              routes)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'dhcpd'):
        stream.write("## DHCP ranges\n")
        # One sub-section per DHCP-serving interface, sorted by name.
        for dhcpd_interface_name in sorted(doc.pfsense.dhcpd.data.keys()):
            dhcpd_interface = PfSenseRuleInterface(parent=doc.pfsense.dhcpd)
            dhcpd_interface.string = dhcpd_interface_name
            stream.write("### DHCPd configuration for {}\n".format(
                format_markdown_cell(dhcpd_interface)))
            dhcpd = getattr(doc.pfsense.dhcpd, dhcpd_interface_name)
            dhcpd_dict = obj_to_dict(
                dhcpd, ('enable', 'defaultleasetime', 'maxleasetime'))
            output_markdown_table(stream, ('Option', 'Value'),
                                  dhcpd_dict.items())
            stream.write("\n")
            if hasattr_r(dhcpd, 'range'):
                stream.write("#### Ranges\n")
                # NOTE(review): the loop variable shadows the builtin `range`.
                ranges = [
                    obj_to_list(range, ('from', 'to')) for range in dhcpd.range
                ]
                output_markdown_table(stream, ('From', 'To'), ranges)
                stream.write("\n")
            if hasattr_r(dhcpd, 'staticmap'):
                stream.write("#### Static mappings\n")
                staticmaps = [
                    obj_to_list(staticmap, ('mac', 'ipaddr', 'hostname'))
                    for staticmap in dhcpd.staticmap
                ]
                output_markdown_table(stream, ('MAC', 'Address', 'Hostname'),
                                      staticmaps)
                stream.write("\n")
            stream.write("\n")
    if hasattr_r(doc.pfsense, 'aliases.alias'):
        stream.write("## Aliases\n")
        aliases = [
            obj_to_list(alias,
                        ('name', 'type', 'address', 'descr', 'detail'))
            for alias in doc.pfsense.aliases.alias
        ]
        output_markdown_table(
            stream, ('Name', 'Type', 'Address', 'Description', 'Detail'),
            aliases)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'nat.rule'):
        stream.write("## NAT rules\n")
        rules = [
            obj_to_list(rule,
                        ('disabled', 'interface', 'source', 'destination',
                         'protocol', 'target', 'local_port', 'descr'))
            for rule in doc.pfsense.nat.rule
        ]
        output_markdown_table(
            stream,
            ('Disabled', 'Interface', 'Source', 'Destination', 'Protocol',
             'Target', 'Local port', 'Description'), rules)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'nat.outbound.rule'):
        stream.write("## Outbound NAT rules\n")
        rules = [
            obj_to_list(rule,
                        ('disabled', 'interface', 'source', 'destination',
                         'dstport', 'protocol', 'target', 'descr'))
            for rule in doc.pfsense.nat.outbound.rule
        ]
        output_markdown_table(
            stream,
            ('Disabled', 'Interface', 'Source', 'Destination',
             'Destination port', 'Protocol', 'Target', 'Description'), rules)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'filter.rule'):
        stream.write("## Filter rules\n")
        rules = [
            obj_to_list(rule,
                        ('disabled', 'interface', 'type', 'ipprotocol',
                         'protocol', 'source', 'destination', 'descr'))
            for rule in doc.pfsense.filter.rule
        ]
        output_markdown_table(
            stream,
            ('Disabled', 'Interface', 'Type', 'IP', 'Protocol', 'Source',
             'Destination', 'Description'), rules)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'dnsmasq'):
        stream.write("## DNSmasq configuration\n")
        dnsmasq = obj_to_dict(doc.pfsense.dnsmasq,
                              ('enable', 'regdhcp', 'regdhcpstatic',
                               'strict_order', 'custom_options', 'interface'))
        output_markdown_table(stream, ('Option', 'Value'), dnsmasq.items())
        stream.write("\n")
        if hasattr_r(doc.pfsense.dnsmasq, 'hosts'):
            stream.write("### Host overrides\n")
            hosts = [
                obj_to_dict(host, ('host', 'domain', 'ip', 'descr', 'aliases'))
                for host in doc.pfsense.dnsmasq.hosts
            ]
            # Each host row is followed by one row per alias. The lambda copies
            # the host's ip onto each alias and maps item.description onto
            # descr via setattr side effects, then yields item.data (the [-1]
            # picks the data dict from the side-effect tuple).
            hostlists = [[host] + list(
                map(
                    lambda item: (setattr(item, 'ip', host['ip']),
                                  setattr(item, 'descr', item.description),
                                  item.data)[-1],
                    getattr(host['aliases'], 'item', [])))
                for host in hosts]
            hosts = [
                dict_to_list(host, ('host', 'domain', 'ip', 'descr'))
                for hostlist in hostlists for host in hostlist
            ]
            output_markdown_table(stream,
                                  ('Host', 'Domain', 'IP', 'Description'),
                                  hosts)
            stream.write("\n")
        if hasattr_r(doc.pfsense.dnsmasq, 'domainoverrides'):
            stream.write("### Domain overrides\n")
            domains = [
                obj_to_list(domain, ('domain', 'ip', 'descr'))
                for domain in doc.pfsense.dnsmasq.domainoverrides
            ]
            output_markdown_table(stream, ('Domain', 'IP', 'Description'),
                                  domains)
            stream.write("\n")
    if hasattr_r(doc.pfsense, 'openvpn.openvpn_server'):
        stream.write("## OpenVPN servers\n")
        openvpn_servers = [
            obj_to_dict(openvpn_server,
                        ('vpnid', 'mode', 'authmode', 'protocol', 'dev_mode',
                         'interface', 'ipaddr', 'local_port', 'crypto',
                         'digest', 'tunnel_network', 'remote_network',
                         'local_network', 'dynamic_ip', 'pool_enable',
                         'topology', 'description', 'custom_options'))
            for openvpn_server in doc.pfsense.openvpn.openvpn_server
        ]
        # One sub-section per server, titled by its description.
        for openvpn_server in openvpn_servers:
            stream.write("### {}\n".format(
                format_markdown_cell(openvpn_server['description'])))
            output_markdown_table(stream, ('Option', 'Value'),
                                  openvpn_server.items())
            stream.write("\n")
    if hasattr_r(doc.pfsense, 'openvpn.openvpn_client'):
        stream.write("## OpenVPN clients\n")
        openvpn_clients = [
            obj_to_dict(openvpn_client,
                        ('vpnid', 'auth_user', 'mode', 'protocol', 'dev_mode',
                         'interface', 'ipaddr', 'local_port', 'server_addr',
                         'server_port', 'crypto', 'digest', 'tunnel_network',
                         'remote_network', 'local_network', 'topology',
                         'description', 'custom_options'))
            for openvpn_client in doc.pfsense.openvpn.openvpn_client
        ]
        for openvpn_client in openvpn_clients:
            stream.write("### {}\n".format(
                format_markdown_cell(openvpn_client['description'])))
            output_markdown_table(stream, ('Option', 'Value'),
                                  openvpn_client.items())
            stream.write("\n")
    if hasattr_r(doc.pfsense, 'openvpn.openvpn_csc'):
        stream.write("## OpenVPN client specific overrides\n")
        cscs = [
            obj_to_list(csc,
                        ('server_list', 'common_name', 'description',
                         'tunnel_network'))
            for csc in doc.pfsense.openvpn.openvpn_csc
        ]
        output_markdown_table(
            stream,
            ('VPN IDs', 'Common Name', 'Description', 'Tunnel Network'), cscs)
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'syslog'):
        stream.write("## Syslog configuration\n")
        syslog = obj_to_dict(
            doc.pfsense.syslog,
            ('enable', 'logall', 'logfilesize', 'nentries', 'remoteserver',
             'remoteserver2', 'remoteserver3', 'sourceip', 'ipproto'))
        output_markdown_table(stream, ('Option', 'Value'), syslog.items())
        stream.write("\n")
    if hasattr_r(doc.pfsense, 'sysctl.item'):
        stream.write("## System tunables\n")
        tunables = [
            obj_to_list(tunable, ('tunable', 'value', 'descr'))
            for tunable in doc.pfsense.sysctl.item
        ]
        output_markdown_table(stream, ('Name', 'Value', 'Description'),
                              tunables)
        stream.write("\n")
def put(self):
    """Persist configuration updates from the JSON request body.

    The 'recaptcha_forms' entry arrives as a mapping and is normalized to a
    list before the config entity is populated and stored.
    """
    forms = util.dict_to_list(request.json['recaptcha_forms'])
    request.json['recaptcha_forms'] = forms
    CONFIG_DB.populate(**request.json)
    CONFIG_DB.put()
    return make_empty_ok_response()
def put(self):
    """Store updated configuration taken from the JSON request body.

    Converts the 'recaptcha_forms' mapping into list form, then populates
    and saves the config entity.
    """
    body = request.json
    body['recaptcha_forms'] = util.dict_to_list(body['recaptcha_forms'])
    CONFIG_DB.populate(**body)
    CONFIG_DB.put()
    return make_empty_ok_response()
async def patch_plano_trabalho(
    cod_plano: str,
    plano_trabalho: schemas.PlanoTrabalhoUpdateSchema,
    db: Session = Depends(get_db),
    token: str = Depends(oauth2_scheme),
    user: User = Depends(fastapi_users.current_user(active=True))
):
    "Updates an existing work plan in the fields provided."
    # Input validation according to business rules: the path parameter must
    # match the payload's cod_plano.
    if cod_plano != plano_trabalho.cod_plano:
        raise HTTPException(
            status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="Parâmetro cod_plano diferente do conteúdo do JSON")
    db_plano_trabalho = crud.get_plano_trabalho(db, user.cod_unidade,
                                                cod_plano)
    # PATCH only applies to an existing resource.
    if db_plano_trabalho is None:
        raise HTTPException(
            status.HTTP_404_NOT_FOUND,
            detail="Só é possível aplicar PATCH em um recurso"+
            " existente.")
    # A user may only modify plans belonging to their own unit.
    if db_plano_trabalho.cod_unidade != user.cod_unidade:
        raise HTTPException(
            status.HTTP_403_FORBIDDEN,
            detail="Usuário não pode alterar Plano de Trabalho"+
            " de outra unidade.")

    # Update the plain attributes, except the activities list (merged below).
    merged_plano_trabalho = util.sa_obj_to_dict(db_plano_trabalho)
    patch_plano_trabalho = plano_trabalho.dict(exclude_unset=True)
    if patch_plano_trabalho.get("atividades", None):
        del patch_plano_trabalho["atividades"]
    merged_plano_trabalho.update(patch_plano_trabalho)

    # Update the activities:
    # fetch the activities currently stored in the database, keyed by id.
    db_atividades = util.list_to_dict(
        [
            util.sa_obj_to_dict(atividade)
            for atividade in getattr(db_plano_trabalho, "atividades", list())
        ],
        "id_atividade"
    )
    # Activities to be modified, taken from the patch payload.
    patch_atividades = util.list_to_dict(
        plano_trabalho.dict(exclude_unset=True).get("atividades", list()),
        "id_atividade"
    )
    merged_atividades = util.merge_dicts(db_atividades, patch_atividades)
    merged_plano_trabalho["atividades"] = util.dict_to_list(
        merged_atividades,
        "id_atividade"
    )

    # Schema validation requires the "atividades" attribute to be present,
    # even if it is an empty list.
    if merged_plano_trabalho.get("atividades", None) is None:
        merged_plano_trabalho["atividades"] = []

    # Validate the schema of the updated work plan; surface pydantic errors
    # as a 422 with the structured error detail.
    try:
        merged_schema = schemas.PlanoTrabalhoSchema(**merged_plano_trabalho)
    except ValidationError as e:
        raise HTTPException(
            status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=json.loads(e.json())
        )
    crud.update_plano_trabalho(db, merged_schema, user.cod_unidade)
    return merged_plano_trabalho
def generate():
    """
    URL: `/`

    Use submitted values to generate a profile.
    """
    # Retain selected values so the form re-renders with the user's choices,
    # and accumulate them into the profile notes.
    profile_notes_list = []
    cache_d = get_cache()
    dropdown_data = cache_d["dropdown_data"]
    for coord, cat_data in dropdown_data.items():
        profile_notes_list.append("{}: {}".format(cat_data["category"],
                                                  request.values.get(coord, "")))
        selected_value = request.values.get(coord, "")
        try:
            selected_index = cat_data["values"].index(selected_value)
        except ValueError:
            # Fall back to the first entry when the submitted value is not a
            # known option.
            selected_index = 0
        cat_data["selected_idx"] = selected_index
    profile_notes = ", ".join(profile_notes_list)
    profile_title = request.values.get("profile_title")
    if not profile_title:
        profile_title = "DEPG"
    # Calculate params based on provided input
    sol = xl.calculate(
        inputs={addr(k): v for k, v in dict(request.values).items()}
    )
    # Pull out values we care about and round as needed
    result_d = {}
    for key, coord in get_config()["result_coords"].items():
        result_rounding = get_config()["result_rounding"][key]
        value = sol[addr(coord)].value[0,0]
        if result_rounding is not None:
            result_d[key] = round(decimal.Decimal(value), result_rounding)
        else:
            result_d[key] = value
    # Add profile_title, target weight, pressure peak, and shot time as well
    profile_notes += "\n\nPressure peak: {}, Stop at weight: {}, Time: {}" . \
        format(result_d["graph_pressure_peak"], result_d["graph_stop_on_weight"],
               result_d["graph_time"])
    result_d["profile_notes"] = profile_notes
    result_d["profile_title"] = profile_title
    # Fill in profile base and advanced steps; string values are treated as
    # templates and formatted with the computed results.
    base = {}
    for k,v in get_config()["profile"].items():
        if k == "advanced_shot":
            continue
        if type(v) is str:
            base[k] = v.format(**result_d)
        else:
            base[k] = v
    steps = []
    for raw_step in get_config()["profile"]["advanced_shot"]:
        step = {}
        for k,v in raw_step.items():
            if type(v) is str:
                step[k] = v.format(**result_d)
            else:
                step[k] = v
        steps.append(step)
    # Convert to TCL.
    # NOTE(review): tkinter._stringify is a private API — verify it still
    # exists when upgrading Python/Tk.
    steps_tcl = [tkinter._stringify(list(dict_to_list(e))) for e in steps]
    profile = "advanced_shot {" + " ".join(steps_tcl) + "}\n"
    for k,v in base.items():
        profile += "{} {}\n".format(tkinter._stringify(k), tkinter._stringify(v))
    return render_template('index.html', profile_title=profile_title, \
                           dropdown_data=cache_d["dropdown_data"], profile=profile,
                           properties=cache_d["properties"])
def test_dict_to_list(input_pt: dict):
    """Test the transformation of a dict into a list.

    Converts the module-level ``atividades_dict`` fixture into a list keyed
    by ``id_atividade`` and compares it with the input work plan's
    activities. (Original docstring said list→dict; the code tests the
    opposite direction.)
    """
    atividades = util.dict_to_list(atividades_dict, "id_atividade")
    assert atividades == input_pt["atividades"]