def test_conversion(self):
    """Converting 1 EUR to USD should match the stubbed exchange rate."""
    # Fetch the reference rate from the stub API first.
    stub_api = StubExchangeRateApi("https://api.exchangerate.host/latest")
    reference_rates = stub_api.get_rates()
    expected_usd = float(reference_rates["rates"]["USD"])
    # Run the real conversion and compare to 4 decimal places.
    converter = Converter(ExchangeRateApi())
    converted = float(converter.convert(1, "EUR", "USD"))
    self.assertAlmostEqual(converted, expected_usd, 4)
def _test_one_model(mdl_dir: Path, left_context: int, right_context: int) -> float:
    """Convert one model and check output.

    Converts the kaldi text model in *mdl_dir* to a TensorFlow graph, runs it
    on the saved input features, and compares against the saved kaldi output.

    Args:
        mdl_dir: model dir for test (must contain input.txt, ivector.txt,
            output.txt and final.txt).
        left_context: left context of kaldi model.
        right_context: right context of kaldi model.

    Returns:
        max err percent between tensorflow pb output and kaldi output.
    """
    feat_input = np.loadtxt(str(mdl_dir / 'input.txt'), dtype=np.float32)
    feat_ivector = np.loadtxt(str(mdl_dir / 'ivector.txt'), dtype=np.float32)
    kaldi_output = np.loadtxt(str(mdl_dir / 'output.txt'), dtype=np.float32)
    feed_dict = {'input:0': feat_input, 'ivector:0': feat_ivector}
    with TemporaryDirectory() as tmp_dir:
        pb_file = Path(tmp_dir) / 'tf.pb'
        converter = Converter(mdl_dir / 'final.txt', left_context, right_context)
        converter.convert('tf', pb_file)
        tf.compat.v1.reset_default_graph()
        with tf.compat.v1.Session() as session:
            # BUGFIX: the file handle used to shadow `pb_file` (the Path),
            # which made the code confusing and fragile; use a distinct name.
            with gfile.FastGFile(str(pb_file), 'rb') as pb_stream:
                graph_def = tf.compat.v1.GraphDef()
                graph_def.ParseFromString(pb_stream.read())
            tf.import_graph_def(graph_def, name="")
            out_tensor = session.graph.get_tensor_by_name('output.affine:0')
            output = session.run(out_tensor, feed_dict)
    return np.amax(np.absolute(np.subtract(output, kaldi_output)))
def converter_tests():
    """Exercise Converter.tempConverter across every supported unit pair."""
    conv = Converter()
    # (from_unit, to_unit, value, expected) — same cases, same order as before.
    cases = [
        ("c", "k", 10, decimal.Decimal('283.15')),
        ("c", "f", 10, 50),
        ("k", "c", 10, decimal.Decimal('-263.15')),
        ("k", "f", 10, decimal.Decimal('-441.67')),
        ("f", "c", 10, decimal.Decimal('-12.222')),
        ("f", "k", 10, decimal.Decimal('260.93')),
    ]
    for src_unit, dst_unit, value, expected in cases:
        assert_equals(conv.tempConverter(src_unit, dst_unit, value), expected)
def main():
    """Convert Datadog monitors (listed in a JSON ids file) into Terraform code.

    Reads the input JSON mapping of monitor-group name -> list of monitor ids,
    fetches each monitor definition from Datadog, converts it to Terraform
    code, and writes everything to the output file. With --all, additionally
    writes one .tf file per monitor group.
    """
    args = get_arguments()  # parse CLI arguments
    set_logging(args.verbose)
    terraform_code = ""  # accumulated Terraform code for all groups
    try:
        # Read the monitors-ids JSON file.
        with open(args.input, "r") as f:
            monitors_list_dict = json.load(f)
        for monitor, monitor_ids in monitors_list_dict.items():
            count = 0
            terraform_code_monitors_group = ""  # Terraform code for this group
            for monitor_id in monitor_ids:
                count += 1
                # Extract the monitor definition from Datadog and normalize it.
                monitor_def = get_monitor_by_id(monitor_id)
                monitor_definition = redefined_monitor_definition(monitor_def)
                # Convert the monitor definition dict to Terraform code.
                converter = Converter(datadog_type="monitor",
                                      json_dict=monitor_definition)
                terraform_code_monitors_group += converter.to_Terraform_Code(
                    monitor + '_' + str(count)) + "\n\n\n"
            # Optionally write one .tf file per monitor group.
            if args.all:
                with open(monitor + ".tf", "w") as f:
                    f.write(terraform_code_monitors_group)
                logging.info(f"Terraform code file for group {monitor} has been created...")
            terraform_code += f"#####\n#\n# {monitor}\n#\n#####\n" + terraform_code_monitors_group
        # Write out the combined Terraform code.
        with open(args.output, args.mode) as f:
            f.write(terraform_code)
        logging.info(f"Terraform code file {args.output} has been created successfully...")
    except Exception as e:
        # Exception (not BaseException) so KeyboardInterrupt/SystemExit propagate.
        logging.exception("Uncaught exception: %s: %s", type(e).__name__, str(e))
def convertResolution(self):
    # Convert stored hourly actions into daily actions for every known ISIN.
    # NOTE: this is Python 2 code (print statement below).
    all_ISINs = list(self.getAllUniqueIsin())
    counter = 0
    for isin in all_ISINs:
        converter = Converter()
        # Aggregate ActionHour rows into ActionDay rows for this ISIN.
        # NOTE(review): the constant 9 presumably matches the number of
        # trading hours in a day — confirm against Converter.convert.
        converter.convert(self.session, isin, ActionHour, 9, ActionDay)
        # Other resolutions, currently disabled:
        # converter.convert(self.session, isin, Action, 60, ActionHour)
        # converter.convert(self.session, isin, Action, 30, Action30Minutes)
        # converter.convert(self.session, isin, Action, 10, Action10Minutes)
        counter += 1
        # Progress report: which ISIN finished and how far along we are.
        print "[+] {0} done {1}/{2}".format(isin, counter, len(all_ISINs))
def test_conversion(self, mock_rates):
    """100 EUR must convert to exactly 2512.0 CZK under the mocked rates."""
    converter = Converter(100.0, 'EUR', 'CZK')
    try:
        conversion = converter.convert()
    except RatesNotAvailableError:
        self.assertTrue(
            False,
            'Conversion failed. Should not have raised an exception.')
    else:
        self.assertListEqual(list(conversion.get('output').values()), [2512.0])
def test_required_keywords_init(required_keywords, initialized_required_keywords):
    """Converter must normalize required_keywords at construction time."""
    conv = Converter(
        data_keywords_mapper={"all": []},
        kwarg_to_schema_key_mapper={},
        required_keywords=required_keywords,
    )
    assert conv.required_keywords == initialized_required_keywords
def test_correct_json_format_contains_input_and_output_fields(self):
    """The conversion result must expose exactly the 'input' and 'output' keys."""
    converter = Converter(100.5, 'CAD', 'CZK')
    try:
        result = converter.convert()
    except RatesNotAvailableError:
        self.assertTrue(
            False,
            'Conversion failed. Should not have raised an exception.')
    else:
        self.assertListEqual(list(result.keys()), ['input', 'output'],
                             msg='Result json should contain input, and '
                                 'output fields.')
class ConverterTestCase(TestCase):
    """File-driven tests: each case runs Converter.convertText on an input
    fixture and compares against the matching expected-output fixture."""

    def setUp(self):
        self.converter = Converter()

    def _test(self, input_file, expected_output_file):
        # Load both fixtures from STATIC_DIR and compare the conversion result.
        input_path = Path(f"{STATIC_DIR}/{input_file}")
        expected_path = Path(f"{STATIC_DIR}/{expected_output_file}")
        with open(input_path, encoding="utf-8") as f:
            source_text = f.read()
        with open(expected_path, encoding="utf-8") as f:
            expected_text = f.read()
        self.assertEqual(self.converter.convertText(source_text), expected_text)

    # Disabled fixture pair (kept as in the original):
    # def test_text1(self):
    #     self._test("text1.txt", "conv1.txt")

    def test_text2(self):
        self._test("text2.txt", "conv2.txt")

    def test_words1(self):
        self._test("owords1.txt", "cwords1.txt")

    def test_words2(self):
        self._test("owords2.txt", "cwords2.txt")

    def test_words3(self):
        self._test("owords3.txt", "cwords3.txt")
class Api(object):
    """CherryPy-exposed JSON API wrapping a single Converter instance."""

    def __init__(self):
        self.converter = Converter()

    @cherrypy.expose
    def index(self):
        # Plain-HTML landing page listing the available endpoints.
        return "<h1>Api Root</h1><p>Available API Functions:</p>.../api/list and .../api/convert_value"

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def convert_value(self, de, para, valor):
        # TODO: MAKE CHECKING FOR CORRECT DATA TYPES WORK
        output = {
            'errors': "Undefined",
            'success': "Undefined",
            'value': "Undefined",
        }
        # BUG: value coming in from ajax call is not int, float or str ?!
        #      makes it impossible to check
        # None is falsy, so `not x` already covers the `x is None` case.
        if not de or not para or not valor:
            output['success'] = False
            errors = []
            if not de:
                errors.append("Invalid FROM option was inputted!")
            if not para:
                errors.append("Invalid TO option was inputted!")
            if not valor:
                errors.append("Invalid value was inputted!")
            output['errors'] = errors
        else:
            output['errors'] = 'No errors'
            output['success'] = True
            output['value'] = self.converter.convert(de, para, valor)
        return output

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self):
        # Return the full list of known conversion rates.
        output = {
            'errors': 'No errors',
            'success': "Undefined",
            'rate_list': "Undefined",
        }
        output['rate_list'] = self.converter.list()
        output['success'] = True
        return output
def test_correct_json_format_output_field(self):
    """Converting to '$' must yield one entry per dollar-symbol currency."""
    converter = Converter(100.5, 'CAD', '$')
    expected_keys = ['AUD', 'CAD', 'MXN', 'NZD', 'SGD', 'USD']
    try:
        result = converter.convert()
    except RatesNotAvailableError:
        self.assertTrue(
            False,
            'Conversion failed. Should not have raised an exception.')
    else:
        self.assertListEqual(
            list(result.get('output').keys()), expected_keys,
            msg='Result json input field should contain keys: '
                '"AUD", "CAD", "MXN", "NZD", "SGD", "USD"')
def __init__(self, root):
    """Initialize converter-UI state on top of the given Tk root window."""
    self._root = root
    self._root.bind('<Return>', self._run_convert)
    self._amount = IntVar()
    self._ran = False  # whether a conversion result is currently shown
    # Default currency pair: EUR -> USD.
    self.default_from_curr = StringVar()
    self.default_from_curr.set("EUR")
    self.default_to_curr = StringVar()
    self.default_to_curr.set("USD")
    # Shared font and ttk button style for all widgets.
    self.font_style = font.Font(family="Futura", size=10)
    self.style = ttk.Style()
    self.style.configure('TButton', font=self.font_style)
    # Conversion backend and the currency list it exposes.
    self.converter = Converter(ExchangeRateApi())
    self.currencies = self.converter.currencies
def test_check_for_required(input_kwargs, required_keywords, should_error):
    """trigger_conversion must reject inputs missing any required keyword."""
    mapper = {
        "program_description": "description",
        "program_name": "name",
        "program_url": "url",
    }
    conv = Converter(data_keywords_mapper={"all": []},
                     kwarg_to_schema_key_mapper=mapper,
                     required_keywords=required_keywords)
    if not should_error:
        # Valid input: conversion succeeds and yields an empty mapping.
        assert conv.trigger_conversion(input_kwargs) == {}
        return
    # Invalid input: a ValueError naming the missing fields must be raised.
    with pytest.raises(ValueError) as exceptionMsg:
        conv.trigger_conversion(input_kwargs)
    assert "Missing kwargs! Please include values for the following fields" in str(
        exceptionMsg.value)
    assert ", ".join(required_keywords) in str(exceptionMsg.value)
def main(*args, **kwargs):
    """Run a conversion: preview the data, write it out, and validate.

    Args:
        kwargs: forwarded verbatim to the Converter constructor.

    Raises:
        ValueError: if the written output fails validation.
    """
    ct = Converter(**kwargs)
    # Preview the data before writing.
    print(ct.head())
    print(ct.take(10))
    ct.write()
    # BUGFIX: corrected user-facing typos ("successed" -> "succeeded",
    # "faild" -> "failed").
    if ct.validate():
        print("convert succeeded!")
    else:
        raise ValueError("Convert failed!")
def dd_resource_to_terraform(args, dd_resource_ids):
    """Convert Datadog resources into Terraform code.

    Args:
        args: parsed CLI arguments (uses .input, .type, .all).
        dd_resource_ids: mapping of group name -> list of Datadog resource ids
            (ignored when args.input == "ALL").

    Returns:
        A Terraform code string covering the requested resources.
    """
    terraform_code = ""
    if args.input == "ALL":
        # Convert every resource of the requested type.
        count = 0
        dd_resources = get_dd_resources(args.type)
        for dd_resource in dd_resources:
            count += 1
            # Convert the resource definition dict to Terraform code.
            converter = Converter(datadog_type=args.type, json_dict=dd_resource)
            terraform_code += converter.to_Terraform_Code(
                args.type + '_' + str(count)) + "\n\n\n"
    else:
        # Non-dashboard types are fetched in bulk once and looked up per id;
        # dashboards are fetched one at a time below.
        if args.type != "dashboard":
            dd_resources = get_dd_resources(args.type)
        for dd_resource_group, dd_resource_group_ids in dd_resource_ids.items():
            count = 0
            terraform_code_monitors_group = ""  # Terraform code for this group.
            for dd_resource_group_id in dd_resource_group_ids:
                count += 1
                if args.type != "dashboard":
                    dd_resource_dict = get_dd_resource_def(
                        dd_resource_group_id, dd_resources, args.type)
                else:
                    dd_resource_dict = dd_resource_remove_keys(
                        api.Dashboard.get(dd_resource_group_id), args.type)
                # Convert the resource definition dict to Terraform code.
                converter = Converter(datadog_type=args.type,
                                      json_dict=dd_resource_dict)
                terraform_code_monitors_group += converter.to_Terraform_Code(
                    dd_resource_group + '_' + str(count)) + "\n\n\n"
            # Optionally write one .tf file per resource group.
            if args.all:
                with open(dd_resource_group + ".tf", "w") as f:
                    f.write(terraform_code_monitors_group)
                logging.info(
                    f"Terraform code file for group {dd_resource_group} has been created..."
                )
            terraform_code += f"#####\n#\n# {dd_resource_group}\n#\n#####\n" + terraform_code_monitors_group
    return terraform_code
def __init__(self, params, dataset='ML1M', n_epoch=1):
    """Set up an experiment: record parameters and load the dataset.

    Args:
        params: experiment parameters, stored as-is.
        dataset: dataset name understood by Converter (default 'ML1M').
        n_epoch: number of epochs for the batch training.
    """
    self.params = params
    self.n_epoch = n_epoch
    # Load (and split) the dataset through the Converter helper.
    self.data = Converter().convert(dataset=dataset)
    # Log the experiment setup and the resulting split sizes.
    logger.debug('[exp] %s | n_epoch = %d' % (dataset, n_epoch))
    logger.debug(
        '[exp] n_sample = %d; %d (20%%) + %d (10%%) + %d (70%%)' % (
            self.data.n_sample,
            self.data.n_batch_train,
            self.data.n_batch_test,
            self.data.n_test))
    logger.debug(
        '[exp] n_user = %d, n_item = %d' % (self.data.n_user,
                                            self.data.n_item))
def test_parquet_to_csv_overwrite(self):
    """Round-trip: parquet -> csv in overwrite mode must preserve the rows."""
    copytree(self.kwargs.get('parquet'), self.parquet_copy)
    ct = Converter(input=self.parquet_copy,
                   output=self.csv_copy,
                   mode='overwrite')
    ct.write()
    # Re-read what was written and compare against the source frame.
    written = ct.sqlCtx.read.csv(self.csv_copy, header=True)
    self.assertTrue(self.assertRDDEquals(ct.df.rdd, written.rdd))
    ct.tearDown()
def test_csv_to_parquet_overwrite(self):
    """Round-trip: csv -> parquet in overwrite mode must preserve the rows."""
    copyfile(self.kwargs.get('csv'), self.csv_copy)
    ct = Converter(input=self.csv_copy,
                   output=self.parquet_copy,
                   mode='overwrite')
    ct.write()
    # Re-read what was written and compare against the source frame.
    written = ct.sqlCtx.read.format(ct.out_format).load(self.parquet_copy)
    self.assertTrue(self.assertRDDEquals(ct.df.rdd, written.rdd))
    ct.tearDown()
def add_currency_pair():
    """Insert a timestamped currency-pair document into the exchange-rates collection."""
    # TODO: docs -> request must use `Content-Type: application/json`
    # TODO: docs -> date fields must match the datetime.datetime.now() format
    payload = request.json
    # TODO: move this preparation step into a MongoConnector helper
    # Floats get converted to a mongo-friendly representation; everything
    # else is stored as-is.
    insert_data = {
        key: Converter.change_type(value, float, 'mongo') if type(value) is float else value
        for key, value in payload.items()
    }
    insert_data['timestamp'] = datetime.datetime.now()
    res_msg = mongo_connector.insert(EXCHANGE_RATES_COLLECTION_NAME, insert_data)
    if res_msg.status:
        return res_msg.message, 200
    return res_msg.message, 400
def handle_assets():
    """GET lists stored asset documents; PUT inserts a new one.

    Returns:
        A (body, status) tuple suitable for a Flask view.
    """
    if request.method == 'GET':
        res_msg = mongo_connector.get(ASSETS_COLLECTION_NAME, {})
        if res_msg.status:
            return res_msg.message, 200
        # BUGFIX: was `return res_msg, 400` — return the message like the
        # other branches, not the whole result object.
        return res_msg.message, 400
    elif request.method == 'PUT':
        # TODO: deduplicate with add_currency_pair
        data_json = request.json
        insert_data = dict()
        for key, value in data_json.items():
            # Floats get converted to a mongo-friendly representation.
            if type(value) is float:
                insert_data[key] = Converter.change_type(value, float, 'mongo')
            else:
                insert_data[key] = value
        insert_data['timestamp'] = datetime.datetime.now()
        res_msg = mongo_connector.insert(ASSETS_COLLECTION_NAME, insert_data)
        if res_msg.status:
            return res_msg.message, 200
        else:
            return res_msg.message, 400
    else:
        # BUGFIX: the (body, status) order was reversed — Flask expects the
        # body first and the status code second.
        return 'Method {} is not implemented'.format(request.method), 501
def get():
    """Convert `amount` of `input_currency` to `output_currency` from query args.

    Missing `output_currency` means "all matching currencies" (None is passed
    through to Converter, exactly as before).
    """
    amount = request.args.get('amount')
    input_currency = request.args.get('input_currency')
    output_currency = request.args.get('output_currency')
    # Guard clauses for the two required parameters.
    if amount is None:
        return jsonify({'error': 'Amount was not specified in parameters.'}), status.HTTP_400_BAD_REQUEST
    if input_currency is None:
        return jsonify({'error': 'Input argument was not set.'}), status.HTTP_400_BAD_REQUEST
    # output_currency is None exactly when the arg is omitted, so a single
    # branch covers both of the original cases.
    try:
        converter = Converter(float(amount), input_currency, output_currency)
        return jsonify(converter.convert()), status.HTTP_200_OK
    except (RatesNotAvailableError, TooManyInputCurrencies, ValueError) as e:
        return json.dumps({'error': str(e)}), status.HTTP_400_BAD_REQUEST
formatter = logging.Formatter("[%(asctime)s] %(name)s: %(message)s") # add formatter to console console.setFormatter(formatter) # create logger logger = logging.getLogger("converter.py") logger.setLevel(logging.DEBUG) # add console to logger logger.addHandler(console) if online: session = engine.bind_online() else: session = engine.bind_offline() from converter.converter import Converter converter = Converter() from confdb_v2.queries import ConfDbQueries queries = ConfDbQueries() ver = queries.getConfigurationByName(name, session, logger) cnf = -1 import os workdir = os.getcwd() config_file_name = converter.createConfig(ver, cnf, session, online, workdir, use_cherrypy=False)
def dd_resources_to_terraform(args, dd_resource_ids):
    """Convert Datadog resources to terraform code.

    Args:
        args: input Arguments (uses .input, .type and .group).
        dd_resource_ids: Datadog resource ID numbers to convert, grouped by
            group name (ignored when args.input == "ALL").

    Returns:
        terraform code string that represents the Datadog resources.
    """
    terraform_code = ""
    if args.input == "ALL":
        """ In case we need to convert all Datadog resources of specific type. """
        count = 0
        dd_resources = fetch_dd_resources(
            args.type)  # Fetch all Datadog resources.
        for dd_resource in dd_resources:
            count += 1
            # convert Datadog resource definition dict to terraform code.
            converter = Converter(datadog_type=args.type, json_dict=dd_resource)
            logging.debug(f'Datadog resource {count} => {dd_resource}')
            # Append terraform code to.
            terraform_code += converter.to_Terraform_Code(
                args.type + '_' + str(count)) + "\n\n\n"
    else:
        # Non-dashboard types are fetched in bulk once and looked up per id;
        # dashboards are fetched one at a time inside the loop below.
        if args.type != "dashboard":
            dd_resources = fetch_dd_resources(args.type)
        for dd_resource_group, dd_resource_group_ids in dd_resource_ids.items(
        ):
            count = 0
            terraform_code_monitors_group = ""  # output terraform code per dd resource group.
            for dd_resource_group_id in dd_resource_group_ids:
                count += 1
                if args.type != "dashboard":
                    dd_resource_dict = fetch_dd_resource_def(
                        dd_resource_group_id, dd_resources, args.type)
                else:
                    # dd_resource_dict = dd_resource_remove_keys(api.Dashboard.get(dd_resource_group_id), args.type)
                    dd_resource_dict = fetch_dd_resource(
                        dd_resource_group_id, args.type)
                # convert Datadog resource definition dict to terraform code.
                converter = Converter(datadog_type=args.type,
                                      json_dict=dd_resource_dict)
                logging.debug(
                    f'Datadog resource {count} => {dd_resource_dict}')
                # Append terraform code.
                terraform_code_monitors_group += converter.to_Terraform_Code(
                    dd_resource_group + '_' + str(count)) + "\n\n\n"
            # Write out terraform code for this group of dd resource...
            if args.group:
                with open(dd_resource_group + ".tf", "w") as f:
                    f.write(terraform_code_monitors_group)
                logging.info(
                    f"Terraform code file for group {dd_resource_group} has been created..."
                )
            terraform_code += f"#####\n#\n# {dd_resource_group}\n#\n#####\n" + terraform_code_monitors_group
    return terraform_code
def setUp(self):
    # Fresh Converter instance for every test case.
    self.converter = Converter()
def test_correct_date(self):
    """Converter.date() must report today's date in dd.mm.YYYY format."""
    converter = Converter(ExchangeRateApi())
    reported = converter.date()
    today = datetime.now().strftime("%d.%m.%Y")
    self.assertEqual(reported, today)
def educational_occupational_programs_converter(**kwargs):
    """Convert keyword arguments via the module-level Converter configuration.

    Args:
        kwargs: input values forwarded to Converter.trigger_conversion.

    Returns:
        Whatever trigger_conversion produces for the given kwargs.
    """
    # Renamed local: the original shadowed the function's own name, which is
    # confusing and blocks recursion/reference to the function inside itself.
    converter = Converter(data_keywords_mapper, kwarg_to_schema_key_mapper,
                          required_keywords)
    return converter.trigger_conversion(kwargs)
def test_convert_success(self):
    """A freshly constructed converter must report success == True."""
    conv = Converter(ExchangeRateApi())
    self.assertEqual(conv.success, True)
def test_failed_conversion(self):
    """Converting an empty amount must fail and yield False."""
    conv = Converter(ExchangeRateApi())
    outcome = conv.convert("", "USD", "EUR")
    self.assertEqual(outcome, False)
def main():
    """Convert a Datadog JSON export into a Terraform file.

    Reads the JSON definition given by --json, converts it according to the
    Datadog resource type (--type), and writes Terraform code for the
    requested resource name (--resource) to the output file (--output).
    """
    # NOTE: a large block of commented-out test `open()` calls was removed.
    args = get_arguments()
    set_logging(args.verbose)
    try:
        datadog_type = args.type
        datadog_json = args.json
        terraform_file = args.output
        terraform_resource_name = args.resource
        logging.info(f" Converting Json file {datadog_json} of type {datadog_type} ...")
        # Read the Datadog JSON export.
        with open(datadog_json, "r") as f:
            datadog_json_dict = json.load(f)
        # Convert the definition dict to Terraform code and write it out.
        converter = Converter(datadog_type=datadog_type,
                              json_dict=datadog_json_dict)
        with open(terraform_file, "w") as f:
            f.write(converter.to_Terraform_Code(terraform_resource_name))
        logging.info(f" Terraform file [ {terraform_file} ] has been created")
        logging.info(f" For reformating run => [ terraform fmt {terraform_file} ]")
    except Exception as e:
        # Exception (not BaseException) so KeyboardInterrupt/SystemExit propagate.
        logging.exception("Uncaught exception: %s: %s", type(e).__name__, str(e))
def __init__(self):
    # Single Converter instance shared by this object's methods.
    self.converter = Converter()
# create console handler and set level to debug console = logging.StreamHandler() console.setLevel(logging.DEBUG) # create formatter formatter = logging.Formatter("[%(asctime)s] %(name)s: %(message)s") # add formatter to console console.setFormatter(formatter) # create logger logger = logging.getLogger("converter.py") logger.setLevel(logging.DEBUG) # add console to logger logger.addHandler(console) if online: session = engine.bind_online() else: session = engine.bind_offline() from converter.converter import Converter converter = Converter() from confdb_v2.queries import ConfDbQueries queries = ConfDbQueries() ver = queries.getConfigurationByName(name, session, logger) cnf = -1 import os workdir = os.getcwd() config_file_name = converter.createConfig(ver, cnf, session, online, workdir, use_cherrypy = False)
class UI:
    """Tkinter currency-converter window backed by a Converter instance."""

    def __init__(self, root):
        # Tk root window and widget state.
        self._root = root
        self._amount = IntVar()
        self._ran = False  # whether a result/date label pair is on screen
        self._root.bind('<Return>', self._run_convert)
        # Default currency pair: EUR -> USD.
        self.default_from_curr = StringVar()
        self.default_from_curr.set("EUR")
        self.default_to_curr = StringVar()
        self.default_to_curr.set("USD")
        # Shared font and ttk button style.
        self.font_style = font.Font(family="Futura", size=10)
        self.style = ttk.Style()
        self.style.configure('TButton', font=self.font_style)
        # Conversion backend and the currency list it exposes.
        self.converter = Converter(ExchangeRateApi())
        self.currencies = self.converter.currencies

    def start(self):
        # Build and lay out all widgets.
        image = Image.open("./src/images/background.jpg")
        self.background = ImageTk.PhotoImage(image)
        self.background_label = ttk.Label(master=self._root,
                                          image=self.background)
        self._from_curr_menu = ttk.Combobox(
            master=self._root,
            textvariable=self.default_from_curr,
            values=self.currencies)
        self._to_curr_menu = ttk.Combobox(master=self._root,
                                          textvariable=self.default_to_curr,
                                          values=self.currencies)
        self._swap_currencies_button = ttk.Button(
            master=self._root, text="<>", command=self._swap_currencies)
        # NOTE(review): reuses self._amount (originally an IntVar) as the
        # Entry widget — works because only the Entry's .get() is used later.
        self._amount = ttk.Entry(master=self._root)
        self._convert_button = ttk.Button(master=self._root,
                                          text="Convert",
                                          command=self._run_convert)
        self._from_curr_menu.configure(font=self.font_style)
        self._to_curr_menu.configure(font=self.font_style)
        self._swap_currencies_button.configure(style="TButton")
        self._convert_button.configure(style="TButton")
        self.background_label.place(x=0, y=0)
        self._from_curr_menu.grid(row=0, column=0, sticky=(constants.W),
                                  padx=20, pady=20)
        self._swap_currencies_button.grid(row=0, column=1, padx=20, pady=20)
        self._to_curr_menu.grid(row=0, column=2, sticky=(constants.E),
                                padx=20, pady=20)
        self._amount.grid(row=1, column=1, pady=20)
        self._convert_button.grid(row=2, column=1, pady=20)
        self._root.grid_columnconfigure(1, weight=1, minsize=300)

    def _run_convert(self, event=None):
        # Read the amount and currency pair, convert, and display the result.
        count = self._amount.get()
        from_curr = self._from_curr_menu.get()
        to_curr = self._to_curr_menu.get()
        try:
            count = float(count)
        except:
            # NOTE(review): bare except — consider catching ValueError only.
            messagebox.showinfo(
                "Error",
                "Wrong input, format input like this: 12.5 (not 12,5 €)")
            return
        self.result = self.converter.convert(count, from_curr, to_curr)
        output = f"{self.result} {to_curr}"
        date = f"as of {self.converter.date()}"
        # Replace any previously shown result/date labels.
        if self._ran:
            self.result_label.destroy()
            self.date_label.destroy()
        self._ran = True
        self.result_label = ttk.Label(master=self._root, text=output)
        self.date_label = ttk.Label(master=self._root, text=date)
        self.copy_button = ttk.Button(master=self._root,
                                      text="Copy",
                                      command=self._copy_to_clipboard)
        self.result_label.configure(font=self.font_style)
        self.date_label.configure(font=self.font_style)
        self.copy_button.configure(style="TButton")
        self.result_label.grid(row=3, column=1, pady=20)
        self.date_label.grid(row=4, column=1)
        self.copy_button.grid(row=5, column=1, pady=20)

    def _copy_to_clipboard(self):
        # Put the last conversion result on the system clipboard.
        self._root.clipboard_clear()
        self._root.clipboard_append(self.result)
        messagebox.showinfo("", "Copied to clipboard!")

    def _swap_currencies(self):
        # Rebuild the two comboboxes with their currencies exchanged.
        current_from = self._from_curr_menu.get()
        current_to = self._to_curr_menu.get()
        self.new_from = StringVar()
        self.new_from.set(current_to)
        self.new_to = StringVar()
        self.new_to.set(current_from)
        self._from_curr_menu.destroy()
        self._to_curr_menu.destroy()
        self._from_curr_menu = ttk.Combobox(master=self._root,
                                            textvariable=self.new_from,
                                            values=self.currencies)
        self._to_curr_menu = ttk.Combobox(master=self._root,
                                          textvariable=self.new_to,
                                          values=self.currencies)
        self._from_curr_menu.configure(font=self.font_style)
        self._to_curr_menu.configure(font=self.font_style)
        self._from_curr_menu.grid(row=0, column=0, sticky=(constants.W),
                                  padx=20, pady=20)
        self._to_curr_menu.grid(row=0, column=3, sticky=(constants.E),
                                padx=20, pady=20)
def work_based_programs_converter(**kwargs):
    """Convert keyword arguments via the module-level Converter configuration.

    Args:
        kwargs: input values forwarded to Converter.trigger_conversion.

    Returns:
        Whatever trigger_conversion produces for the given kwargs.
    """
    # Renamed local: the original shadowed the function's own name.
    converter = Converter(data_keywords_mapper, kwarg_to_schema_key_mapper)
    return converter.trigger_conversion(kwargs)