Пример #1
0
def dd_resource_to_terraform(args, dd_resource_ids):
    """
    Convert Datadog resources to terraform code.

    Args:
        args:            parsed CLI arguments (uses .input, .type, .all).
        dd_resource_ids: mapping of resource-group name -> list of Datadog
                         resource ids; ignored when args.input == "ALL".

    Returns:
        Terraform code string representing the converted Datadog resources.
    """
    terraform_code = ""

    if args.input == "ALL":
        # Convert every Datadog resource of the requested type.
        dd_resources = get_dd_resources(args.type)
        for count, dd_resource in enumerate(dd_resources, start=1):
            # convert monitor definition dict to terraform code.
            converter = Converter(datadog_type=args.type,
                                  json_dict=dd_resource)

            # Append terraform code to monitor platform file(according to monitor type).
            terraform_code += converter.to_Terraform_Code(
                args.type + '_' + str(count)) + "\n\n\n"
    else:
        # Dashboards are fetched one-by-one below; other types are fetched in bulk.
        if args.type != "dashboard":
            dd_resources = get_dd_resources(args.type)

        for dd_resource_group, dd_resource_group_ids in dd_resource_ids.items():
            terraform_code_monitors_group = ""  # output terraform code per dd resource group.

            for count, dd_resource_group_id in enumerate(dd_resource_group_ids,
                                                         start=1):
                if args.type != "dashboard":
                    dd_resource_dict = get_dd_resource_def(
                        dd_resource_group_id, dd_resources, args.type)
                else:
                    dd_resource_dict = dd_resource_remove_keys(
                        api.Dashboard.get(dd_resource_group_id), args.type)

                # convert monitor definition dict to terraform code.
                converter = Converter(datadog_type=args.type,
                                      json_dict=dd_resource_dict)

                # Append terraform code to monitor platform file(according to monitor type).
                terraform_code_monitors_group += converter.to_Terraform_Code(
                    dd_resource_group + '_' + str(count)) + "\n\n\n"

            # Optionally write out a per-group .tf file.
            if args.all:
                with open(dd_resource_group + ".tf", "w") as f:
                    f.write(terraform_code_monitors_group)
                logging.info(
                    f"Terraform code file for group {dd_resource_group} has been created..."
                )

            terraform_code += f"#####\n#\n# {dd_resource_group}\n#\n#####\n" + terraform_code_monitors_group

    return terraform_code
Пример #2
0
 def test_conversion(self):
     """Converting 1 EUR to USD should match the stub API's USD rate."""
     # Value produced by the converter under test.
     actual = float(Converter(ExchangeRateApi()).convert(1, "EUR", "USD"))
     # Expected value taken straight from the stubbed exchange-rate feed.
     rates = StubExchangeRateApi("https://api.exchangerate.host/latest").get_rates()
     self.assertAlmostEqual(actual, float(rates["rates"]["USD"]), 4)
Пример #3
0
def _test_one_model(mdl_dir: Path, left_context: int,
                    right_context: int) -> float:
    """Convert one model and check output.

    Args:
      mdl_dir: model dir for test.
      left_context: left context of kaldi model.
      right_context: right context of kaldi model.

    Returns:
      max absolute error between tensorflow pb output and kaldi output.
    """
    # Reference input features, ivectors and expected output produced by kaldi.
    feat_input = np.loadtxt(str(mdl_dir / 'input.txt'), dtype=np.float32)
    feat_ivector = np.loadtxt(str(mdl_dir / 'ivector.txt'), dtype=np.float32)
    kaldi_output = np.loadtxt(str(mdl_dir / 'output.txt'), dtype=np.float32)
    feed_dict = {'input:0': feat_input, 'ivector:0': feat_ivector}

    with TemporaryDirectory() as tmp_dir:
        pb_path = Path(tmp_dir) / 'tf.pb'
        converter = Converter(mdl_dir / 'final.txt', left_context,
                              right_context)
        converter.convert('tf', pb_path)

        tf.compat.v1.reset_default_graph()
        with tf.compat.v1.Session() as session:
            # Use a distinct name for the file handle: the original reused
            # `pb_file` for both the Path and the open stream (shadowing).
            with gfile.FastGFile(str(pb_path), 'rb') as pb_stream:
                graph_def = tf.compat.v1.GraphDef()
                graph_def.ParseFromString(pb_stream.read())
                tf.import_graph_def(graph_def, name="")

            out_tensor = session.graph.get_tensor_by_name('output.affine:0')
            output = session.run(out_tensor, feed_dict)

    return np.amax(np.absolute(np.subtract(output, kaldi_output)))
Пример #4
0
def test_required_keywords_init(required_keywords,
                                initialized_required_keywords):
    """Converter should store exactly the required keywords it was built with."""
    converter = Converter(
        data_keywords_mapper={"all": []},
        kwarg_to_schema_key_mapper={},
        required_keywords=required_keywords,
    )
    assert converter.required_keywords == initialized_required_keywords
Пример #5
0
def converter_tests():
    """Spot-check tempConverter across every unit-pair combination."""
    test_conv = Converter()
    # (source unit, target unit, input value, expected result)
    cases = [
        ("c", "k", 10, decimal.Decimal('283.15')),
        ("c", "f", 10, 50),
        ("k", "c", 10, decimal.Decimal('-263.15')),
        ("k", "f", 10, decimal.Decimal('-441.67')),
        ("f", "c", 10, decimal.Decimal('-12.222')),
        ("f", "k", 10, decimal.Decimal('260.93')),
    ]
    for src, dst, value, expected in cases:
        assert_equals(test_conv.tempConverter(src, dst, value), expected)
Пример #6
0
def main(*args, **kwargs):
    """Build a Converter from kwargs, preview its data, write it out and validate.

    Raises:
        ValueError: if the written output fails validation.
    """
    ct = Converter(**kwargs)
    print(ct.head())       # preview the first rows
    print(ct.take(10))     # sample 10 records
    ct.write()
    if ct.validate():
        print("convert succeeded!")  # fixed typo: was "successed"
    else:
        raise ValueError("Convert failed!")  # fixed typo: was "faild"
Пример #7
0
 def convertResolution(self):
     all_ISINs = list(self.getAllUniqueIsin())
     counter = 0
     for isin in all_ISINs:
         converter = Converter()
         converter.convert(self.session, isin, ActionHour, 9, ActionDay)
         # converter.convert(self.session, isin, Action, 60, ActionHour)
         # converter.convert(self.session, isin, Action, 30, Action30Minutes)
         # converter.convert(self.session, isin, Action, 10, Action10Minutes)
         counter += 1
         print "[+] {0} done {1}/{2}".format(isin, counter, len(all_ISINs))
Пример #8
0
def main():
    """
    Main function used to convert datadog monitors into a terraform file
    according to their type.
    """
    args = get_arguments()          # get input arguments.
    set_logging(args.verbose)
    terraform_code = ""             # output terraform code.
    try:
        # Read monitors-ids json file: {group_name: [monitor_id, ...]}.
        with open(args.input, "r") as f:
            monitors_list_dict = json.load(f)

        for monitor, monitor_ids in monitors_list_dict.items():
            terraform_code_monitors_group = ""       # output terraform code per monitors group.
            for count, monitor_id in enumerate(monitor_ids, start=1):
                # extract monitor definition from datadog.
                monitor_def = get_monitor_by_id(monitor_id)

                # reorg monitor dict.
                monitor_definition = redefined_monitor_definition(monitor_def)

                # convert monitor definition dict to terraform code.
                converter = Converter(datadog_type="monitor", json_dict=monitor_definition)

                # Append terraform code to monitor platform file(according to monitor type).
                terraform_code_monitors_group += converter.to_Terraform_Code(monitor + '_' + str(count)) + "\n\n\n"

            # Write out terraform code for this group of monitors.
            if args.all:
                with open(monitor + ".tf", "w") as f:
                    f.write(terraform_code_monitors_group)
                logging.info(f"Terraform code file for group {monitor} has been created...")

            terraform_code += f"#####\n#\n# {monitor}\n#\n#####\n" + terraform_code_monitors_group

        # Write out the combined terraform code.
        with open(args.output, args.mode) as f:
            f.write(terraform_code)

        logging.info(f"Terraform code file {args.output} has been created successfully...")

    except BaseException as e:
        # Top-level boundary: log anything, including KeyboardInterrupt.
        logging.exception("Uncaught exception: %s: %s", type(e).__name__, str(e))
    def test_parquet_to_csv_overwrite(self):
        """Overwriting a parquet source to csv must preserve all rows."""
        copytree(self.kwargs.get('parquet'), self.parquet_copy)

        converter = Converter(input=self.parquet_copy,
                              output=self.csv_copy,
                              mode='overwrite')
        converter.write()
        source_df = converter.df
        written_df = converter.sqlCtx.read.csv(self.csv_copy, header=True)
        self.assertTrue(self.assertRDDEquals(source_df.rdd, written_df.rdd))
        converter.tearDown()
    def test_csv_to_parquet_overwrite(self):
        """Overwriting a csv source to parquet must preserve all rows."""
        copyfile(self.kwargs.get('csv'), self.csv_copy)

        converter = Converter(input=self.csv_copy,
                              output=self.parquet_copy,
                              mode='overwrite')
        converter.write()
        source_df = converter.df
        written_df = converter.sqlCtx.read.format(
            converter.out_format).load(self.parquet_copy)
        self.assertTrue(self.assertRDDEquals(source_df.rdd, written_df.rdd))
        converter.tearDown()
Пример #11
0
def get():
    """HTTP endpoint: convert `amount` of `input_currency` into `output_currency`.

    Query params:
        amount:          required, numeric.
        input_currency:  required.
        output_currency: optional; None means "convert to all currencies".

    Returns:
        (body, status) tuple: converted payload with 200, or an error payload
        with 400 when arguments are missing or conversion fails.
    """
    # Fetch each query argument once instead of re-reading request.args repeatedly.
    amount = request.args.get('amount')
    input_currency = request.args.get('input_currency')
    output_currency = request.args.get('output_currency')

    if amount is None:
        return jsonify({'error': 'Amount was not specified in parameters.'}), status.HTTP_400_BAD_REQUEST
    if input_currency is None:
        return jsonify({'error': 'Input argument was not set.'}), status.HTTP_400_BAD_REQUEST

    try:
        # output_currency may be None here — same as the original's explicit
        # "output omitted" branch.
        converter = Converter(float(amount), input_currency, output_currency)
        return jsonify(converter.convert()), status.HTTP_200_OK
    except (RatesNotAvailableError, TooManyInputCurrencies, ValueError) as e:
        return json.dumps({'error': str(e)}), status.HTTP_400_BAD_REQUEST
Пример #12
0
    def __init__(self, params, dataset='ML1M', n_epoch=1):
        """Set up an experiment: store params, load the dataset, log split sizes.

        Args:
            params: experiment/model parameters (opaque here; stored as-is).
            dataset: dataset identifier understood by Converter (default 'ML1M').
            n_epoch: number of epochs for the batch training.
        """
        self.params = params

        # number of epochs for the batch training
        self.n_epoch = n_epoch

        # load dataset
        self.data = Converter().convert(dataset=dataset)

        logger.debug('[exp] %s | n_epoch = %d' % (dataset, n_epoch))
        # NOTE(review): the 20/10/70 percentages appear to describe the
        # batch-train/batch-test/test split — assumed from the format string;
        # verify against Converter.convert.
        logger.debug('[exp] n_sample = %d; %d (20%%) + %d (10%%) + %d (70%%)' % (
            self.data.n_sample, self.data.n_batch_train, self.data.n_batch_test, self.data.n_test))
        logger.debug('[exp] n_user = %d, n_item = %d' % (self.data.n_user, self.data.n_item))
Пример #13
0
    def test_conversion(self, mock_rates):
        """100 EUR at the mocked rate should convert to exactly 2512.0."""
        converter = Converter(100.0, 'EUR', 'CZK')

        try:
            conversion = converter.convert()
        except RatesNotAvailableError:
            self.assertTrue(
                False,
                'Conversion failed. Should not have raised an exception.')
        else:
            self.assertListEqual(
                list(conversion.get('output').values()), [2512.0])
Пример #14
0
    def test_correct_json_format_contains_input_and_output_fields(self):
        """The conversion result must expose exactly 'input' and 'output' keys."""
        converter = Converter(100.5, 'CAD', 'CZK')

        try:
            conversion = converter.convert()
        except RatesNotAvailableError:
            self.assertTrue(
                False,
                'Conversion failed. Should not have raised an exception.')
        else:
            self.assertListEqual(list(conversion.keys()), ['input', 'output'],
                                 msg='Result json should contain input, and '
                                 'output fields.')
Пример #15
0
    def test_correct_json_format_output_field(self):
        """Converting to '$' should yield every dollar-denominated currency."""
        converter = Converter(100.5, 'CAD', '$')

        try:
            conversion = converter.convert()
        except RatesNotAvailableError:
            self.assertTrue(
                False,
                'Conversion failed. Should not have raised an exception.')
        else:
            self.assertListEqual(
                list(conversion.get('output').keys()),
                ['AUD', 'CAD', 'MXN', 'NZD', 'SGD', 'USD'],
                msg='Result json input field should contain keys: '
                '"AUD", "CAD", "MXN", "NZD", "SGD", "USD"')
Пример #16
0
    def __init__(self, root):
        """Build the currency-converter GUI state on the given Tk root window.

        Args:
            root: Tk root (or toplevel) widget the UI is attached to.
        """
        self._root = root
        self._amount = IntVar()  # amount entered by the user
        self._ran = False        # whether a conversion has been run yet

        # Pressing Enter triggers a conversion.
        self._root.bind('<Return>', self._run_convert)

        # Default currency selection: EUR -> USD.
        self.default_from_curr = StringVar()
        self.default_from_curr.set("EUR")
        self.default_to_curr = StringVar()
        self.default_to_curr.set("USD")

        self.font_style = font.Font(family="Futura", size=10)
        self.style = ttk.Style()
        self.style.configure('TButton', font=self.font_style)

        # Backend converter and the currency list it exposes.
        self.converter = Converter(ExchangeRateApi())
        self.currencies = self.converter.currencies
Пример #17
0
def test_check_for_required(input_kwargs, required_keywords, should_error):
    """Missing required keywords must raise ValueError naming every missing field."""
    mapper = {
        "program_description": "description",
        "program_name": "name",
        "program_url": "url",
    }
    converter = Converter(data_keywords_mapper={"all": []},
                          kwarg_to_schema_key_mapper=mapper,
                          required_keywords=required_keywords)

    # Happy path: nothing required is missing, conversion yields an empty dict.
    if not should_error:
        assert converter.trigger_conversion(input_kwargs) == {}
        return

    with pytest.raises(ValueError) as exception_info:
        converter.trigger_conversion(input_kwargs)

    message = str(exception_info.value)
    assert "Missing kwargs! Please include values for the following fields" in message
    assert ", ".join(required_keywords) in message
Пример #18
0
 def test_correct_date(self):
     """The converter's reported date should be today's date (dd.mm.YYYY)."""
     reported = Converter(ExchangeRateApi()).date()
     today = datetime.now().strftime("%d.%m.%Y")
     self.assertEqual(reported, today)
Пример #19
0
 def setUp(self):
     # A fresh Converter for every test case.
     self.converter = Converter()
def educational_occupational_programs_converter(**kwargs):
    """Run the module-level Converter mapping over the given kwargs."""
    # Avoid shadowing the function's own name with the local instance.
    converter = Converter(
        data_keywords_mapper, kwarg_to_schema_key_mapper, required_keywords)
    return converter.trigger_conversion(kwargs)
Пример #21
0
 def test_failed_conversion(self):
     """Converting a non-numeric amount should report failure (False)."""
     outcome = Converter(ExchangeRateApi()).convert("", "USD", "EUR")
     self.assertEqual(outcome, False)
def main():
    """
    Main function used to convert a datadog json file into a terraform file.
    """
    args = get_arguments()
    set_logging(args.verbose)

    try:
        datadog_type = args.type
        datadog_json = args.json
        terraform_file = args.output
        terraform_resource_name = args.resource

        logging.info(f" Converting Json file {datadog_json} of type {datadog_type} ...")

        # read datadog json file.
        with open(datadog_json, "r") as f:
            datadog_json_dict = json.load(f)

        # initialize converter instance.
        converter = Converter(datadog_type=datadog_type, json_dict=datadog_json_dict)

        # create terraform code.
        with open(terraform_file, "w") as f:
            f.write(converter.to_Terraform_Code(terraform_resource_name))

        logging.info(f" Terraform file [ {terraform_file} ] has been created")
        logging.info(f" For reformating run => [ terraform fmt {terraform_file} ]")

    except BaseException as e:
        # Top-level boundary: log anything, including KeyboardInterrupt.
        logging.exception("Uncaught exception: %s: %s", type(e).__name__, str(e))
def dd_resources_to_terraform(args, dd_resource_ids):
    """
    Convert Datadog resources to terraform code.

    Args:
        args:            parsed CLI arguments (uses .input, .type, .group).
        dd_resource_ids: mapping of resource-group name -> list of Datadog
                         resource ids; ignored when args.input == "ALL".

    Returns:
        Terraform code string that represents the Datadog resources.
    """
    terraform_code = ""

    if args.input == "ALL":
        # Convert all Datadog resources of the requested type.
        dd_resources = fetch_dd_resources(
            args.type)  # Fetch all Datadog resources.
        for count, dd_resource in enumerate(dd_resources, start=1):
            # convert Datadog resource definition dict to terraform code.
            converter = Converter(datadog_type=args.type,
                                  json_dict=dd_resource)

            logging.debug(f'Datadog resource {count} => {dd_resource}')

            # Append terraform code.
            terraform_code += converter.to_Terraform_Code(
                args.type + '_' + str(count)) + "\n\n\n"
    else:
        # Dashboards are fetched one-by-one below; other types in bulk.
        if args.type != "dashboard":
            dd_resources = fetch_dd_resources(args.type)

        for dd_resource_group, dd_resource_group_ids in dd_resource_ids.items():
            terraform_code_monitors_group = ""  # output terraform code per dd resource group.

            for count, dd_resource_group_id in enumerate(dd_resource_group_ids,
                                                         start=1):
                if args.type != "dashboard":
                    dd_resource_dict = fetch_dd_resource_def(
                        dd_resource_group_id, dd_resources, args.type)
                else:
                    dd_resource_dict = fetch_dd_resource(
                        dd_resource_group_id, args.type)

                # convert Datadog resource definition dict to terraform code.
                converter = Converter(datadog_type=args.type,
                                      json_dict=dd_resource_dict)

                logging.debug(
                    f'Datadog resource {count} => {dd_resource_dict}')

                # Append terraform code.
                terraform_code_monitors_group += converter.to_Terraform_Code(
                    dd_resource_group + '_' + str(count)) + "\n\n\n"

            # Write out terraform code for this group of dd resources.
            if args.group:
                with open(dd_resource_group + ".tf", "w") as f:
                    f.write(terraform_code_monitors_group)
                logging.info(
                    f"Terraform code file for group {dd_resource_group} has been created..."
                )

            terraform_code += f"#####\n#\n# {dd_resource_group}\n#\n#####\n" + terraform_code_monitors_group

    return terraform_code
Пример #24
0
 def test_convert_success(self):
     """A freshly constructed Converter should report success."""
     self.assertEqual(Converter(ExchangeRateApi()).success, True)
Пример #25
0
# Console log formatter: "[timestamp] logger_name: message".
formatter = logging.Formatter("[%(asctime)s] %(name)s: %(message)s")
# add formatter to console
console.setFormatter(formatter)
# create logger
logger = logging.getLogger("converter.py")
logger.setLevel(logging.DEBUG)
# add console to logger
logger.addHandler(console)

# Bind a DB session; `online`, `engine` and `name` are assumed to be defined
# earlier in the file (not visible in this chunk) — TODO confirm.
if online:
    session = engine.bind_online()
else:
    session = engine.bind_offline()

from converter.converter import Converter
converter = Converter()

from confdb_v2.queries import ConfDbQueries
queries = ConfDbQueries()
# Look up the configuration version by name.
ver = queries.getConfigurationByName(name, session, logger)
cnf = -1  # NOTE(review): -1 looks like a "no configuration id" sentinel — verify.

import os
workdir = os.getcwd()
# Emit the config file for this version into the current working directory.
config_file_name = converter.createConfig(ver,
                                          cnf,
                                          session,
                                          online,
                                          workdir,
                                          use_cherrypy=False)
Пример #26
0
def work_based_programs_converter(**kwargs):
    """Run the module-level Converter mapping over the given kwargs."""
    # Avoid shadowing the function's own name with the local instance.
    converter = Converter(data_keywords_mapper, kwarg_to_schema_key_mapper)
    return converter.trigger_conversion(kwargs)