Example 1
def reducer(acc, store):
    # Count which known field name appears among this store's values.
    if 'street_crossing_name' in store.values:
        acc['street_crossing_name'] = _.get(acc, 'street_crossing_name', 0) + 1
    elif 'first_name' in store.values:
        acc['first_name'] = _.get(acc, 'first_name', 0) + 1
    elif 'street_address' in store.values:
        acc['street_address'] = _.get(acc, 'street_address', 0) + 1

    return acc
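A minimal, hypothetical driver for the reducer above, assuming `stores` is an iterable of objects exposing a `.values` attribute and `_` is pydash's `py_` (as in the other examples here):

# accumulate field-name counts across all stores, starting from an empty dict
counts = _.reduce(stores, reducer, {})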
Example 2
def create_modified_json(reference_files):
    config = _get_json_data(os.path.join(BASE_DIR, 'config.json'))
    refer_link = _.get(config, 'refer_link')
    origin_json_files = _get_json_file_path(TARGET_DIR)

    if len(origin_json_files) == 0:
        _error(f"JSON not found at ({TARGET_DIR})")
    else:
        total_dictionary = {}
        for json_path in origin_json_files:
            fname = get_file_syntax(json_path)
            full_single_json = _get_json_data(json_path)
            single = _.get(full_single_json, 'files')
            total_dictionary[key_path_creator(json_path)] = full_single_json
            for obj in single:
                new_desc = _modify_yaml_to_json(obj['description'])
                _.set(obj, 'description', new_desc)
                package = obj['package']
                enums = get_enums_dict(obj['enums'])
                services = _.get(obj, 'services')
                messages = _.get(obj, 'messages')
                message_names = [a['name'] for a in messages]

                for service in services:
                    for method in service['methods']:
                        method_status = is_exists_in_same_file(message_names, method, fname)

                        svc_desc = _modify_yaml_to_json(method['description'])
                        method['description'] = svc_desc
                        google_api = _.get(method, 'options')

                        if google_api is not None:
                            rest_apis = google_api['google.api.http']['rules']
                            method['restAPI'] = rest_apis

                        if method['requestFullType'] in refer_link and not method_status['req_status']:
                            method['requestFullType_link'] = refer_link[method['requestFullType']]

                        if method['responseFullType'] in refer_link and not method_status['res_status']:
                            method['responseFullType_link'] = refer_link[method['responseFullType']]

                for message in messages:
                    for field in message["fields"]:
                        field_long_type_wrapper(message, field, enums)
                        field['description'] = field_desc_wrapper(field['description'])
                        field_full_type_wrapper(fname, field, refer_link, package)

            full_single_json['file_info'] = fname
        return total_dictionary
Example 3
def fetch_release_notes_from_prs(
        github_helper: GitHubRepositoryHelper,
        pr_numbers_in_range: typing.Set[str],
        cn_current_repo: ComponentName) -> typing.List[ReleaseNote]:
    # we should consider adding a release-note label to the PRs
    # to reduce the number of search results
    prs_iter = github_helper.search_issues_in_repo('type:pull is:closed')

    release_notes = list()
    for pr_iter in prs_iter:
        pr_dict = pr_iter.as_dict()

        pr_number = str(pr_dict['number'])
        if pr_number not in pr_numbers_in_range:
            continue

        release_notes_pr = extract_release_notes(
            reference_id=pr_number,
            text=pr_dict['body'],
            user_login=_.get(pr_dict, 'user.login'),
            cn_current_repo=cn_current_repo,
            reference_type=REF_TYPE_PULL_REQUEST)
        if not release_notes_pr:
            continue

        release_notes.extend(release_notes_pr)
    return release_notes
Example 4
def _get_human_managed_documents_links() -> dict:
    """Get the URL links for human-managed documents from config.json.

    Returns:
        dict
    """
    config = _get_json_data(os.path.join(BASE_DIR, 'config.json'))
    human_doc_links = _.get(config, 'human_doc_links')
    return human_doc_links
Example 5
        if directory == 'googleEs':
            find['googleEs'] = args.name
            continue

        # handle gcid condition
        if directory == 'gcid':
            find['gcid'] = "gcid:{}".format(args.gcid)
            continue

        # handle gmb condition
        if directory == 'gmb':
            find['gmb'] = args.gcid
            continue

        data = getDirectory(directory)
        cats = _.get(data, 'categories', [])

        # select the right key to look for
        key = "title"
        if directory == "apple":
            key = "name"

        # display selection
        print("")
        print(directory.upper())
        if _.get(find, directory, None) is not None:
            print("current category: {}".format(_.get(find, directory,
                                                      'None')))

        # set the initial search criteria
        selection = args.search
Example 6
def _generate_summary_mds(context_input, managed_link, history):
    table_of_contents = {}
    title_index = []
    config = _get_json_data(os.path.join(BASE_DIR, 'config.json'))

    for path in context_input:

        file_info = _get_file_name_syntax(path)
        full_path = path[path.find('api/') + 4:].replace('.json', '.md')
        parsed_path = full_path.split('/')
        parsed_path[-1] = parsed_path[-1].replace('_', '-')
        full_file_name = '/'.join(parsed_path)
        parsed_path[-1] = file_info['title']
        base_bullet = '* '
        check_key = ""

        for i in range(len(parsed_path)):
            if i != 0:
                check_key = check_key + '.' + parsed_path[i]
                base_bullet = '  ' + base_bullet
            else:
                check_key = parsed_path[i]

            title_no_under_bar = parsed_path[i].replace('_', ' ')
            updated_header = base_bullet + '[' + title_no_under_bar + ']'

            if _.get(table_of_contents, check_key, None) is None:
                if i == (len(parsed_path) - 1):
                    _.set(table_of_contents, check_key, {})
                    title_index.append({
                        'title': updated_header.title(),
                        'url': full_file_name.lower()
                    })
                else:
                    mk_path = check_key.replace('.', '/')
                    readme_url = mk_path + '/' + 'README.md'
                    readme_path = os.path.join(BASE_DIR, mk_path, 'README.md')

                    if not os.path.exists(readme_path):
                        _generate_md_file(readme_path, TEMPLATE_NAMES[2], '')

                    _.set(table_of_contents, check_key, {})
                    title_index.append({
                        'title': updated_header.title(),
                        'url': readme_url
                    })

    output_to_create = os.path.join(BASE_DIR, 'SUMMARY.md')
    normalized_link = _normalize_managed_link(managed_link)
    context_input = {
        'toc': title_index,
        'managed_link': normalized_link,
        'history': {
            'cur_version': history
        }
    }

    # Updating All SUMMARY.md
    _generate_md_file(output_to_create, TEMPLATE_NAMES[1], context_input)

    # Updating All README.md
    output_to_create = os.path.join(BASE_DIR, 'README.md')
    readme_output = _.get(config, 'intro_comment')
    version_info = history if history.startswith('v') else 'v' + history
    _.set(readme_output, 'version', version_info)
    _generate_md_file(output_to_create, TEMPLATE_NAMES[2], readme_output)
Example 7
def _create_modified_json() -> dict:
    """
        Generate JSON2 that has mapping data according to Jinja templates
           Args:

           Returns:
               dict
    """
    config = _get_json_data(os.path.join(BASE_DIR, 'config.json'))
    refer_link = _.get(config, 'refer_link')
    origin_json_files = _get_json_file_path(TARGET_DIR)

    if len(origin_json_files) == 0:
        _error(f"JSON not found at ({TARGET_DIR})")
    else:
        total_dictionary = {}

        for json_path in origin_json_files:
            # Get all file-path variants: full path, file name, lower-cased file name, etc.
            file_name_syntax = _get_file_name_syntax(json_path)
            # Get the full JSON data for the file path
            full_single_json = _get_json_data(json_path)
            # Get only the protocol buffer data from the JSON file (no scalarValueTypes)
            single = full_single_json.get('files')
            # Store the full JSON data keyed by its path and JSON file name
            total_dictionary[_key_path_creator(json_path)] = full_single_json

            # iterate over each file entry's data
            for obj in single:

                obj['description'] = _modify_yaml_to_json(
                    obj.get('description'))
                package = obj.get('package')
                enums = _get_enums_dict(obj.get('enums'))
                services = obj.get('services')
                messages = obj.get('messages')
                message_names = [msg['name'] for msg in messages]

                # processing services in JSON data
                for service in services:
                    for method in service.get('methods', []):
                        # find contents referred to by the service, so links can be inserted in the markdown
                        method_status = _is_exists_in_same_file(
                            message_names, method, file_name_syntax)
                        # categorize the description and store it under the managed key for the markdown template
                        method['description'] = _desc_wrapper(
                            method['description'])
                        # get the REST API binding, e.g. a POST or GET method
                        google_api = method.get('options')

                        # parse the service data into a dict with the keys the Jinja template expects
                        if google_api is not None:
                            rest_apis = google_api.get('google.api.http',
                                                       {}).get('rules', [])
                            method['restAPI'] = rest_apis

                        if (method.get('requestFullType') in refer_link
                                and not method_status['req_status']):
                            method['requestFullType_link'] = refer_link[
                                method['requestFullType']]

                        if (method.get('responseFullType') in refer_link
                                and not method_status['res_status']):
                            method['responseFullType_link'] = refer_link[
                                method['responseFullType']]

                # processing messages in JSON data
                for message in messages:
                    # Track whether an is_required column is needed in the markdown
                    is_require_exists = False
                    for field in message["fields"]:
                        # resolve fields that refer to an ENUM type such as google.protobuf.Struct
                        _field_long_type_wrapper(message, field, enums)
                        # categorize the description and store it under the managed key for the markdown template
                        field['description'] = _desc_wrapper(
                            field['description'])
                        _field_full_type_wrapper(file_name_syntax, field,
                                                 refer_link, package)
                        # set the is_required key and value for the required column in markdown
                        if not is_require_exists and 'is_required' in field['description']:
                            is_require_exists = True

                    if not is_require_exists:
                        message['no_requires'] = 'true'

            full_single_json['file_info'] = file_name_syntax

        return total_dictionary
Example 8
def get_auto_wbnb_price():
    pancake_swap_price = requests.get(PANCAKE_PRICE_URL).json()
    auto_price_usd = Decimal(_.get(pancake_swap_price, 'prices.AUTO', 0))
    wbnb_price_usd = Decimal(_.get(pancake_swap_price, 'prices.WBNB', 0))
    return auto_price_usd, wbnb_price_usd
Example 9
def auto_compund(wallet_address, private_key, min_amount_to_harvest):
    w3 = Web3(Web3.HTTPProvider('https://bsc-dataseed1.binance.org:443'))
    auto_contract = w3.eth.contract(address=AUTO_CONTRACT, abi=AUTO_ABI)
    pancake_swap_contract = w3.eth.contract(address=PANCAKE_SWAP_CONTRACT,
                                            abi=PANCAKE_SWAP_ABI)

    auto_farms_data = requests.get(AUTO_FARM_INFO_URL).json()
    auto_pool_ids = _.get(auto_farms_data, 'pools', {}).keys()

    def get_auto_wbnb_price():
        pancake_swap_price = requests.get(PANCAKE_PRICE_URL).json()
        auto_price_usd = Decimal(_.get(pancake_swap_price, 'prices.AUTO', 0))
        wbnb_price_usd = Decimal(_.get(pancake_swap_price, 'prices.WBNB', 0))
        return auto_price_usd, wbnb_price_usd

    auto_price_usd, wbnb_price_usd = get_auto_wbnb_price()

    if not auto_pool_ids or not auto_price_usd or not wbnb_price_usd:
        print("Missing pool and/or price data. Exiting...")
        exit()

    def withdraw_auto_token_if_necessary(acc_auto_withdraw_gwei, pool_id):
        pool_id = int(pool_id)
        current_pending_auto_gwei = auto_contract.functions.pendingAUTO(
            pool_id, wallet_address).call()
        current_pending_auto_eth = w3.fromWei(current_pending_auto_gwei,
                                              'ether')

        if current_pending_auto_eth * auto_price_usd > min_amount_to_harvest:
            print(
                f"- Current pending auto for pool id {pool_id}: {current_pending_auto_eth} = ${current_pending_auto_eth * auto_price_usd:.2f}. Withdrawing...",
                end='')
            # withdrawing 0 from a pool only withdraws the pending reward, i.e. a harvest
            tx = auto_contract.functions.withdraw(pool_id, 0).buildTransaction({
                'from': wallet_address,
                'nonce': w3.eth.getTransactionCount(wallet_address),
                'gas': 500000
            })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(
                f"...done.\n tx hash = {tx_receipt['transactionHash'].hex()}")
            return acc_auto_withdraw_gwei + current_pending_auto_gwei
        return acc_auto_withdraw_gwei

    connected = w3.isConnected()

    if connected:
        print(
            f"Checking each pool to see if auto rewards meet the ${min_amount_to_harvest} threshold"
        )
        harvested_auto_amt_gwei = _.reduce(auto_pool_ids,
                                           withdraw_auto_token_if_necessary, 0)
        harvested_auto_amt_eth = w3.fromWei(harvested_auto_amt_gwei, 'ether')
        print(
            f"- Total harvested: Auto {harvested_auto_amt_eth} = ${harvested_auto_amt_eth * auto_price_usd:.2f}"
        )
        if harvested_auto_amt_gwei > 0:
            ## SELL HALF AUTO FOR WBNB

            half_harvested_auto_amt_gwei = int(harvested_auto_amt_gwei / 2)
            # get the expected WBNB amount from PancakeSwap
            amounts_out = pancake_swap_contract.functions.getAmountsOut(
                amountIn=half_harvested_auto_amt_gwei,
                path=[AUTO_TOKEN_CONTRACT, WRAP_BNB_TOKEN_CONTRACT],
            ).call()

            wbnb_amt_gwei = int(amounts_out[1] *
                                (1 - PANCAKE_ASSUMED_SLIPPAGE))

            print(
                f"- Swapping AUTO {half_harvested_auto_amt_gwei} for WBNB {wbnb_amt_gwei} on pancakeswap...",
                end='')
            tx = pancake_swap_contract.functions.swapExactTokensForTokens(
                amountIn=half_harvested_auto_amt_gwei,
                amountOutMin=wbnb_amt_gwei,
                path=[AUTO_TOKEN_CONTRACT, WRAP_BNB_TOKEN_CONTRACT],
                to=wallet_address,
                deadline=int(time()) + 60 * 5  # give 5 minutes deadline
            ).buildTransaction({
                'from': wallet_address,
                'nonce': w3.eth.getTransactionCount(wallet_address),
                'gas': 500000
            })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(
                f"...done.\n tx hash = {tx_receipt['transactionHash'].hex()}")

            transfer_log = _.find(
                tx_receipt['logs'],
                lambda x: x['address'] == WRAP_BNB_TOKEN_CONTRACT)
            if not transfer_log or not transfer_log.get('data'):
                print(
                    "Error: Could not find the log for the WBNB transfer. The transaction must have failed."
                )
                exit(-1)

            ## ADD LIQUIDITY TO PANCAKE SWAP. SWAP AUTO & WBNB FOR WBNB-AUTO LP TOKEN
            received_wbnb_amt_gwei = int(transfer_log['data'], 16)

            wbnb_amt_gwei = int(received_wbnb_amt_gwei *
                                (1 - PANCAKE_ASSUMED_SLIPPAGE))
            print(
                f"- Adding liquidity AUTO-WBNB: AUTO {half_harvested_auto_amt_gwei} for WBNB {wbnb_amt_gwei} on pancakeswap...",
                end='')
            tx = pancake_swap_contract.functions.addLiquidity(
                tokenA=AUTO_TOKEN_CONTRACT,
                tokenB=WRAP_BNB_TOKEN_CONTRACT,
                amountADesired=half_harvested_auto_amt_gwei,
                amountBDesired=wbnb_amt_gwei,
                amountAMin=0,
                amountBMin=0,
                to=wallet_address,
                deadline=int(time()) + 60 * 5  # give 5 minutes deadline
            ).buildTransaction({
                'from': wallet_address,
                'nonce': w3.eth.getTransactionCount(wallet_address),
                'gas': 500000
            })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(
                f"...done.\n tx hash = {tx_receipt['transactionHash'].hex()}")

            def find_transfer_lp_logs(log):
                # Search for the log transferring the LP tokens to your wallet.
                # A Transfer log has 3 topics: function hash, from, to.
                # Looking for `from == 0x0` and `to == wallet_address`.
                topics = log['topics']
                if len(topics) == 3 and topics[1].hex() == ROOT_ADDR:
                    # strip the leading zero padding from the address in the log
                    addr = '0x' + topics[2].hex()[ADDR_SIZE -
                                                  len(wallet_address) + 2:]
                    return addr == wallet_address.lower()

            transfer_lp_log = _.find(tx_receipt['logs'], find_transfer_lp_logs)
            if not transfer_lp_log or not transfer_lp_log.get('data'):
                print(
                    "Error: Could not find the log for the LP transfer. The transaction must have failed."
                )
                exit(-1)

            liquidity_created = int(transfer_lp_log['data'], 16)

            print(
                f"- Add {w3.fromWei(liquidity_created, 'ether')} token back into AUTO-WBNB LP vault...",
                end='')
            tx = auto_contract.functions.deposit(
                AUTO_WBNB_POOL_ID, liquidity_created).buildTransaction({
                    'from': wallet_address,
                    'nonce': w3.eth.getTransactionCount(wallet_address),
                    'gas': 500000
                })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(f"...done.\n tx hash = {tx_receipt['transactionHash'].hex()}")
    else:
        print("Connection Error!")
Example 10
role_config_group_resource_api = cm_client.RoleConfigGroupsResourceApi(
    api_client)

config_by_roles = role_config_group_resource_api.read_role_config_groups(
    cluster_name=cluster, service_name=service)

config_by_group = {}
role_config_group_name_displayname_mapping = {}
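# Map each role config group's display name to its internal name, and index
# every configuration key by the role config groups that carry it.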
for item in config_by_roles.items:
    role_group_name = item.name
    _.set(role_config_group_name_displayname_mapping, "" + item.display_name,
          role_group_name)
    for subitem in item.config.items:
        _.set(
            config_by_group, "" + subitem.name,
            _.push(_.get(config_by_group, "" + subitem.name, []),
                   role_group_name))

updating_counter = 0
updated_counter = 0
for config in args.config:
    picked_role_config_group = None
    configuration = _.replace(config[0], '.', '_')
    value = config[1]
    specified_role_config_group = _.get(config, '[2]', None)
    specified_role_config_group_display_name = _.get(config, '[2]', None)
    if (specified_role_config_group_display_name
            and _.get(role_config_group_name_displayname_mapping,
                      "" + specified_role_config_group_display_name, None)):
        specified_role_config_group = role_config_group_name_displayname_mapping[
            specified_role_config_group_display_name]
Example 11
query = "*"
if args.since:
    query = "date%%3A>now-%s" % args.since
if args.query:
    query = "%s AND (%s)" % (query, args.query)

if args.action == "showbrands":
    r = session.get("https://pro.urlscan.com/api/v1/pro/kits")
    if not r.status_code == requests.codes.ok:
        logging.error("Error fetching brand definitions: %s" % r.json())
        sys.exit(1)
    for kit in r.json()["kits"]:
        print("="*80)
        print("%s - %s (%s)\nKey: %s\nWhitelisted domains: %s" % (
            kit["name"],
            _.head(_.get(kit, "vertical", [])),
            _.head(_.get(kit, "country", [])),
            kit["key"],
            _.get(kit, "terms.domains", [])
        ))
        print("URL: https://pro.urlscan.com/search?filter=%%24phishing_%s" % kit["key"])
        print("API: https://pro.urlscan.com/api/v1/pro/search?filter=%%24phishing_%s" % kit["key"])
elif args.action == "showlatest":
    r = session.get("https://pro.urlscan.com/api/v1/pro/search?q=%s&filter=$phishing_%s&size=%d" % (query, args.brand, args.limit))
    print("\nSearching for brand '%s' with query '%s' and limit '%d'" % (args.brand, query, args.limit))
    print("Show in Pro: https://pro.urlscan.com/search?query=%s&filter=$phishing_%s" % (query, args.brand))
    if not r.status_code == requests.codes.ok:
        logging.error("Error fetching search results: %s" % r.json())
        sys.exit(1)
    print("%d/%d results returned\n\n" % (len(r.json()["results"]), r.json()["total"]))
    for res in r.json()["results"]:
Example 12
def get_or_call(obj, path):
    value = _.get(obj, path)
    if callable(value):
        return value()
    return value
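A hypothetical usage sketch: `path` may point at either a plain value or a zero-argument callable, and get_or_call normalizes both cases:

obj = {'a': {'plain': 1, 'lazy': lambda: 2}}
get_or_call(obj, 'a.plain')  # -> 1
get_or_call(obj, 'a.lazy')   # -> 2 (the callable is invoked)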
Example 13
def _get_human_doc_links():
    config = _get_json_data(os.path.join(BASE_DIR, 'config.json'))
    human_doc_links = _.get(config, 'human_doc_links')
    return human_doc_links
Example 14
def _generate_summary_mds(context_input, managed_link, history):
    table_of_contents = {}
    title_index = []
    config = _get_json_data(os.path.join(BASE_DIR, 'config.json'))
    for path in context_input:

        file_info = get_file_syntax(path)
        full_path = path[path.find('api/') + 4:].replace('.json', '.md')
        parsed_path = full_path.split('/')
        parsed_path[-1] = parsed_path[-1].replace('_', '-')
        full_file_name = '/'.join(parsed_path)
        parsed_path[-1] = file_info['title']
        base_bullet = '* '
        check_key = ""

        for i in range(len(parsed_path)):
            if i != 0:
                check_key = check_key + '.' + parsed_path[i]
                base_bullet = '  ' + base_bullet
            else:
                check_key = parsed_path[i]

            updated_header = base_bullet + '[' + parsed_path[i] + ']'
            if _.get(table_of_contents, check_key, None) is None:
                if i == (len(parsed_path) - 1):
                    _.set(table_of_contents, check_key, {})
                    title_index.append({'title': updated_header.title(), 'url': full_file_name})
                else:
                    mk_path = check_key.replace('.', '/')
                    readme_url = mk_path + '/' + 'README.md'
                    readme_path = os.path.join(BASE_DIR, mk_path, 'README.md')

                    if not os.path.exists(readme_path):
                        _generate_md_file(readme_path, TEMPLATE_NAMES[2], '')

                    _.set(table_of_contents, check_key, {})
                    title_index.append({'title': updated_header.title(), 'url': readme_url})

    if 'version_record' in history and len(history['version_record']) > 0:
        vtable_of_contents = []
        gitbook_space = _.get(config, 'refer_link.git_book_space')
        previous_version_md = os.path.join(BASE_DIR, 'previous_version', 'README.md')

        for version in history['version_record']:
            line = f'* [{version}]({gitbook_space}previous-versions/previous_version/{version}/)'
            vtable_of_contents.append(line)

        _generate_md_file(previous_version_md, TEMPLATE_NAMES[3], {'list': vtable_of_contents})

    output_to_create = os.path.join(BASE_DIR, 'SUMMARY.md')
    context_input = {'toc': title_index}
    context_input['managed_link'] = _normalize_managed_link(managed_link)
    context_input['history'] = history

    # Updating All SUMMARY.md
    _generate_md_file(output_to_create, TEMPLATE_NAMES[1], context_input)

    # Updating All README.md
    output_to_create = os.path.join(BASE_DIR, 'README.md')
    readme_output = _.get(config, 'intro_comment')
    _.set(readme_output, 'version', history['cur_version'])
    _generate_md_file(output_to_create, TEMPLATE_NAMES[2], readme_output)
Example 15
cm_client.configuration.username = cm_client_username
cm_client.configuration.password = cm_client_password

url = f'{api_protocol}://{api_host}:{port}/api/{api_version}'

api_client = cm_client.ApiClient(url)

role_config_group_resource_api = cm_client.RoleConfigGroupsResourceApi(api_client)

config_by_roles = role_config_group_resource_api.read_role_config_groups(cluster_name=cluster, service_name=service)

config_by_group = {}
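# Index each configuration key by the role config groups that define it.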
for item in config_by_roles.items:
    role_group_name = item.name
    for subitem in item.config.items:
        _.set(config_by_group, f'{subitem.name}', _.push(_.get(config_by_group, f'{subitem.name}', []), role_group_name))

for config in args.config:
    picked_role_config_group = None
    configuration = _.replace(config[0], '.', '_')
    value = config[1]
    specified_role_config_group = _.get(config, '[2]', None)

    if configuration in config_by_group:
        structures_for_config = config_by_group[configuration]

        picked_role_config_group = specified_role_config_group or _.find(structures_for_config, lambda x: _.ends_with(x, '-BASE'))

        if _.index_of(structures_for_config, picked_role_config_group) != -1:
            # updating configuration
            print(f"Updating: '{configuration}' for '{picked_role_config_group}' config group")