Example #1
def main():
    log = logger.Logger().get_logger()
    log.info("Starting application...")

    arguments_parser = argumentsparser.ArgumentsParser(log)
    arguments = arguments_parser.parse(setup_arguments())

    client = apiclient.ApiClient(log, arguments)
    notifier_object = notifier.Notifier(log, arguments)
    condition_checker = conditionchecker.ConditionChecker(
        log, client, notifier_object)

    while True:
        for queue in arguments["server_queues"]:
            arguments["server_queue"] = queue
            queue_conditions = arguments["conditions"][queue]

            if queue_conditions["conditions_ready_queue_size"] is not None \
                    or queue_conditions["conditions_unack_queue_size"] is not None \
                    or queue_conditions["conditions_total_queue_size"] is not None \
                    or queue_conditions["conditions_queue_consumers_connected"] is not None:
                condition_checker.check_queue_conditions(arguments)

        generic_conditions = arguments["generic_conditions"]
        if generic_conditions["conditions_nodes_running"] is not None \
                or generic_conditions["conditions_node_memory_used"] is not None:
            condition_checker.check_node_conditions(arguments)
        if generic_conditions["conditions_open_connections"] is not None:
            condition_checker.check_connection_conditions(arguments)
        if generic_conditions["conditions_consumers_connected"] is not None:
            condition_checker.check_consumer_conditions(arguments)

        time.sleep(arguments["server_check_rate"])
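
The loop above only reads a handful of keys from the parsed arguments. A minimal sketch of the dictionary shape it assumes (the key names come from the code; the concrete values are hypothetical placeholders):

# Hypothetical arguments structure assumed by the monitoring loop above.
# Only the keys referenced in main() are shown; values are placeholders.
arguments = {
    "server_check_rate": 60,  # seconds to sleep between check cycles
    "server_queues": ["orders", "emails"],
    "conditions": {
        "orders": {
            "conditions_ready_queue_size": 1000,
            "conditions_unack_queue_size": None,
            "conditions_total_queue_size": None,
            "conditions_queue_consumers_connected": 1,
        },
        "emails": {
            "conditions_ready_queue_size": None,
            "conditions_unack_queue_size": 500,
            "conditions_total_queue_size": None,
            "conditions_queue_consumers_connected": None,
        },
    },
    "generic_conditions": {
        "conditions_nodes_running": 3,
        "conditions_node_memory_used": None,
        "conditions_open_connections": 100,
        "conditions_consumers_connected": None,
    },
}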
Example #2
    def parse(self, parser):
        arguments = vars(parser.parse_args())

        model = argument.Argument(self.log, arguments)

        # parse the standard arguments (created with argparse)
        for group in parser._action_groups:
            group_title = group.title
            for group_argument in group._group_actions:
                arguments[group_argument.dest] = model.get_value(
                    group_title, group_argument)

        if arguments["server_queues_discovery"] == True:
            arguments["server_queues"] = apiclient.ApiClient(
                self.log, arguments).get_queues()
        else:
            arguments["server_queues"] = arguments["server_queues"].split(",")

        if arguments["email_to"] is not None:
            arguments["email_to"] = arguments["email_to"].split(",")

        # parse the non-standard arguments stored in files
        arguments["queue_conditions"] = dict()
        for queue in arguments["server_queues"]:
            group_title = "Conditions:" + queue
            if not model.files_have_group(group_title):
                continue

            arguments["queue_conditions"][queue] = dict()
            for condition in QUEUE_CONDITIONS:
                group_argument = model.create_argument_object(
                    condition, int, None)
                arguments["queue_conditions"][queue][
                    condition] = model.get_value(group_title, group_argument)

        self.validate(arguments)

        conditions = self.format_conditions(arguments)
        arguments = {**arguments, **conditions}

        return arguments
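
parse() recovers each option's group title and destination name by walking argparse's private _action_groups and _group_actions attributes. A small standalone sketch of that traversal (the parser and its options are hypothetical):

import argparse

# Hypothetical parser mirroring the kind of setup parse() receives.
parser = argparse.ArgumentParser()
server = parser.add_argument_group("Server")
server.add_argument("--server-host", dest="server_host", default="localhost")
server.add_argument("--server-check-rate", dest="server_check_rate", type=int, default=60)

# The same private traversal used in parse(): every registered option is
# reachable as (group title, action), and action.dest names the arguments key.
for group in parser._action_groups:
    for action in group._group_actions:
        print(group.title, "->", action.dest)
# Prints argparse's default groups (e.g. the built-in --help option) plus:
#   Server -> server_host
#   Server -> server_check_rate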
Example #3
def main():
    # set the global options
    set_options()

    # get the cluster connection info
    get_options()

    # disable insecure connection warnings
    # please be advised and aware of the implications in a production environment!
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    # make sure all required info has been provided
    if not cluster_ip:
        raise Exception("Cluster IP is required.")
    elif not username:
        raise Exception("Username is required.")
    elif not password:
        raise Exception("Password is required.")
    else:
        # get a list of all blueprints in the specified directory
        dir_path = directory.rstrip("/")
        blueprint_list = glob.glob(dir_path + '/*.json')

        if len(blueprint_list) > 0:

            # first check if the user has specified a project for the imported blueprints
            # if they did, we need to make sure the project exists
            if project != 'none':
                project_found = False
                client = apiclient.ApiClient('post', cluster_ip,
                                             "projects/list",
                                             '{ "kind": "project" }', username,
                                             password)
                results = client.get_info()
                for current_project in results["entities"]:
                    if current_project["status"]["name"] == project:
                        project_found = True
                        project_uuid = current_project["metadata"]["uuid"]

            # was the project found? (only applies when a project was specified)
            if project != 'none' and not project_found:
                # project wasn't found
                # exit at this point as we don't want to assume all blueprints should then hit the 'default' project
                print('\nProject ' + project +
                      ' was not found.  Please check the name and retry.')
                sys.exit()
            elif project != 'none':
                print('\nProject ' + project + ' exists.')

            # make sure the user knows what's happening ... ;-)
            print(
                str(len(blueprint_list)) +
                ' JSON files found. Starting import ...\n')

            # go through the blueprint JSON files found in the specified directory
            for blueprint in blueprint_list:
                start_time = localtime()
                # open the JSON file from disk
                with open(blueprint, "r") as f:
                    raw_json = f.read()

                    # if no project was specified on the command line, we've already pre-set the project variable to 'none'
                    # if a project was specified, we need to add it into the JSON data
                    if project != 'none':
                        parsed = json.loads(raw_json)
                        parsed["metadata"]["project_reference"] = {}
                        parsed["metadata"]["project_reference"][
                            "kind"] = "project"
                        parsed["metadata"]["project_reference"][
                            "uuid"] = project_uuid

                        # ADD VARIABLES if the current blueprint being analyzed is the EraServerDeployment.json
                        if "EraServerDeployment" in parsed["metadata"]["name"]:
                            variable_list_of_dictionaries = parsed["spec"][
                                "resources"]["service_definition_list"][0][
                                    "variable_list"]
                            for var_dict in variable_list_of_dictionaries:
                                variable_name = (var_dict["name"].lower())
                                # if the variable was defined as an arg, add to the json file, skip otherwise
                                if dict_variables[variable_name] != 'none':
                                    var_dict["value"] = dict_variables[
                                        variable_name]

                        # ensure the changes just made are saved into the json
                        raw_json = json.dumps(parsed)

                    # remove the "status" key from the JSON data this is included on export but is invalid on import
                    pre_process = json.loads(raw_json)
                    if "status" in pre_process:
                        pre_process.pop("status")
                    if "product_version" in pre_process:
                        pre_process.pop("product_version")

                    # after removing the non-required keys, make sure the data is back in the correct format
                    raw_json = json.dumps(pre_process)

                    # try and get the blueprint name
                    # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all
                    try:
                        blueprint_name = json.loads(raw_json)['spec']['name']
                    except (KeyError, json.decoder.JSONDecodeError):
                        print(
                            blueprint +
                            ' : Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?'
                        )
                        sys.exit()

                    # got the blueprint name - this is probably a valid blueprint file
                    # we can now continue and try the upload
                    client = apiclient.ApiClient('post', cluster_ip,
                                                 "blueprints/import_json",
                                                 raw_json, username, password)
                    try:
                        json_result = client.get_info()
                    except json.decoder.JSONDecodeError:
                        print(blueprint +
                              ': No processable JSON response available.')
                        sys.exit()

                # calculate how long the import took
                end_time = localtime()
                difference = mktime(end_time) - mktime(start_time)

                try:
                    message = blueprint + ' : ' + json_result['message_list'][
                        0]['message'] + '.'
                except KeyError:
                    message = blueprint + ' : Successfully imported in ' + str(difference) + ' seconds.'

                # tell the user what happened, including any failures
                print(message)

        else:
            print('\nNo JSON files found in ' + directory +
                  ' ... nothing to import!')

        # w00t
        print("\nFinished!\n")
Example #4
# <http://www.gnu.org/licenses/>.
"""The Betdaq API methods."""

import apimethod
import apiclient

# time in seconds to sleep between calling APIGetPrices (when called
# with > 50 market ids).
_PRICETHROTTLE = 10

# create suds clients.  There is only 1 WSDL file, but this has two
# 'services'.  The services are for 'readonly' methods and 'secure'
# methods. Secure methods use an https:// url and require the user's
# Betdaq username and password in the SOAP headers, read-only methods
# use http:// and only require username.
_rcl = apiclient.ApiClient('readonly')
_scl = apiclient.ApiClient('secure')


def set_user(name, password):
    """
    Set username and password for SOAP headers.  Note that these are
    automatically set to be const.BDAQUSER and const.BDAQPASS,
    respectively, so we only need to call this method if we don't have
    these values set.
    """

    _rcl.set_headers(name, password)
    _scl.set_headers(name, password)
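
As the docstring says, the SOAP headers default to const.BDAQUSER and const.BDAQPASS, so set_user only needs to be called for non-default credentials. A hypothetical usage sketch (the importing module name and the credentials are placeholders):

# Hypothetical usage of the module above; "betdaqapi" and the credentials
# are placeholders, not names taken from the project.
import betdaqapi

betdaqapi.set_user("my_betdaq_username", "my_betdaq_password")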

Example #5
CARD_CACHE_DIR = BASE + "data/cards/"
EMBLEM_CACHE_DIR = BASE + "data/emblems/"
BANNER_CACHE_DIR = BASE + "data/banners/"
INFO_CACHE_DIR = BASE + "data/info/"
SNAPSHOT_DIR = BASE + "data/snap/"
RESOURCES_DIR = BASE + "data/resources/"

THROTTLE = 2
RES_POLL = 600

LOG_FILE = BASE + "log/info.log"

DEF_MAX_AGE = 300

g_client = apiclient.ApiClient(account.user_id, account.viewer_id,
                               account.udid)
g_lock = threading.Lock()
g_last_fetch = 0
g_last_check = 0
g_resmgr = resource_mgr.ResourceManager(g_client.res_ver, RESOURCES_DIR,
                                        app.logger)


class RequestFormatter(logging.Formatter):
    def format(self, record):
        s = logging.Formatter.format(self, record)
        try:
            return '[%s] [%d] [%s] [%s %s] ' % (
                self.formatTime(record), account.index, request.remote_addr,
                request.method, request.path) + s
        except Exception:
            # no active request context; fall back to the plain formatted
            # message (the original snippet is truncated here, so this
            # completion is an assumption)
            return s
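
RequestFormatter only prefixes request metadata onto whatever handler it is attached to. A minimal sketch of wiring it into the Flask app logger (the handler type and format string are assumptions; LOG_FILE and app come from the module above):

import logging

# Hypothetical wiring -- the real project may configure its handlers differently.
handler = logging.FileHandler(LOG_FILE)
handler.setFormatter(RequestFormatter("%(levelname)s %(message)s"))
app.logger.addHandler(handler)
app.logger.setLevel(logging.INFO)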
Example #6
def main():
        
        # set the global options
        set_options()

        # get the cluster connection info
        get_options()

        # disable insecure connection warnings
        # please be advised and aware of the implications in a production environment!
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        # make sure all required info has been provided
        if not cluster_ip:
            raise Exception("Cluster IP is required.")
        elif not username:
            raise Exception("Username is required.")
        elif not password:
            raise Exception("Password is required.")
        else:
            
            # get a list of all blueprints in the specified directory
            blueprint_list = glob.glob(f'{directory.rstrip("/")}/*.json')

            if len(blueprint_list) > 0:

                # first check if the user has specified a project for the imported blueprints
                # if they did, we need to make sure the project exists
                if project != 'none':
                    project_found = False
                    client = apiclient.ApiClient('post', cluster_ip, "projects/list", '{ "kind": "project" }', username, password )
                    results = client.get_info()
                    for current_project in results["entities"]:
                        if current_project["status"]["name"] == project:
                            project_found = True
                            project_uuid = current_project["metadata"]["uuid"]

                # was the project found? (only applies when a project was specified)
                if project != 'none' and not project_found:
                    # project wasn't found
                    # exit at this point as we don't want to assume all blueprints should then hit the 'default' project
                    print(f"\nProject {project} was not found.  Please check the name and retry.")
                    sys.exit()
                elif project != 'none':
                    print(f"\nProject {project} exists.")

                # make sure the user knows what's happening ... ;-)
                print(f"\n{len(blueprint_list)} JSON files found. Starting import ...\n")

                # go through the blueprint JSON files found in the specified directory
                for blueprint in blueprint_list:
                    start_time = localtime()
                    # open the JSON file from disk
                    with open(f"{blueprint}", "r") as f:
                        raw_json = f.read()

                        # if no project was specified on the command line, we've already pre-set the project variable to 'none'
                        # if a project was specified, we need to add it into the JSON data
                        if project != 'none':
                            parsed = json.loads(raw_json)
                            parsed["metadata"]["project_reference"] = {}
                            parsed["metadata"]["project_reference"]["kind"] = "project"
                            parsed["metadata"]["project_reference"]["uuid"] = project_uuid
                            raw_json = json.dumps(parsed)

                        # remove the "status" key from the JSOn data
                        # this is included on export but is invalid on import
                        pre_process = json.loads(raw_json)
                        if "status" in pre_process:
                            pre_process.pop("status")

                        # after removing the non-required keys, make sure the data is back in the correct format
                        raw_json = json.dumps(pre_process)
                        
                        # try and get the blueprint name
                        # if this fails, it's either a corrupt/damaged/edited blueprint JSON file or not a blueprint file at all
                        try:
                            blueprint_name = json.loads(raw_json)['spec']['name']
                        except (KeyError, json.decoder.JSONDecodeError):
                            print(f"{blueprint}: Unprocessable JSON file found. Is this definitely a Nutanix Calm blueprint file?")
                            sys.exit()
                        # got the blueprint name - this is probably a valid blueprint file
                        # we can now continue and try the upload
                        client = apiclient.ApiClient(
                            'post',
                            cluster_ip,
                            "blueprints/import_json",
                            raw_json,
                            username,
                            password
                        )
                        try:
                            json_result = client.get_info()
                        except json.decoder.JSONDecodeError:
                            print(f'{blueprint}: No processable JSON response available.')
                            sys.exit()
                        
                    # calculate how long the import took
                    end_time = localtime()
                    difference = mktime(end_time) - mktime(start_time)

                    try:
                        message = f"{blueprint}: {json_result['message_list'][0]['message']}."
                    except KeyError:
                        message = f"{blueprint}: Successfully imported in {difference} seconds."

                    # tell the user what happened, including any failures
                    print(f"{message}")

            else:
                print(f"\nNo JSON files found in {directory} ... nothing to import!")
                
            # w00t
            print("\nFinished!\n")
Example #7
def main():

    # set the global options
    set_options()

    # get the cluster connection info
    get_options()
    """
    disable insecure connection warnings
    please be advised and aware of the implications
    in a production environment!
    """
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    # make sure all required info has been provided
    if not cluster_ip:
        raise Exception("Cluster IP is required.")
    elif not username:
        raise Exception("Username is required.")
    elif not password:
        raise Exception("Password is required.")
    else:
        """
        do a preliminary check to see if this is AOS or CE
        not used in this script but is could be useful for
        later modifications
        """
        client = apiclient.ApiClient(
            "post",
            cluster_ip,
            "clusters/list",
            '{ "kind": "cluster" }',
            username,
            password,
        )
        results = client.get_info()
        is_ce = False
        for cluster in results["entities"]:
            if ("-ce-" in cluster["status"]["resources"]["config"]["build"]
                ["full_version"]):
                is_ce = True

        endpoints = {}
        endpoints["blueprints"] = [
            "blueprint", (f'"length":{ENTITY_RESPONSE_LENGTH}')
        ]

        # get all blueprints
        for endpoint in endpoints:
            if endpoints[endpoint][1] != "":
                client = apiclient.ApiClient(
                    "post",
                    cluster_ip,
                    (f"{endpoints[endpoint][0]}s/list"),
                    (f'{{ "kind": "{endpoints[endpoint][0]}", {endpoints[endpoint][1]} }}'
                     ),
                    username,
                    password,
                )
            else:
                client = apiclient.ApiClient(
                    "post",
                    cluster_ip,
                    (f"{endpoints[endpoint][0]}s/list"),
                    (f'{{ "kind": "{endpoints[endpoint][0]}" }}'),
                    username,
                    password,
                )
            results = client.get_info()

        # make sure the user knows what's happening ... ;-)
        print(
            f"\n{len(results['entities'])} blueprints collected from {cluster_ip}\n"
        )
        '''
        go through all the blueprints and export them to appropriately named files
        filename will match the blueprint name and should work fine if blueprint name contains spaces (tested on Ubuntu Linux)
        '''
        for blueprint in results["entities"]:
            day = strftime("%d-%b-%Y", localtime())
            time = strftime("%H%M%S", localtime())
            blueprint_filename = f"{day}_{time}_{blueprint['status']['name']}.json"
            client = apiclient.ApiClient(
                "get",
                cluster_ip,
                f"blueprints/{blueprint['status']['uuid']}/export_file",
                '{ "kind": "cluster" }',
                username,
                password,
            )
            exported_json = client.get_info()
            with open(f"./{blueprint_filename}", "w") as f:
                json.dump(exported_json, f)
                print(
                    f"Successfully exported blueprint '{blueprint['status']['name']}'"
                )
        print("\nFinished!\n")