Example #1
def sync_assets(assets_destination=config['assets_destination'],
                asset_files_dir=config['asset_files_dir']):
    # TODO: right now this is just a little nginx server where these files can be collected by clients.
    if not os.path.isdir(assets_destination):
        log.info("Directory %s does not exist, attempting to create it..." %
                 assets_destination)
        try:
            os.mkdir(assets_destination)
        except PermissionError:  # builtin exception; it does not live in os
            log.error(
                "Permission error while attempting to create destination directory %s"
                % assets_destination)
            return False
    cmd = [
        "rsync", "-r", "--include", "*.tar.gz", asset_files_dir + "/",
        assets_destination
    ]
    log.debug(cmd)
    try:
        subprocess.check_call(cmd)
    except (FileNotFoundError, PermissionError,
            subprocess.CalledProcessError) as e:
        log.error("rsync failed!")
        log.error(e)
        return False
    log.info("Successfully synched assets to destination folder.")
    return True
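
Note that rsync include rules only filter the transfer when they are paired with an exclude rule; as written, the command above copies every file in the source tree. A minimal sketch of a variant that actually restricts the copy to archives (the two paths are placeholders for config['asset_files_dir'] and config['assets_destination']):

cmd = [
    "rsync", "-r",
    "--include", "*/",        # keep descending into subdirectories
    "--include", "*.tar.gz",  # keep the asset archives
    "--exclude", "*",         # drop everything else
    "/srv/asset_files/",      # placeholder source directory
    "/srv/assets",            # placeholder destination
]
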
Example #2
def import_defs():

	check_defs=True
	asset_defs=True
	filter_defs=True
	handler_defs=True
	errors=False
	assets=True

	if not sensu_connect.test_connection():
		exit(1)

	sensu_connect.api_auth()

	# sync asset definitions

	## if definitions already exist in the folder, these can just be caught with the main case
	## otherwise if we're building, we need to make sure that we build first and that this step
	## succeeds before moving on.

	asset_list = build_assets()
	if asset_list:
		for file in asset_list:
			asset_def_succeeded=sensu_connect.sync_definition('asset', file)
			if not asset_def_succeeded:
				asset_defs=False
				break
		if asset_defs:
			log.info("Asset definitions successfully imported.")
		assets=sync_assets()

	# sync definitions
	# config['api_items']['asset']['endpoint']['dir']
	for item_name, item_obj in config['api_items'].items():
		try:
			import_succeeded = True
			for file in os.listdir(item_obj['dir']):
				import_succeeded=sensu_connect.sync_definition(item_name, file)
				if not import_succeeded:
					log.error("Failed importing "+item_name+" "+file)
					errors = True
					break
			if import_succeeded:
				log.info("All "+item_name+" definitions succesfully imported.")
		except KeyError:
			log.debug("No directory key for api item "+item_name)

	if (assets and asset_defs and check_defs and not errors):
		log.info("All file and API operations completed successfully!")
		exit(0)
	else:
		log.info("Some sync operations did not complete, failing the job.")
		if not assets:
			log.error("Asset rsync failed.")
		if not asset_defs:
			log.error("API sync for asset definitions failed.")
		if not check_defs:
			log.error("API sync for check definitions failed.")
		if errors:
			log.error("API sync failed for one or more definition types.")
		exit(1)
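
The generic loop above reads item_obj['dir'] for each entry of config['api_items'] (the inline comment also hints at a nested 'endpoint' key). A minimal illustrative shape for that config section, with placeholder paths:

config = {
    "api_items": {
        # 'dir' is optional; entries without one are skipped by the KeyError handler
        "check":   {"dir": "defs/checks"},
        "filter":  {"dir": "defs/filters"},
        "handler": {"dir": "defs/handlers"},
        "asset":   {},
    }
}
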
Example #3
def import_defs():

	asset_list = build_assets()

	check_defs=True
	asset_defs=True
	filter_defs=True
	handler_defs=True
	errors=False
	assets=True

	if not sensu_connect.test_connection():
		exit(1)

	sensu_connect.api_auth()

	# sync asset definitions

	if asset_list:
		for file in asset_list:
			asset_def_succeeded=sensu_connect.sync_definition('asset', file)
			if not asset_def_succeeded:
				asset_defs=False
				break
		if asset_defs:
			log.info("Asset definitions successfully imported.")
		assets=sync_assets()

	# sync definitions
	# config['api_items']['asset']['endpoint']['dir']
	for item_name, item_obj in config['api_items'].items():
		try:
			import_succeeded = True
			for file in os.listdir(item_obj['dir']):
				import_succeeded=sensu_connect.sync_definition(item_name, file)
				if not import_succeeded:
					log.error("Failed importing "+item_name+" "+file)
					errors = True
					break
			if import_succeeded:
				log.info("All "+item_name+" definitions succesfully imported.")
		except KeyError:
			pass

	if (assets and asset_defs and check_defs and not errors):
		log.info("All file and API operations completed successfully!")
		exit(0)
	else:
		log.info("Some sync operations did not complete, failing the job.")
		if not assets:
			log.error("Asset rsync failed.")
		if not asset_defs:
			log.error("API sync for asset definitions failed.")
		if not check_defs:
			log.error("API sync for check definitions failed.")
		if errors:
			log.error("API sync failed for one or more definition types.")
		exit(1)
Example #4
def main():
    validation_message = {
        'checks': validate_names(config['check_defs_dir'], "check"),
        'assets': validate_names(config['asset_defs_dir'], "asset"),
        'filters': validate_names(config['filter_defs_dir'], "filter"),
        'handlers': validate_names(config['handler_defs_dir'], "handler")
    }

    # can use config['api_items'] for name and messaging, as in import_definitions.py
    # for item_name, item_obj in config['api_items']:

    for k, v in validation_message.items():
        if not v:
            log.info("Failed validation at %s stage.", k)
            exit(1)
    log.info("All tests passed!")
    exit(0)
Example #5
def validate_names(dir_to_validate, object_type):
    obj_names = []
    for file in os.listdir(dir_to_validate):
        log.debug(file)
        file_path = os.path.join(dir_to_validate, file)
        try:
            with open(file_path) as handle:
                contents = json.load(handle)
        except (json.decoder.JSONDecodeError, ValueError):
            log.info("Directory contains a file with invalid JSON: " +
                     file_path)
            return False
        filename = os.path.splitext(file)[0]
        try:
            objname = contents["metadata"]["name"]
        except KeyError:
            log.error(
                "A name for the %s must be specified inside the 'metadata' block of the definition file."
                % object_type)
            return False
        if filename != objname:
            log.error(
                "The filename of the definition json is required to match the 'name' attribute within the definition."
            )
            return False
        obj_names.append(objname)
        if obj_names.count(objname) > 1:
            log.error(
                "There is more than one %s with the same name. Failing %s: %s"
                % (object_type, object_type, objname))
            return False
        if object_type == "check":
            try:
                if (contents["ttl"] <= contents["interval"]):
                    log.error(
                        "The ttl must be greater than the check interval")
                    return False
            except (KeyError, TypeError):  # missing keys or non-numeric values
                pass

    log.info("All " + object_type + " tests passed successfully.")
    return True
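
For reference, a definition passes all of these rules when the filename stem equals metadata.name and, for checks, ttl exceeds interval. A minimal illustrative sketch that writes such a file (all values are placeholders):

import json

# Written as "check_disk.json" so the filename stem matches metadata.name;
# ttl > interval satisfies the check-specific rule.
definition = {
    "command": "check_disk.sh",
    "interval": 60,
    "ttl": 120,
    "metadata": {"name": "check_disk", "namespace": "default"},
}
with open("check_disk.json", "w") as handle:
    json.dump(definition, handle, indent=4)
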
Example #6
def test_connection(self):
    try:
        health = requests.get(config['api_healthcheck_endpoint'],
                              timeout=10)
    except (requests.exceptions.ConnectionError,
            requests.exceptions.Timeout):
        log.error('Could not reach API health check - network or DNS error')
        return False
    try:
        # raise_for_status() only raises HTTPError, never ConnectionError
        health.raise_for_status()
    except requests.exceptions.HTTPError:
        log.error('Could not reach API health check - HTTP error %s' %
                  str(health.status_code))
        log.debug('Response headers: %s' %
                  json.dumps(dict(health.headers)))
        return False
    log.info('API health check completed successfully.')
    log.debug('Status code: %s' % health.status_code)
    return True
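
test_connection() only needs a reachable health URL in config. An illustrative value (the port and path follow Sensu Go's backend defaults, but treat them as placeholders):

config = {
    # Placeholder; Sensu Go's backend exposes /health on its API port by default.
    "api_healthcheck_endpoint": "http://127.0.0.1:8080/health",
}
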
Example #7
def build_assets():

    output = os.listdir(config['check_scripts_dir'])
    asset_list = os.listdir(config['asset_defs_dir'])

    try:
        output.remove('.DS_Store')
    except ValueError:
        pass

    if not os.path.exists(config['asset_files_dir']):
        os.mkdir(config['asset_files_dir'])
    log.debug(output)
    for item in output:
        asset_hash = ''
        basename = os.path.basename(item)
        basepath = os.path.join(config['check_scripts_dir'], basename)
        archive_name = basename + ".tar"
        archive_path = os.path.join(config['asset_files_dir'], archive_name)
        # Open in binary mode; the date string must not carry shell quotes,
        # since subprocess passes arguments to tar verbatim.
        with open(archive_path, 'wb') as archive_file:
            tarproc = subprocess.Popen([
                "tar", "--mtime", "1970-01-01 00:00:00", "--owner", "root",
                "-cv", "-C", basepath, 'bin'
            ],
                                       stdout=archive_file)
            tarproc.wait()
        log.debug(basename)
        log.debug("This should be empty:")
        log.debug(asset_hash)
        with open(archive_path, 'rb') as archive_handle:
            asset_hash = hashlib.sha512(archive_handle.read()).hexdigest()
        log.debug("This should have a correct hash:")
        log.debug(asset_hash)
        log.debug("archive_path = {}".format(archive_path))
        asset_definition_file = os.path.join(config['asset_defs_dir'],
                                             basename + ".json")
        log.info(asset_definition_file)
        if not os.path.exists(asset_definition_file):
            asset_obj = {
                "url": parse.urljoin(config['api_base_url'], archive_name),
                "sha512": asset_hash,
                "metadata": {
                    "name": basename,
                    "Content-Type": "application/zip",
                    "namespace": "default"
                }
            }
            with open(asset_definition_file, "w+") as handler:
                json.dump(asset_obj, handler, indent=4)
        else:
            with open(asset_definition_file, "r") as asset_file_handler:
                asset_obj = json.load(asset_file_handler)
            asset_obj["sha512"] = asset_hash
            log.debug(asset_obj)
            with open(asset_definition_file, "w") as new_handler:
                json.dump(asset_obj, new_handler, indent=4)

    return asset_list
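
Because each definition's sha512 must track the archive on disk, a small consistency pass after build_assets() can catch stale definitions. A hedged sketch that reuses only the config keys and naming conventions from the snippets above (config and log are the same module globals):

import hashlib
import json
import os

def verify_asset_hashes():
    # Recompute each archive's digest and compare it with its definition file.
    for def_name in os.listdir(config['asset_defs_dir']):
        if not def_name.endswith(".json"):
            continue
        with open(os.path.join(config['asset_defs_dir'], def_name)) as handle:
            definition = json.load(handle)
        archive = os.path.join(config['asset_files_dir'],
                               os.path.splitext(def_name)[0] + ".tar")
        with open(archive, 'rb') as handle:
            digest = hashlib.sha512(handle.read()).hexdigest()
        if digest != definition.get("sha512"):
            log.error("Stale sha512 for " + def_name)
            return False
    return True
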