Example #1
def sync_assets(assets_destination=config['assets_destination'],
                asset_files_dir=config['asset_files_dir']):
    # TODO: for now the destination is just a small nginx document root from which clients fetch these files.
    if not os.path.isdir(assets_destination):
        log.info("Directory %s does not exist, attempting to create it..." %
                 assets_destination)
        try:
            os.mkdir(assets_destination)
        except PermissionError:
            log.error(
                "Permission error while attempting to create destination directory %s"
                % assets_destination)
            return False
    # NOTE: an rsync --include only takes effect when paired with an
    # --exclude, so exclude everything else to copy only the tarballs.
    cmd = [
        "rsync", "-r", "--include", "*.tar.gz", "--exclude", "*",
        asset_files_dir + "/", assets_destination
    ]
    log.debug(cmd)
    try:
        subprocess.check_call(cmd)
    except (FileNotFoundError, PermissionError,
            subprocess.CalledProcessError) as e:
        log.error("rsync failed!")
        log.error(e)
        return False
    log.info("Successfully synched assets to destination folder.")
    return True
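These snippets all reference a module-level config dict and log object that the examples omit. A minimal sketch of the surrounding setup, assuming only the key names used above (the values and module layout are hypothetical):

import logging
import os
import subprocess

log = logging.getLogger(__name__)

# Hypothetical paths; only the key names come from sync_assets() above.
config = {
    'assets_destination': '/var/www/assets',
    'asset_files_dir': './asset_files',
}

if sync_assets():
    log.info("Assets are in place.")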
Example #2
def import_defs():

    # Per-step status flags; errors tracks failures in the generic loop below.
    asset_defs = True
    assets = True
    errors = False

    if not sensu_connect.test_connection():
        sys.exit(1)

    sensu_connect.api_auth()

    # Sync asset definitions first.
    # If definitions already exist in the folder, they can just be caught by
    # the generic loop below; otherwise, if we're building, we need to make
    # sure the build happens first and that this step succeeds before moving
    # on. asset_list is expected to be set at module level (see build_assets()).
    if asset_list:
        for file in asset_list:
            asset_def_succeeded = sensu_connect.sync_definition('asset', file)
            if not asset_def_succeeded:
                asset_defs = False
                break
        if asset_defs:
            log.info("Asset definitions successfully imported.")
        assets = sync_assets()

    # Sync every other definition type that declares a directory, e.g.
    # config['api_items']['check']['dir'] and ['endpoint'].
    for item_name, item_obj in config['api_items'].items():
        try:
            import_succeeded = True
            for file in os.listdir(item_obj['dir']):
                import_succeeded = sensu_connect.sync_definition(item_name, file)
                if not import_succeeded:
                    log.error("Failed importing " + item_name + " " + file)
                    errors = True
                    break
            if import_succeeded:
                log.info("All " + item_name + " definitions successfully imported.")
        except KeyError:
            log.debug("No directory key for api item " + item_name)

    if assets and asset_defs and not errors:
        log.info("All file and API operations completed successfully!")
        sys.exit(0)
    else:
        log.info("Some sync operations did not complete, failing the job.")
        if not assets:
            log.error("Asset rsync failed.")
        if not asset_defs:
            log.error("API sync for asset definitions failed.")
        if errors:
            log.error("API sync failed for one or more definition types.")
        sys.exit(1)
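The generic loop above implies that config['api_items'] maps each definition type to a 'dir' and an 'endpoint'. A sketch of that shape, with hypothetical directories and Sensu Go-style endpoints (only the key names are taken from the code):

config['api_items'] = {
    'check': {
        'dir': './check_defs',
        'endpoint': '/api/core/v2/namespaces/default/checks',
    },
    'asset': {
        'dir': './asset_defs',
        'endpoint': '/api/core/v2/namespaces/default/assets',
    },
}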
Example #3
def validate_names(dir_to_validate, object_type):
    obj_names = []
    for file in os.listdir(dir_to_validate):
        log.debug(file)
        try:
            with open(os.path.join(dir_to_validate, file)) as fh:
                contents = json.load(fh)
        # json.decoder.JSONDecodeError and UnicodeDecodeError are both
        # subclasses of ValueError, so one clause covers them.
        except ValueError:
            log.error("Directory contains a file with invalid JSON: " +
                      os.path.join(dir_to_validate, file))
            return False
        filename = os.path.splitext(file)[0]
        try:
            objname = contents["metadata"]["name"]
        except KeyError:
            log.error(
                "A name for the " + object_type +
                " must be specified inside the 'metadata' block of the configuration file."
            )
            return False
        if filename != objname:
            log.error(
                "The filename of the definition json is required to match the 'name' attribute within the definition."
            )
            return False
        obj_names.append(objname)
        if obj_names.count(objname) > 1:
            log.error(
                "There is more than one " + object_type +
                " with the same name.  Failing " + object_type + ": " + objname)
            return False
        if object_type == "check":
            try:
                if (contents["ttl"] <= contents["interval"]):
                    log.error(
                        "The ttl must be greater than the check interval")
                    return False
            except (KeyError, ValueError):
                pass

    log.info("All " + object_type + " tests passed successfully.")
    return True
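A natural place to call this is just before import_defs(), once per definition type. A sketch under the same config['api_items'] assumption as above (sys.exit assumes an import sys at module level):

for item_name, item_obj in config['api_items'].items():
    if 'dir' in item_obj and not validate_names(item_obj['dir'], item_name):
        sys.exit(1)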
Example #4
 def test_connection(self):
     try:
         health = requests.get(config['api_healthcheck_endpoint'],
                               timeout=10)
     except (requests.exceptions.ConnectionError,
             requests.exceptions.Timeout):
         log.error('Could not reach API health check')
         return False
     try:
         health.raise_for_status()
         # raise_for_status() raises only HTTPError; connection failures
         # were already caught above.
     except requests.exceptions.HTTPError:
         log.error('Could not reach API health check - HTTP error %s' %
                   str(health.status_code))
         log.debug('Response headers: %s' %
                   json.dumps(dict(health.headers)))
         return False
     log.info('API health check completed successfully.')
     log.debug('Status code: %s' % health.status_code)
     return True
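Example #2 above drives this method through a module-level sensu_connect object. A minimal sketch of that handshake (the SensuConnect class name is a guess; only the method names come from these examples):

sensu_connect = SensuConnect()
if not sensu_connect.test_connection():
    sys.exit(1)
sensu_connect.api_auth()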
Example #5
 def perform_api_call(self, endpoint, method="PUT", request_body=None):
     auth_header = {'Authorization': self.token}
     auth_header.update(self.headers)
     request_url = parse.urljoin(config['api_url'], endpoint)
     log.debug(request_url)
     resp = requests.request(method,
                             request_url,
                             headers=auth_header,
                             data=request_body)
     try:
         resp.raise_for_status()
     except requests.exceptions.HTTPError:
         log.error('Error ' + str(resp.status_code))
         log.debug('Request URL: ' + request_url)
         log.debug('Response headers: %s' % json.dumps(dict(resp.headers)))
         log.error(resp.text)
         return False
     # log.debug(resp.content)
     self.last_resp_content = resp.content.decode('utf8')
     return True
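Note the design choice here: failures are signalled by returning False rather than raising, so a caller like sync_definition below can aggregate per-file results, and the decoded body is stashed on self.last_resp_content for callers that need the response.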
Example #6
 def sync_definition(self, item_type, file):
     try:
         directory = config['api_items'][item_type]["dir"]
         endpoint = config['api_items'][item_type]["endpoint"]
     except KeyError as e:
         log.error(
             "Couldn't find an entry for these parameters in the project configuration"
         )
         log.debug("Item type passed to function: " + item_type)
         log.debug("Filename passed to function: " + file)
         log.debug(e)
         return False
     # Use a context manager so the file handle is closed on every path,
     # including the invalid-JSON early return.
     try:
         with open(os.path.join(directory, file), 'r') as fh:
             item = json.load(fh)
     except (json.decoder.JSONDecodeError, UnicodeDecodeError):
         log.error('Error importing definition for file ' + file +
                   ': invalid JSON.')
         return False
     name = "/" + os.path.splitext(file)[0]
     result = self.perform_api_call(endpoint + name,
                                    request_body=json.dumps(item))
     return result
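The object name is derived from the filename and PUT to endpoint + name, which is exactly why validate_names in Example #3 insists that each filename match the 'name' attribute inside its definition.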
Example #7
def build_assets():

    output = os.listdir(config['check_scripts_dir'])
    asset_list = os.listdir(config['asset_defs_dir'])

    # macOS Finder drops .DS_Store metadata files into directories; skip it.
    try:
        output.remove('.DS_Store')
    except ValueError:
        pass

    if not os.path.exists(config['asset_files_dir']):
        os.mkdir(config['asset_files_dir'])
    log.debug(output)
    for item in output:
        basename = os.path.basename(item)
        basepath = os.path.join(config['check_scripts_dir'], basename)
        archive_name = basename + ".tar"
        archive_path = os.path.join(config['asset_files_dir'], archive_name)
        # tar writes binary data to stdout, so open the archive in binary mode.
        with open(archive_path, 'wb') as archive_file:
            # Pin mtime and owner so the archive bytes (and the sha512 below)
            # stay reproducible; the timestamp must not carry literal quotes.
            subprocess.run([
                "tar", "--mtime", "1970-01-01 00:00:00", "--owner", "root",
                "-cv", "-C", basepath, 'bin'
            ], stdout=archive_file)
        log.debug(basename)
        with open(archive_path, 'rb') as archive_fh:
            asset_hash = hashlib.sha512(archive_fh.read()).hexdigest()
        log.debug("asset_hash = {}".format(asset_hash))
        log.debug("archive_path = {}".format(archive_path))
        asset_definition_file = os.path.join(config['asset_defs_dir'],
                                             basename + ".json")
        log.info(asset_definition_file)
        if not os.path.exists(asset_definition_file):
            # No definition yet: write one pointing at the freshly built archive.
            asset_obj = {
                "url": parse.urljoin(config['api_base_url'], archive_name),
                "sha512": asset_hash,
                "metadata": {
                    "name": basename,
                    "Content-Type": "application/zip",
                    "namespace": "default"
                }
            }
            with open(asset_definition_file, "w") as handler:
                json.dump(asset_obj, handler, indent=4)
        else:
            # Definition exists: only refresh its sha512 for the new archive.
            with open(asset_definition_file, "r") as asset_file_handler:
                asset_obj = json.load(asset_file_handler)
            asset_obj["sha512"] = asset_hash
            log.debug(asset_obj)
            with open(asset_definition_file, "w") as new_handler:
                json.dump(asset_obj, new_handler, indent=4)

    return asset_list
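For reference, the definition file this writes (or updates) ends up shaped like the following, with a hypothetical asset name and base URL; only the url and sha512 fields change between builds:

{
    "url": "https://assets.example.com/check-cpu.tar",
    "sha512": "<hex digest of check-cpu.tar>",
    "metadata": {
        "name": "check-cpu",
        "Content-Type": "application/zip",
        "namespace": "default"
    }
}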