def get_metadata_of_interest(title, description, metadata, get_attributes):
    """
    Obtain a dictionary containing attributes of interest.

    :param title: The title for the attributes.
    :param description: Description of the data.
    :param metadata: Iterable of metadata lines (e.g. /etc/os-release output).
    :param get_attributes: Keys of attributes that are required in the dictionary.
    :return: Metadata dictionary indexed by the attribute keys.
    :raises AssertionError: if any requested attribute key is not found.
    """
    metadata_of_interest = {'TITLE': title, 'DESCRIPTION': description}
    found_keys = set()
    for line in metadata:
        logger.debug('Reading metadata: {data}'.format(data=line))
        stripped = line.strip()
        for info_key in get_attributes:
            if not stripped.startswith(info_key):
                continue
            value = stripped[len(info_key):]
            # Reject prefix-only matches so that e.g. the key 'VERSION'
            # does not capture a 'VERSION_ID=...' line: the remainder must
            # be empty or begin with a separator (space, ':' or '=').
            if value and value[0] not in ' :=':
                continue
            metadata_of_interest[info_key] = value.strip(' :=')
            found_keys.add(info_key)
            logger.debug('Found {key}'.format(key=info_key))
    assert found_keys == get_attributes
    return metadata_of_interest
def log_commons_version(self):
    """Report version of commons for pod's forgerock product."""
    logger.debug('Check commons version for {name}:{commons_jar}'.format(
        name=self.name,
        commons_jar=AmsterPod.REPRESENTATIVE_COMMONS_JAR))

    # Locate the versioned amster jar inside the pod.
    stdout, _ignored = kubectl.exec(Pod.NAMESPACE, [
        self.name, '-c', self.product_type, '--', 'find', AmsterPod.ROOT,
        '-name', 'amster-*.jar'
    ])
    jar_basename = os.path.split(stdout[0])[1]  # versioned amster jar name

    # The jar was previously copied to the local scratch area; unzip it so
    # the embedded maven metadata can be read.
    scratch_directory = os.path.join(AmsterPod.LOCAL_TEMP, self.name)
    extracted_directory = os.path.join(scratch_directory, 'exploded')
    with zipfile.ZipFile(os.path.join(scratch_directory,
                                      jar_basename)) as jar_zip:
        jar_zip.extractall(extracted_directory)

    properties_path = os.path.join(
        extracted_directory, 'META-INF', 'maven', 'org.forgerock.commons',
        AmsterPod.REPRESENTATIVE_COMMONS_JAR_NAME, 'pom.properties')
    logger.debug('Checking commons version in {path}'.format(
        path=properties_path))
    assert os.path.isfile(properties_path), 'Failed to find {path}'.format(
        path=properties_path)

    with open(properties_path) as properties_file:
        property_lines = properties_file.readlines()
    os_metadata = Pod.get_metadata_of_interest(
        'Commons', self.name, property_lines,
        {'version', 'groupId', 'artifactId'})
    Pod.print_table(os_metadata)
def check_http_status(http_result, expected_status, known_issue=None):
    """
    Check HTTP status code.

    :param http_result : request.models.Response - request response
    :param expected_status : int or list(int) - status codes to detect that application is deployed
    :param known_issue : Jira issue code (ex : OPENAM-567) - used to add a tag
    """
    actual_status = http_result.status_code
    if isinstance(expected_status, list):
        is_success = actual_status in [int(status)
                                       for status in expected_status]
    else:
        try:
            is_success = actual_status == int(expected_status)
        except ValueError:
            # Non-numeric expectation can never match.
            is_success = False

    if not is_success:
        # if known_issue is not None:
        # set_known_issue(known_issue)
        pytest.fail(
            'ERROR:\n-- http status --\nreturned %s, expected %s\n-- content --\n%s'
            % (actual_status, expected_status, http_result.text))
    else:
        logger.info('SUCCESS:\n-- http status --\nreturned %s, expected %s' %
                    (actual_status, expected_status))
        logger.debug('\n-- content --\n%s' % http_result.text)
def cp_from_pod(namespace, pod_name, source, destination, container_name):
    """
    Copy from source on pod to local destination.

    :param namespace: kubernetes namespace
    :param pod_name: Name of pod
    :param source: Path to items to be copied
    :param destination: Path of where to copy files to.
    :param container_name: Name of container within pod.
    :return: (stdout, stderr) from running the command
    """
    logger.debug(
        'Copying {source} to {destination} for {container_name}:{pod_name} in {namespace}'
        .format(source=source,
                destination=destination,
                container_name=container_name,
                pod_name=pod_name,
                namespace=namespace))
    # kubectl cp expects the remote side as <namespace>/<pod>:<path>.
    source_command = '{namespace}/{pod_name}:{source}'.format(
        namespace=namespace, pod_name=pod_name, source=source)
    command = ' '.join([
        KUBECTL_COMMAND, 'cp', source_command, destination, '-c',
        container_name
    ])
    return __run_cmd_process(command)
def setup_commons_check(self):
    """Setup for checking commons library version: copy the product root
    from the pod into a local per-pod scratch directory."""
    logger.debug('Setting up for commons version check')
    # os.path.join with a single argument was a no-op; use the root directly.
    source = AmsterPod.ROOT
    destination = os.path.join(AmsterPod.LOCAL_TEMP, self.name)
    kubectl.cp_from_pod(Pod.NAMESPACE, self.name, source, destination,
                        self.product_type)
def delete(time, unit):
    """Remove the matching cron entry from the user's crontab, if present."""
    if not exists(time, unit):
        print(f"Cannot delete. Cron entry: '{time}, {unit}' doesnt exist")
        return

    # Dropped unused local: convert(time, unit) result was never used here.
    cron_string = get_cron_line(time, unit)
    # Escape '*' so grep matches it literally rather than as a regex atom.
    cron_string = cron_string.replace("*", "[*]")
    os.system(f'crontab -l | grep -v "{cron_string}" | crontab -')
    log.debug(f"Deleted from cron: {cron_string}")
def add(time, unit):
    """Append a new cron entry unless an identical one already exists."""
    if exists(time, unit):
        log.debug(
            f"Skipping add cron. Cron entry: '{time}, {unit}' already exists")
        return
    entry = get_cron_line(time, unit)
    os.system(f'(crontab -l; echo "{entry}") | crontab -')
    log.debug(f"Added to cron: {entry}")
def log_commons_version(self):
    """Report version of commons for pod's forgerock product."""
    logger.debug(
        'Report commons version for {name}'.format(name=self.name))
    bundle_directory = os.path.join(os.sep, 'opt', 'openidm', 'bundle')
    super(IDMPod, self).log_versioned_commons_jar(
        bundle_directory, IDMPod.REPRESENTATIVE_COMMONS_JAR_NAME)
def log_os(self, attributes_of_interest):
    """
    Report Operating System on the pod.

    :param attributes_of_interest: Set of attribute keys to check.
    """
    logger.debug('Check OS version for {name}'.format(name=self.name))
    release_lines, _ignored = kubectl.exec(Pod.NAMESPACE, [
        self.name, '-c', self.product_type, '--', 'cat', '/etc/os-release'
    ])
    Pod.print_table(
        Pod.get_metadata_of_interest('OS', self.name, release_lines,
                                     attributes_of_interest))
def add(time, unit):
    """Create a cron job for the given schedule unless one already exists."""
    cron = CronTab(user=True)
    if exists(time, unit):
        log.debug(
            f"Skipping add cron. Cron entry: '{time}, {unit}' already exists")
        return
    entry = get_cron_line(time, unit)
    job = cron.new(command=get_cron_command(time, unit))
    job.setall(convert(time, unit))
    cron.write()
    log.debug(f"Added to cron: {entry}")
def delete(time, unit):
    """Delete every cron job matching the given schedule, if any exist."""
    cron = CronTab(user=True)
    if not exists(time, unit):
        print(f"Cannot delete. Cron entry: '{time}, {unit}' doesnt exist")
        return

    cron_time = convert(time, unit)
    cron_string = get_cron_line(time, unit)
    cron_string = cron_string.replace("*", "[*]")
    # Renamed from 'iter', which shadowed the builtin of the same name.
    matching_jobs = cron.find_time(cron_time)
    for job in matching_jobs:
        cron.remove(job)
    cron.write()
    log.debug(f"Deleted from cron: {cron_string}")
def are_legal_notices_present(self):
    """
    Check if the representative license file is present on the pod,
    otherwise assert.
    """
    file_of_interest = 'Forgerock_License.txt'
    stdout, _ignored = kubectl.exec(Pod.NAMESPACE, [
        self.name, '-c', self.product_type, '--', 'find', '.', '-name',
        file_of_interest
    ])
    license_path = stdout[0].strip()
    logger.debug('Found legal notice: {file}'.format(file=license_path))
    expected_suffix = 'legal-notices/{file}'.format(file=file_of_interest)
    assert license_path.endswith(expected_suffix), \
        'Unable to find {file_of_interest}'.format(
            file_of_interest=file_of_interest)
def log_jdk(self, attributes_of_interest):
    """
    Report Java versions for pod.

    :param attributes_of_interest: Set of attribute keys to check.
    """
    logger.debug('Check Java version for {name}'.format(name=self.name))
    # 'java -version' writes its report to stderr rather than stdout, so
    # the metadata is taken from the second element of the exec result.
    _ignored, metadata = kubectl.exec(
        Pod.NAMESPACE,
        [self.name, '-c', self.product_type, '--', 'java', '-version'])
    java_metadata = Pod.get_metadata_of_interest('Java', self.name, metadata,
                                                 attributes_of_interest)
    Pod.print_table(java_metadata)
def is_binary(filename):
    """
    :param filename: File to check.
    :returns: True if it's a binary file, otherwise False.
    """
    logger.debug("is_binary: %(filename)r", locals())

    # Extensions known to always be binary.
    if any(filename.endswith(extension) for extension in [".pyc"]):
        return True

    # Otherwise sniff the first chunk of the file's contents.
    return is_binary_string(get_starting_chunk(filename))
def load():
    """Load previously-seen ads from the configured ads file, creating an
    empty one first if it does not exist yet.

    :return: Dictionary of ads keyed by module name.
    """
    # Dropped unused function-local import of lib.core.state.State.
    file = Config.ADS_FILE
    if not os.path.exists(file):
        # Seed the file with an empty mapping so yaml.safe_load returns {}.
        with open(file, "w") as stream:
            stream.write("{}")

    with open(file, "r") as stream:
        ads = yaml.safe_load(stream)

    for key in ads:
        log.debug(f"Total old ads from {key}: {len(ads[key])}")

    return ads
def version(self):
    """
    Return the product version information.

    :return: Dictionary
    """
    am_config = AMConfig()

    # Authenticate as the admin user to obtain a session token.
    logger.debug('Get admin token')
    authn_headers = {
        'X-OpenAM-Username': '******',
        'X-OpenAM-Password': os.environ['AM_ADMIN_PWD'],
        'Content-Type': 'application/json',
        'Accept-API-Version': 'resource=2.0, protocol=1.0'
    }
    authn_response = post(verify=am_config.ssl_verify,
                          url=am_config.rest_authn_url,
                          headers=authn_headers)
    rest.check_http_status(http_result=authn_response, expected_status=200)
    admin_token = authn_response.json()['tokenId']

    # Query the server-info endpoint using the admin session token.
    logger.debug('Get AM version')
    version_headers = {
        'Content-Type': 'application/json',
        'Accept-API-Version': 'resource=1.0',
        'iplanetdirectorypro': admin_token
    }
    version_response = get(verify=am_config.ssl_verify,
                           url=am_config.am_url + '/json/serverinfo/version',
                           headers=version_headers)
    rest.check_http_status(http_result=version_response, expected_status=200)

    version_info = version_response.json()
    return {
        'TITLE': self.product_type,
        'DESCRIPTION': self.name,
        'VERSION': version_info['version'],
        'REVISION': version_info['revision'],
        'DATE': version_info['date']
    }
def __run_cmd_process(cmd):
    """
    Run a command as process. Checks the return code.

    :param cmd: command to run
    :return: Duple of string lists (stdout, stderr)
    :raises AssertionError: if the command exits non-zero.
    """
    logger.debug('Running following command as process: {cmd}'.format(cmd=cmd))
    # subprocess.run supersedes the Popen/communicate pair: it waits for the
    # process and exposes captured output on the returned object.
    response = subprocess.run(cmd,
                              shell=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              universal_newlines=True)
    assert response.returncode == 0, ' Unexpected return code {return_code} from cmd {stderr}'.format(
        return_code=response.returncode, stderr=response.stderr)
    return response.stdout.split('\n'), response.stderr.split('\n')
def log_commons_version(self):
    """Report version of commons for pod's forgerock product."""
    logger.debug(
        'Report commons version for {name}'.format(name=self.name))
    webapp_lib_path = os.path.join(os.sep, 'usr', 'local', 'tomcat',
                                   'webapps', 'am', 'WEB-INF', 'lib')
    super(AMPod, self).log_versioned_commons_jar(
        webapp_lib_path, AMPod.REPRESENTATIVE_COMMONS_JAR_NAME)
def get_product_component_names(namespace, product_type):
    """
    Get the names of the pods for the given platform product.

    :param namespace: Name of kubernetes namespace
    :param product_type: Name of platform product
    :return: List of pod names
    """
    command = ' '.join([
        KUBECTL_COMMAND, '-n', namespace, 'get', 'pods',
        '--selector=component={product_type}'.format(product_type=product_type)
    ])
    stdout, _ignored = __run_cmd_process(command)
    pod_names = []
    for line in stdout:
        logger.debug('Found component {component}'.format(component=line))
        # split() tolerates any run of whitespace between columns, unlike
        # split(' ') which yields empty fields on aligned kubectl output.
        line_contents = line.split()
        if len(line_contents) > 1 and line_contents[0] != 'NAME':
            pod_names.append(line_contents[0])
    return pod_names
def log_versioned_commons_jar(self, lib_path, jar_name):
    """
    Report version of commons; obtained from the name of a sample commons
    .jar.

    :param lib_path: Path to jar library.
    :param jar_name: Jar file to check.
    """
    logger.debug(
        'Check commons version for {name}*.jar'.format(name=jar_name))
    stdout, _ignored = kubectl.exec(Pod.NAMESPACE, [
        self.name, '-c', self.product_type, '--', 'find', lib_path, '-name',
        jar_name + '-*.jar'
    ])
    jar_filepath = stdout[0]  # first line of output
    # The version string sits between '<jar_name>-' and '.jar'.
    version_start = jar_filepath.find(jar_name) + len(jar_name) + 1
    version_end = jar_filepath.find('.jar')
    Pod.print_table({
        'TITLE': "Commons",
        'DESCRIPTION': self.name,
        'VERSION': jar_filepath[version_start:version_end],
        'FILE': jar_filepath
    })
def log_os(self):
    """Report Operating System on the pod."""
    logger.debug('Report OS version for {name}'.format(name=self.name))
    os_attributes = {'NAME', 'ID', 'VERSION_ID'}
    super(AMPod, self).log_os(os_attributes)
def log_jdk(self):
    """Report Java version on the pod."""
    logger.debug('Report Java version for {name}'.format(name=self.name))
    # The original set literal repeated 'openjdk version' three times; a set
    # collapses duplicates, so the single key is behaviorally identical.
    return super(AMPod, self).log_jdk({'openjdk version'})
def cleanup_commons_check(self):
    """Cleanup for checking commons library version."""
    logger.debug('Cleaning up after commons version check')
    scratch_directory = os.path.join(AmsterPod.LOCAL_TEMP, self.name)
    shutil.rmtree(scratch_directory)
def is_binary_string(bytes_to_check):
    """
    Uses a simplified version of the Perl detection algorithm,
    based roughly on Eli Bendersky's translation to Python:
    http://eli.thegreenplace.net/2011/10/19/perls-guess-if-file-is-text-or-binary-implemented-in-python/

    This is biased slightly more in favour of deeming files as text
    files than the Perl algorithm, since all ASCII compatible character
    sets are accepted as text, not just utf-8.

    :param bytes_to_check: A chunk of bytes to check.
    :returns: True if appears to be a binary, otherwise False.
    """
    # Empty files are considered text files
    if not bytes_to_check:
        return False

    # Now check for a high percentage of ASCII control characters
    # Binary if control chars are > 30% of the string
    low_chars = bytes_to_check.translate(None, _printable_ascii)
    nontext_ratio1 = float(len(low_chars)) / float(len(bytes_to_check))
    logger.debug('nontext_ratio1: %(nontext_ratio1)r', locals())

    # and check for a low percentage of high ASCII characters:
    # Binary if high ASCII chars are < 5% of the string
    # From: https://en.wikipedia.org/wiki/UTF-8
    # If the bytes are random, the chances of a byte with the high bit set
    # starting a valid UTF-8 character is only 6.64%. The chances of finding
    # 7 of these without finding an invalid sequence is actually lower than
    # the chance of the first three bytes randomly being the UTF-8 BOM.
    high_chars = bytes_to_check.translate(None, _printable_high_ascii)
    nontext_ratio2 = float(len(high_chars)) / float(len(bytes_to_check))
    logger.debug('nontext_ratio2: %(nontext_ratio2)r', locals())

    is_likely_binary = ((nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or
                        (nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8))
    logger.debug('is_likely_binary: %(is_likely_binary)r', locals())

    # then check for binary for possible encoding detection with chardet
    detected_encoding = chardet.detect(bytes_to_check)
    logger.debug('detected_encoding: %(detected_encoding)r', locals())

    # finally use all the check to decide binary or text
    decodable_as_unicode = False
    if (detected_encoding['confidence'] > 0.9
            and detected_encoding['encoding'] != 'ascii'):
        try:
            try:
                bytes_to_check.decode(encoding=detected_encoding['encoding'])
            except TypeError:
                # happens only on Python 2.6
                unicode(bytes_to_check, encoding=detected_encoding['encoding'])  # noqa
            decodable_as_unicode = True
            logger.debug(
                'success: decodable_as_unicode: '
                '%(decodable_as_unicode)r', locals())
        except LookupError:
            logger.debug('failure: could not look up encoding %(encoding)s',
                         detected_encoding)
        except UnicodeDecodeError:
            # Fixed: the original logged this same failure message twice in
            # a row; a single debug entry suffices.
            logger.debug(
                'failure: decodable_as_unicode: '
                '%(decodable_as_unicode)r', locals())

    if is_likely_binary:
        return not decodable_as_unicode
    else:
        if decodable_as_unicode:
            return False
        if b'\x00' in bytes_to_check or b'\xff' in bytes_to_check:
            # Check for NULL bytes last
            logger.debug('has nulls:' + repr(b'\x00' in bytes_to_check))
            return True
        return False
def scrape(source, notif_agents_list, include=None, exclude=None,
           colour_flag="", notify=True, force_tasks=False, force_agents=False,
           recent_ads=0, save_ads=True, ignore_old_ads=False):
    """Scrape a source for new ads and optionally notify agents about them.

    :param source: source definition (module name + properties) to scrape
    :param notif_agents_list: notification agents to send new ads to
    :param include: optional include filters (default: none)
    :param exclude: optional exclude filters (default: none)
    :param colour_flag: colour flag forwarded to the notification modules
    :param notify: when False, skip notifications entirely
    :param force_tasks: unused here; kept for interface compatibility
    :param force_agents: when True, notify even through disabled agents
    :param recent_ads: when > 0, only notify about the N most recent ads
    :param save_ads: when True, persist the processed ad ids
    :param ignore_old_ads: when True, treat every found ad as new
    :return: ScrapeSummary describing the ads found
    """
    from lib.core.state import State
    import lib.core.notif_agent as notif_agent

    # Fixed: mutable default arguments ([]) are shared between calls; use
    # None sentinels and materialize fresh lists here instead.
    include = [] if include is None else include
    exclude = [] if exclude is None else exclude

    ads = State.get_ads()
    source_modules = State.get_source_modules()
    notif_agent_modules = State.get_notif_agent_modules()

    log.info_print(f"Source: {source.name}")
    log.info_print(f"Module: {source.module}")
    log.info_print(f"Module Properties: {source.module_properties}")

    if len(include):
        print(f"Including: {include}")
    if len(exclude):
        print(f"Excluding: {exclude}")

    module = source_modules[source.module]

    old_ads = []
    if not ignore_old_ads:
        if source.module in ads:
            old_ads = ads[source.module]
            log.debug(f"Total old ads: {len(old_ads)}")
        else:
            log.debug(f"No old ads found for module: {source.module}")
    else:
        log.info_print("Ignoring old ads...")

    new_ads, ad_title = module.scrape_for_ads(old_ads, exclude=exclude,
                                              **source.module_properties)

    info_string = f"Found {len(new_ads)} new ads" \
        if len(new_ads) != 1 else "Found 1 new ad"
    log.info_print(info_string)

    num_ads = len(new_ads)
    if notify and num_ads:
        ads_to_send = new_ads
        if recent_ads > 0:
            # only notify the most recent notify_recent new_ads
            ads_to_send = ct.get_most_recent_items(recent_ads, new_ads)
            log.debug(
                f"Recent ads set to: {recent_ads} got: {len(ads_to_send)}")

        log.info_print(f"Total ads to notify about: {len(ads_to_send)}")

        if len(notif_agents_list) == 0:
            log.warning_print(
                "No notification agents set... nothing to notify")
        else:
            if len(notif_agents_list) > 1:
                log.info_print(
                    f"Notifying agents: {notif_agent.get_names(notif_agents_list)}"
                )
            for agent in notif_agents_list:
                if agent.enabled or force_agents:
                    if not agent.enabled and force_agents:
                        log.info_print(
                            "Notification agent was disabled but forcing...")
                    notif_agent_modules[agent.module].send_ads(
                        ads_to_send, ad_title, colour_flag,
                        **agent.module_properties)
                else:
                    log.info_print(
                        f"Skipping... Notification agent disabled: {agent.name}"
                    )
    elif not notify and num_ads:
        log.info_print("Skipping notification")

    if save_ads:
        ads[source.module] = module.old_ad_ids
        log.debug(f"Total all-time processed ads: {len(module.old_ad_ids)}")
    else:
        log.info_print("Saving ads disabled. Skipping...")

    print()
    return ScrapeSummary(new_ads=new_ads,
                         latest_ads=list(new_ads)[-3:],
                         total_new_ads=len(new_ads))
def clear():
    # Strip our entries from the user's crontab by filtering out every line
    # that mentions the module-level 'path', leaving unrelated jobs intact.
    # NOTE(review): assumes 'path' uniquely identifies entries written by
    # this tool — confirm against how the entries are generated.
    log.debug("Clearing crontab...")
    os.system(f'crontab -l | grep -v "{path}" | crontab -')
def clear():
    """Delete every job from the current user's crontab."""
    log.debug("Clearing crontab...")
    user_cron = CronTab(user=True)
    user_cron.remove_all()
    user_cron.write()
def log_version(self):
    """Report the product version."""
    logger.debug('Check version for {name}'.format(name=self.name))
    version_metadata = self.version()
    Pod.print_table(version_metadata)