def validate(self, file_path):
    """Return True when the parent directory of `file_path` is a Helm chart.

    A chart directory must contain Chart.yaml, values.yaml and a
    templates/ sub-directory.
    """
    chart_dir = file_path.rpartition("/")[0]
    checks = [
        exists_file("%s/Chart.yaml" % chart_dir),
        exists_file("%s/values.yaml" % chart_dir),
        exists_dir("%s/templates" % chart_dir),
    ]
    return all(checks)
def get_azure_data(snapshot_source):
    """Fetch the Azure structure document from the DB or the filesystem.

    Returns the document's 'json' payload, or an empty dict when nothing
    matching `snapshot_source` is found.
    """
    sub_data = {}
    if json_source():
        # Database-backed lookup: newest structure document for the name.
        db = config_value(DATABASE, DBNAME)
        coll = config_value(DATABASE, collectiontypes[STRUCTURE])
        base_name = snapshot_source.split('.')[0]
        docs = get_documents(coll, dbname=db,
                             sort=[sort_field('timestamp', False)],
                             query={'name': base_name}, limit=1)
        logger.info('Number of Snapshot Documents: %s', len(docs))
        if docs:
            sub_data = docs[0]['json']
    else:
        # Filesystem lookup one level above the test JSON directory.
        if snapshot_source and not snapshot_source.endswith('.json'):
            file_name = '%s.json' % snapshot_source
        else:
            file_name = snapshot_source
        azure_source = '%s/../%s' % (get_test_json_dir(), file_name)
        logger.info('Azure source: %s', azure_source)
        if exists_file(azure_source):
            sub_data = json_from_file(azure_source)
    return sub_data
def get_google_data(snapshot_source):
    """
    The Google source object to be fetched from database or the filesystem
    The initial configuration for database is 'validator' and collection
    is 'structures', whereas for the filesystem the path to fetch the
    'structures' is $SOLUTIONDIR/realm/<structure>.json
    """
    sub_data = {}
    if json_source():
        # Database-backed lookup: newest structure document for the name.
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Google structure Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        # Filesystem lookup one level above the test JSON directory.
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        google_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Google source: %s', google_source)
        if exists_file(google_source):
            sub_data = json_from_file(google_source)
    if not sub_data:
        # Fixed grammar in the error message ("does not contains the valid
        # JSON" -> "does not contain valid JSON").
        logger.error("Google connector file %s does not exist, or it does not contain valid JSON.", snapshot_source)
    return sub_data
def get_snaphotid_doc_old(self, sid, container):
    """Load snapshot `sid` from '<json_dir>/<container>/snapshots/'.

    Side effect: appends the snapshot's metadata to self.snapshots.
    Returns the snapshot's 'json' payload, or None when unavailable.
    """
    json_dir = get_test_json_dir()
    if not exists_dir(json_dir):
        return None
    fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
    if not exists_file(fname):
        return None
    json_data = json_from_file(fname)
    if not (json_data and 'json' in json_data):
        return None
    snapshot = {
        'id': json_data['snapshotId'],
        'structure': json_data['structure'],
        'reference': json_data['reference'],
        'source': json_data['source'],
        'collection': json_data['collection'],
        'type': json_data.get("node", {}).get('type'),
        'region': json_data.get('region', "")
    }
    # Newer snapshot files carry a list of 'paths'; older ones a single 'path'.
    if 'paths' in json_data:
        snapshot['paths'] = json_data['paths']
    else:
        snapshot['path'] = json_data['path']
    self.snapshots.append(snapshot)
    return json_data['json']
def get_config_data(config_file):
    """Return config data from the config file."""
    if not exists_file(config_file):
        return None
    parser = configparser.ConfigParser(allow_no_value=True)
    parser.read(config_file)
    return parser
def get_node_version(node, snapshot):
    """Url version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if snapshot.isDb:
        # Derive the document name from the last path segment of the
        # configured api source, minus its extension.
        doc_name = api_source.rsplit('/')[-1].split('.')[0]
        docs = get_documents(snapshot.collection(STRUCTURE),
                             dbname=snapshot.dbname,
                             sort=snapshot.sort,
                             query={'name': doc_name},
                             limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs:
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions and node and 'type' in node and node['type'] in apiversions:
        version = apiversions[node['type']]['version']
    return version
def json_from_file(jsonfile, escape_chars=None, object_pairs_hook=OrderedDict):
    """ Get json data from the file.

    Reads `jsonfile` (retrying with explicit UTF-8, then UTF-8-with-BOM
    decoding) and parses it as JSON.

    :param jsonfile: path of the JSON file to load.
    :param escape_chars: optional list of characters; each occurrence is
        prefixed with two literal backslashes before parsing.
    :param object_pairs_hook: forwarded to json.loads; defaults to
        OrderedDict so key order is preserved.
    :return: parsed JSON data, or None when the file is missing/invalid.
    """
    jsondata = None
    try:
        if exists_file(jsonfile):
            file_data = None
            try:
                with open(jsonfile) as infile:
                    file_data = infile.read()
            except UnicodeDecodeError:
                # Platform default encoding failed; retry as UTF-8.
                with open(jsonfile, 'r', encoding='utf-8') as infile:
                    file_data = infile.read()
            if escape_chars and isinstance(escape_chars, list):
                for escape_char in escape_chars:
                    # Fixed the invalid escape sequence '\\\%s' (the '\%'
                    # part triggers a SyntaxWarning on modern Python);
                    # '\\\\%s' produces the identical runtime string of
                    # two backslashes followed by the character.
                    file_data = file_data.replace(escape_char, '\\\\%s' % escape_char)
            try:
                jsondata = json.loads(file_data, object_pairs_hook=object_pairs_hook)
            except JSONDecodeError:
                # Files saved with a UTF-8 BOM fail plain parsing; re-read
                # with utf-8-sig to strip the BOM and parse again.
                with open(jsonfile, 'r', encoding='utf-8-sig') as infile:
                    file_data = infile.read()
                jsondata = json.loads(file_data, object_pairs_hook=object_pairs_hook)
    except Exception as ex:
        logger.debug('Failed to load json from file: %s, exception: %s', jsonfile, ex)
    return jsondata
def get_call_kwargs(node):
    """Get argument names and their values in kwargs"""
    kwargs = {"params": {}}
    logger.info("Get node's kwargs")
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        # Database-backed lookup keyed on the params file's base name.
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        doc_name = params_source.rsplit('/')[-1].split('.')[0]
        docs = get_documents(collection, dbname=dbname,
                             sort=[sort_field('timestamp', False)],
                             query={'name': doc_name}, limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs:
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)
    path = node['path']
    # NB: "queryprameters" (sic) is the literal key used in the params data.
    if paramsversions and "queryprameters" in paramsversions:
        if node['type'] in paramsversions["queryprameters"]:
            type_params = paramsversions["queryprameters"][node['type']]
            for param, parameter_type in type_params.items():
                add_argument_parameter(path, kwargs, param, parameter_type)
    return kwargs
def generate_snapshots_from_mastersnapshot_file(mastersnapshot_file):
    """
    Each snapshot file from the filesystem is loaded as a json datastructue
     and generate all the nodes in this json datastructure.
    """
    # Normalize to a .json filename.
    if mastersnapshot_file and not mastersnapshot_file.endswith('.json'):
        mastersnapshot_file_name = '%s.json' % mastersnapshot_file
    else:
        mastersnapshot_file_name = mastersnapshot_file
    mastersnapshot_json_data = json_from_file(mastersnapshot_file_name)
    if not mastersnapshot_json_data:
        logger.error("masterSnapshot file %s looks to be empty, next!...", mastersnapshot_file)
        return {}, {}
    # Remote master snapshots are pulled in before generation.
    if mastersnapshot_json_data.get("connector") and mastersnapshot_json_data.get("remoteFile"):
        _, pull_response = pull_json_data(mastersnapshot_json_data)
        if not pull_response:
            return {}, {}
    logger.debug(json.dumps(mastersnapshot_json_data, indent=2))
    # The generated snapshot lives next to the master file as '<name>_gen.json'.
    base, ext = mastersnapshot_file_name.rsplit('.', 1)
    snapshot_file_name = '%s_gen.%s' % (base, ext)
    snapshot_json_data = json_from_file(snapshot_file_name) or {}
    snapshot_data = generate_mastersnapshots_from_json(
        mastersnapshot_json_data, snapshot_json_data)
    # save_json_to_file(mastersnapshot_json_data, mastersnapshot_file)
    if exists_file(snapshot_file_name):
        remove_file(snapshot_file_name)
    save_json_to_file(snapshot_json_data, snapshot_file_name)
    return snapshot_data, mastersnapshot_json_data
def get_version_for_type(node):
    """Url version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if json_source():
        # Database-backed lookup keyed on the api file's base name.
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        doc_name = api_source.rsplit('/')[-1].split('.')[0]
        docs = get_documents(collection, dbname=dbname,
                             sort=[sort_field('timestamp', False)],
                             query={'name': doc_name}, limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs:
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions and node and 'type' in node and node['type'] in apiversions:
        version = apiversions[node['type']]['version']
    return version
def process_template(self, paths):
    """
    process the files stored at specified paths and returns the template

    Scans every path under self.dir_path, remembers the last one that is a
    valid template file, and parses it with KubernetesTemplateParser.
    Returns the parsed template, or None when no valid file was found.
    """
    template_json = None
    if paths and isinstance(paths, list):
        template_file_path = ""
        for path in paths:
            file_path = '%s/%s' % (self.dir_path, path)
            logger.info("Fetching data : %s ", path)
            if self.is_template_file(file_path):
                template_file_path = file_path
            else:
                # Bug fix: the '%s' placeholder previously had no argument,
                # so the literal "%s" was logged instead of the path.
                logger.info(
                    "\t\t WARN: %s contains invalid Kubernetes yaml", path)
        self.template_file = template_file_path
        if template_file_path and exists_file(template_file_path):
            kubernetes_template_parser = KubernetesTemplateParser(
                template_file_path)
            template_json = kubernetes_template_parser.parse(
                template_file_path)
    return template_json
def get_connector_data(self):
    """ get connector data from snapshot """
    connector_data = {}
    if self.snapshots:
        source = self.snapshots[0].get("source")
        if get_dbtests():
            # DB mode: newest structure document for this container/source.
            docs = get_documents(
                "structures",
                query={
                    "name" : source,
                    "type" : "structure",
                    "container": self.container
                },
                dbname=self.dbname,
                limit=1
            )
            if docs:
                connector_data = docs[0].get("json", {})
        else:
            # Filesystem mode: connector JSON one level above the test dir.
            if source and not source.endswith('.json'):
                file_name = '%s.json' % source
            else:
                file_name = source
            connector_path = '%s/../%s' % (get_test_json_dir(), file_name)
            if exists_file(connector_path):
                connector_data = json_from_file(connector_path)
    return connector_data
def convert_terraform_to_json(terraform, output=None):
    """Convert a terraform file to JSON and save it.

    :param terraform: path of the terraform file to convert.
    :param output: destination path; when omitted, the input path with its
        final extension replaced by '.json' is used.
    """
    if exists_file(terraform):
        if not output:
            # Bug fix: rsplit('.', -1) splits on EVERY dot (maxsplit=-1 is
            # unlimited), so 'a.b.tf' wrongly became 'a.json'. Split only
            # on the last dot, as done elsewhere in this module.
            parts = terraform.rsplit('.', 1)
            output = '%s.json' % parts[0]
        json_data = convert_to_json(terraform, 'terraform')
        if json_data:
            save_json_to_file(json_data, output)
def opa_binary():
    """Locate the OPA executable from env or config; None when disabled or missing."""
    if not parsebool(config_value("OPA", "opa"), False):
        return None
    # Environment variable takes precedence over the configured path.
    candidate = os.getenv('OPAEXE', None)
    if candidate and exists_file(candidate):
        return candidate
    candidate = config_value("OPA", "opaexe")
    if candidate and exists_file(candidate):
        return candidate
    return None
def yaml_from_file(yamlfile):
    """ Get yaml data from the file in a dict.

    :param yamlfile: path of the YAML file to load.
    :return: parsed YAML data, or None when the file is missing or invalid.
    """
    yamldata = None
    try:
        if exists_file(yamlfile):
            with open(yamlfile) as infile:
                # safe_load: yaml.load without an explicit Loader is
                # deprecated and can construct arbitrary objects from
                # untrusted input.
                yamldata = yaml.safe_load(infile)
    except Exception as ex:
        # Bug fix: print() does not apply %-formatting to extra arguments
        # (the old call printed the raw format string and a tuple-like
        # tail); format the message explicitly.
        print('Failed to load yaml from file: %s, exception: %s' % (yamlfile, ex))
    return yamldata
def get_rego_rule_filename(rego_file, container):
    """Return the rego rule path under the container's test dir, or None."""
    json_dir = get_test_json_dir()
    if not exists_dir(json_dir):
        return None
    candidate = '%s/%s/%s' % (json_dir, container, rego_file)
    return candidate if exists_file(candidate) else None
def rego_rule_filename(self, rego_file, container):
    # Resolve the on-disk location of `rego_file` for this testcase.
    # Returns the path when the file exists, otherwise None.
    rego_file_name = None
    if 'dirpath' in self.testcase and self.testcase['dirpath']:
        # An explicit dirpath in the testcase wins; honour it only when
        # the file actually exists there, then return immediately.
        rego_file_name = '%s/%s' % (self.testcase['dirpath'], rego_file)
        if exists_file(rego_file_name):
            pass
        else:
            rego_file_name = None
        return rego_file_name
    # NOTE(review): isdb_fetch is currently unused — the DB branch below is
    # disabled (kept as a triple-quoted string), so only the filesystem
    # lookup runs.
    isdb_fetch = get_dbtests()
    #It give same value for DB and SNAPSHOT, So for SNAPSHOT, we'll check it in
    #db first and if file isn't there, then we are fetching it from file path
    '''
    if isdb_fetch:
        dbname = self.dbname
        coll = 'structures'
        docs = get_documents(coll, { 'type': 'others', "container" : container},
                             dbname,
                             sort=[('timestamp', pymongo.DESCENDING)],
                             limit=1)
        # print('Number of other Documents: %s' % len(docs))
        logger.debug('Number of other Documents: %s', len(docs))
        if docs and len(docs):
            doc = docs[0]['json']
            if doc and 'file' in doc and isinstance(doc['file'], list):
                for file_doc in doc['file']:
                    name = get_field_value(file_doc, 'name')
                    # print(name, rego_file)
                    if name == rego_file:
                        content = get_field_value(file_doc, 'container_file')
                        if content:
                            rego_file_name = '/tmp/%s' % rego_file
                            open(rego_file_name, 'w', encoding="utf-8").write(content)
                            return rego_file_name
        # print(doc)
    '''
    # Fallback: look for the rego file under the container's test JSON dir.
    json_dir = get_test_json_dir()
    if exists_dir(json_dir):
        rego_file_name = '%s/%s/%s' % (json_dir, container, rego_file)
        if exists_file(rego_file_name):
            pass
        else:
            rego_file_name = None
    return rego_file_name
def get_snaphotid_doc(self, sid):
    """Fetch snapshot document `sid` from the DB, else from the filesystem.

    Side effect: appends the snapshot's metadata to self.snapshots.
    Returns the snapshot's 'json' payload, or None when not found.
    """
    doc = None
    isdb_fetch = get_dbtests()
    if isdb_fetch:
        dbname = self.dbname
        # Per-snapshot collection override, falling back to the default.
        coll = self.collection_data[sid] if sid in self.collection_data else COLLECTION
        docs = get_documents(coll, {'snapshotId': sid}, dbname,
                             sort=[('timestamp', pymongo.DESCENDING)], limit=1)
        logger.debug('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            doc = docs[0]['json']
            snapshot = {
                'id': docs[0]['snapshotId'],
                'structure': docs[0]['structure'],
                'reference': docs[0]['reference'],
                'source': docs[0]['source'],
                'collection': docs[0]['collection'],
                'type': docs[0].get("node", {}).get('type'),
                'region' : docs[0].get('region', "")
            }
            # Newer documents carry a list of 'paths'; older ones a single 'path'.
            if 'paths' in docs[0]:
                snapshot['paths'] = docs[0]['paths']
            else:
                snapshot['path'] = docs[0]['path']
            self.snapshots.append(snapshot)
    else:
        # Filesystem fallback: '<json_dir><container>/snapshots/<sid>'.
        json_dir = '%s%s' % (get_test_json_dir(), self.container)
        if exists_dir(json_dir):
            fname = '%s/snapshots/%s' % (json_dir, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    snapshot_val = {
                        'id': json_data['snapshotId'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source'],
                        'collection': json_data['collection'],
                        'type': json_data.get("node", {}).get('type'),
                        'region' : json_data.get('region', "")
                    }
                    if 'paths' in json_data:
                        snapshot_val['paths'] = json_data['paths']
                    else:
                        snapshot_val['path'] = json_data['path']
                    # Single-test runs keep the raw payload alongside the
                    # metadata for reporting.
                    singletest = get_from_currentdata(SINGLETEST)
                    if singletest:
                        snapshot_val['json'] = doc
                    self.snapshots.append(snapshot_val)
    return doc
def get_custom_data(snapshot_source):
    """ Get source JSON data """
    sub_data = {}
    # Normalize to a .json filename before resolving the path.
    if snapshot_source and not snapshot_source.endswith('.json'):
        file_name = '%s.json' % snapshot_source
    else:
        file_name = snapshot_source
    custom_source = '%s/../%s' % (get_test_json_dir(), file_name)
    logger.info('Custom source: %s', custom_source)
    if exists_file(custom_source):
        sub_data = json_from_file(custom_source)
    return sub_data
def get_structure_data(self, snapshot_object):
    """ Get the structure from the filesystem."""
    structure_data = {}
    snapshot_source = get_field_value(snapshot_object, "source")
    # Normalize to a .json filename before resolving the path.
    if snapshot_source and not snapshot_source.endswith('.json'):
        file_name = '%s.json' % snapshot_source
    else:
        file_name = snapshot_source
    custom_source = '%s/../%s' % (get_test_json_dir(), file_name)
    logger.info('%s structure file is %s', Snapshot.LOGPREFIX, custom_source)
    if exists_file(custom_source):
        structure_data = json_from_file(custom_source)
    return structure_data
def valid_config_ini(config_ini):
    """ Valid config ini path and load the file and check

    Returns None when the INI file exists and parses, otherwise an
    error message string describing the problem.
    """
    if not exists_file(config_ini):
        return "Configuration(%s) INI file does not exist!" % config_ini
    if get_config_data(config_ini):
        # TODO: can also check for necessary sections and fields.
        return None
    return "Configuration(%s) INI file is invalid, correct it and try again!" % config_ini
def generate_template_json(self):
    """
    generate the template json from template and parameter files

    Loads the template (YAML or JSON), overlays parameter-file defaults
    into self.gparams, records Mappings, and rewrites each Resource via
    process_resource.  Returns the generated template dict, or None when
    the template is missing or not a CloudFormation template.
    """
    gen_template_json = None
    template_json = None
    if self.get_template().endswith(".yaml") and exists_file(
            self.get_template()):
        template_json = self.yaml_to_json(self.get_template())
    elif self.get_template().endswith(".json"):
        template_json = json_from_file(self.get_template(), object_pairs_hook=None)
        # template_json = self.json_from_file(self.get_template())
    logger.info(self.get_template())
    if not template_json:
        logger.error("Invalid path! No file found at : %s", self.get_template())
        return gen_template_json
    # Only CloudFormation templates are supported.
    if "AWSTemplateFormatVersion" not in template_json:
        logger.error(
            "Invalid file content : file does not contains 'AWSTemplateFormatVersion' field."
        )
        return gen_template_json
    if template_json:
        # Work on a deep copy so the source template stays untouched.
        gen_template_json = copy.deepcopy(template_json)
        if 'Parameters' in template_json:
            self.gparams = template_json['Parameters']
            if self.parameter_file:
                parameters = json_from_file(self.parameter_file, object_pairs_hook=None)
                # parameters = self.json_from_file(self.parameter_file)
                if parameters:
                    # Parameter-file values override template defaults.
                    for param in parameters:
                        if "ParameterKey" in param and "ParameterValue" in param:
                            self.gparams[param["ParameterKey"]] = {
                                "Default": param["ParameterValue"]
                            }
                    logger.info(self.gparams)
        if 'Mappings' in template_json:
            self.mappings = template_json['Mappings']
        if 'Resources' in template_json:
            # NOTE(review): 'Resources' is rewritten from a dict to a LIST
            # of processed resources (keys are dropped) — confirm callers
            # expect this shape.
            new_resources = []
            for key, resource in template_json['Resources'].items():
                new_resource = self.process_resource(resource)
                new_resources.append(new_resource)
            gen_template_json['Resources'] = new_resources
    return gen_template_json
def save_json_to_jinja_file(self, json_data, output_file, transform=False):
    """Dump `json_data` as YAML into an existing output file.

    (Docstring corrected: this writes data out; it does not convert a
    Jinja file to a JSON object.)

    :param json_data: data to serialize.
    :param output_file: destination path; must already exist.
    :param transform: when True, dump through self.revert as the yaml
        transform hook.
    :return: True on success, False when the file is missing or the
        dump fails.
    """
    try:
        if exists_file(output_file):
            with open(output_file, 'w') as fp:
                if transform:
                    yaml.dump(json_data, fp, transform=self.revert)
                else:
                    yaml.dump(json_data, fp)
            return True
        logger.info("File doesnot exist at given path : %s", output_file)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; failures are still logged and reported as False.
        logger.info("Failed to save json data into jinja file")
        logger.error(traceback.format_exc())
    return False
def opa_binary():
    """Locate the OPA executable via env, config, or PATH; None if unavailable."""
    if not parsebool(config_value("OPA", "opa"), False):
        return None
    # Environment variable takes precedence over the configured path.
    candidate = os.getenv('OPAEXE', None)
    if candidate and exists_file(candidate):
        return candidate
    candidate = config_value("OPA", "opaexe")
    if candidate and exists_file(candidate):
        return candidate
    # Last resort: an `opa` binary on PATH, probed with `opa version`.
    try:
        subprocess.Popen(['opa', "version"], stdout=subprocess.DEVNULL)
        return "opa"
    except FileNotFoundError:
        return None
def create_ssh_config(ssh_dir, ssh_key_file, ssh_user):
    """Write an ssh config file in `ssh_dir` for the given key and user.

    :param ssh_dir: directory to write the config into.
    :param ssh_key_file: identity file path written into the config.
    :param ssh_user: user name written into the config.
    :return: the config path, or None when a config already exists.
    """
    ssh_config = '%s/config' % ssh_dir
    if exists_file(ssh_config):
        # Bug fix: the '%s' placeholder previously had no argument, so the
        # literal "%s" was logged instead of the path.
        logger.error("Git config: %s already exists, cannot modify it!", ssh_config)
        return None
    with open(ssh_config, 'w') as f:
        f.write('Host *\n')
        # f.write('Host %s\n' % ssh_host)
        # f.write('   HostName %s\n' % ssh_host)
        f.write('   User %s\n' % ssh_user)
        f.write('   IdentityFile %s\n\n' % ssh_key_file)
        # f.write('Host *\n')
        f.write('   IdentitiesOnly yes\n')
        f.write('   ServerAliveInterval 100\n')
    return ssh_config
def populate_template_snapshot(self):
    """
    process the snapshot and returns the updated `snapshot_data` which is
    require for run the test

    Resolves the working directory, validates the node's `paths`, performs
    any multiple-yaml / helm-chart conversions, then processes the template
    and marks the node 'active' or 'inactive' accordingly.
    """
    self.dir_path = get_field_value(self.connector_data, 'folderPath')
    if not self.dir_path:
        # Fall back to the checked-out repository path.
        self.dir_path = self.repopath
    self.paths = get_field_value(self.node, 'paths')
    if not self.paths or not isinstance(self.paths, list):
        self.node['status'] = 'inactive'
        logger.error(
            "Invalid json : `paths` field is missing in node or it is not a list"
        )
        return self.snapshot_data
    # A multi-document YAML path is split into its individual documents first.
    if is_multiple_yaml_convertion(self.paths[0]):
        multiple_source = '%s/%s.yaml' % (self.dir_path, (
            self.paths[0]).split(MultipleConvertionKey)[0])
        if exists_file(multiple_source):
            self.break_multiple_yaml_file(multiple_source)
    # Helm chart paths are rendered unless the target file already exists.
    if is_helm_chart_convertion(self.paths[0]):
        helm_dir = '%s/%s' % (self.dir_path, self.paths[0].rpartition("/")[0])
        if not exists_file('%s/%s' % (helm_dir, self.paths[0].rpartition("/")[-1])):
            self.process_helm_chart(helm_dir)
    self.processed_template = self.process_template(self.paths)
    if self.processed_template:
        self.store_data_record()
        self.node['status'] = 'active'
    else:
        self.node['status'] = 'inactive'
    return self.snapshot_data
def get_snaphotid_doc(self, sid):
    """Fetch snapshot document `sid` from the DB, else from the filesystem.

    Side effect: appends snapshot metadata to self.snapshots.
    Returns the snapshot's 'json' payload, or None when not found.
    """
    doc = None
    isdb_fetch = get_dbtests()
    if isdb_fetch:
        dbname = self.kwargs['dbname']
        # Per-snapshot collection override, falling back to the default.
        coll = self.kwargs['snapshots'][sid] if sid in self.kwargs[
            'snapshots'] else COLLECTION
        docs = get_documents(coll, {'snapshotId': sid}, dbname,
                             sort=[('timestamp', pymongo.DESCENDING)], limit=1)
        logger.debug('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            doc = docs[0]['json']
            self.snapshots.append({
                'id': docs[0]['snapshotId'],
                'path': docs[0]['path'],
                'structure': docs[0]['structure'],
                'reference': docs[0]['reference'],
                'source': docs[0]['source']
            })
    else:
        # Filesystem fallback: '<json_dir><container>/snapshots/<sid>'.
        json_dir = '%s%s' % (get_test_json_dir(), self.kwargs['container'])
        if exists_dir(json_dir):
            fname = '%s/snapshots/%s' % (json_dir, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    # self.snapshots.append({
                    #     'id': json_data['snapshotId'],
                    #     'path': json_data['path'],
                    #     'structure': json_data['structure'],
                    #     'reference': json_data['reference'],
                    #     'source': json_data['source']
                    # })
                    snapshot_val = {
                        'id': json_data['snapshotId'],
                        'path': json_data['path'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source']
                    }
                    # Single-test runs keep the raw payload alongside the
                    # metadata for reporting.
                    singletest = get_from_currentdata(SINGLETEST)
                    if singletest:
                        snapshot_val['json'] = doc
                    self.snapshots.append(snapshot_val)
    return doc
def get_snaphotid_doc_old(self, sid, container):
    """Load snapshot `sid` from '<json_dir>/<container>/snapshots/'.

    Side effect: appends the snapshot's metadata to self.snapshots.
    Returns the snapshot's 'json' payload, or None when unavailable.
    """
    json_dir = get_test_json_dir()
    if not exists_dir(json_dir):
        return None
    fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
    if not exists_file(fname):
        return None
    json_data = json_from_file(fname)
    if not (json_data and 'json' in json_data):
        return None
    self.snapshots.append({
        'id': json_data['snapshotId'],
        'path': json_data['path'],
        'structure': json_data['structure'],
        'reference': json_data['reference'],
        'source': json_data['source']
    })
    return json_data['json']
def is_template_file(self, file_path):
    """
    check for valid template file for parse cloudformation template

    Returns True only when the file has a json/yaml extension, loads
    successfully, and contains an 'AWSTemplateFormatVersion' field.
    """
    # Bug fix: str.split(".") always returns at least one element, so the
    # former `len(file_path.split(".")) > 0` guard was vacuous; checking
    # the final segment alone is the real condition.
    if file_path.split(".")[-1] in ("json", "yaml"):
        template_json = None
        if file_path.endswith(".yaml") and exists_file(file_path):
            with open(file_path) as yml_file:
                try:
                    template_json = json.loads(to_json(yml_file.read()))
                except Exception:
                    # Narrowed from a bare `except:`; unparseable YAML
                    # simply means "not a template".
                    pass
        elif file_path.endswith(".json"):
            template_json = json_from_file(file_path)
        if template_json and "AWSTemplateFormatVersion" in template_json:
            return True
    return False
def get_service_name(node_type):
    """ Get service name for init compute function """
    service = None
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        # Database-backed lookup keyed on the params file's base name.
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        doc_name = params_source.rsplit('/')[-1].split('.')[0]
        docs = get_documents(collection, dbname=dbname,
                             sort=[sort_field('timestamp', False)],
                             query={'name': doc_name}, limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs:
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)
    # Dotted node types drop their trailing component (a.b.c -> a.b).
    segments = node_type.split(".")
    if len(segments) > 1:
        check_node_type = ".".join(segments[:-1])
    else:
        check_node_type = node_type
    if paramsversions and "serviceName" in paramsversions:
        for service_name, resource_list in paramsversions['serviceName'].items():
            if check_node_type in resource_list:
                service = service_name
    return service