def configure(self, config_dir, crypt_key_file=None):
    """Perform the deferred initial loading of the configuration.

    :param config_dir: path to the folder with configuration files
    :param crypt_key_file: optional name of a file holding the key for
        encrypted configuration files
    :raises: AssertionError if called more than once

    if the `utils.conf` api is removed, the loading can be transformed
    to eager loading
    """
    assert self.yaycl_config is None
    # build the Config kwargs once; the crypt key is only passed along
    # when the key file actually exists on disk
    config_kwargs = {"config_dir": config_dir}
    if crypt_key_file and os.path.exists(crypt_key_file):
        config_kwargs["crypt_key_file"] = crypt_key_file
    self.yaycl_config = yaycl.Config(**config_kwargs)
def credentials(provider_data):
    """Yield the credentials mapping selected by ``provider_data``.

    Decrypts the credentials YAML using the key in ``conf/.yaml_key``,
    yields the entry keyed by ``provider_data["credentials"]``, and
    re-encrypts the file afterwards.

    :param provider_data: mapping with a ``"credentials"`` key naming the
        credential entry to look up
    :raises: ``pytest.fail`` when the key file is missing, the decrypted
        file cannot be read, or decryption did not produce the YAML file
    """
    if not os.path.exists("conf/.yaml_key"):
        pytest.fail("No yaml key in conf/.yaml_key")
    conf = yaycl.Config("conf/cfme-qe-yamls/complete",
                        crypt_key_file="conf/.yaml_key")
    yaycl_crypt.decrypt_yaml(conf, "credentials", delete=False)
    try:
        with open("conf/cfme-qe-yamls/complete/credentials.yaml", "r") as stream:
            try:
                creds = yaml.safe_load(stream)
            except UnicodeDecodeError:
                # wrong key produces binary garbage; remove the bogus file
                os.remove("conf/cfme-qe-yamls/complete/credentials.yaml")
                pytest.fail("Unable to read decrypted credential file, "
                            "did you put the correct key in conf/.yaml_key?")
    except IOError:
        pytest.fail("Credential YAML file not found or not decrypted!")
    try:
        yield creds.get(provider_data.get("credentials"))
    finally:
        # BUG FIX: cleanup previously ran only on the success path, leaving
        # the decrypted credentials on disk when the consuming test failed;
        # re-encrypt (and delete the plaintext) unconditionally after yield
        yaycl_crypt.encrypt_yaml(conf, "credentials", delete=True)
def configure(self, config_dir):
    """One-time deferred setup of the yaycl configuration.

    :param config_dir: directory containing the configuration files
    :raises: AssertionError if the configuration was already loaded
    """
    assert self.yaycl_config is None
    loaded = yaycl.Config(config_dir=config_dir)
    self.yaycl_config = loaded
from pyspark.sql.session import SparkSession
from pyspark import SparkContext
import pyspark
import yaml
import yaycl_crypt
import yaycl

sc = SparkContext(master='local[*]')
spark = SparkSession(sc)

# decrypt the encrypted config, parse it, then re-encrypt it on disk
conf = yaycl.Config('/Internship_assessments/DonorsChooseCode/',
                    crypt_key='/Internship_assessments/DonorsChooseCode/secret_text')
#yaycl_crypt.encrypt_yaml(conf, 'config')
yaycl_crypt.decrypt_yaml(conf, 'config')
# FIX: parse before re-encrypting and close the handle deterministically
# (the original left the file open and relied on the still-open handle
# after encrypt_yaml removed the plaintext); safe_load avoids executing
# arbitrary YAML tags from yaml.load with no explicit Loader
with open("/Internship_assessments/DonorsChooseCode/config.yaml") as file1:
    cfg = yaml.safe_load(file1)
yaycl_crypt.encrypt_yaml(conf, 'config')


def colRename_wrtprq(DF, prq_name):
    """Replace spaces with underscores in every column name of *DF* and
    write the result to HDFS as ``<prq_name>.parquet``.

    Write failures are swallowed (best-effort caching); the renamed frame
    is not returned, matching the original call sites.
    """
    for col in DF.columns:
        DF = DF.withColumnRenamed(col, col.replace(" ", "_"))
    try:
        DF.write.parquet("hdfs:///inputs/DonorChoose/" + prq_name + ".parquet")
    except Exception:
        # BUG FIX: the original body was `except: yield`, and the presence
        # of `yield` turned this function into a generator — calling it
        # executed nothing at all; ignore the write failure instead
        pass


# read data from hdfs, falling back to the source CSV when the cached
# parquet file does not exist yet
try:
    donationsDF = spark.read.parquet("hdfs:///inputs/DonorChoose/donations.parquet")
except Exception:
    donationsDF = spark.read.csv("hdfs:///inputs/DonorChoose/Donations.csv",
                                 header=True, inferSchema=True)
def conf(conf_dir):
    """Return a yaycl config rooted at *conf_dir* (a py.path.local)."""
    config_root = conf_dir.strpath
    return yaycl.Config(config_root)
from pyspark import SparkContext
from pyspark.sql.session import SparkSession
import yaml
import yaycl
import yaycl_crypt

sc = SparkContext(master='local')
spark = SparkSession(sc)

'''reading from HDFS and writing into '''
filteredDF = spark.read.parquet("hdfs:///files/accident/filteredDF")

# decrypt the connection settings, parse them, then re-encrypt on disk
conf = yaycl.Config('/home/hduser/', crypt_key='my secret')
yaycl_crypt.decrypt_yaml(conf, 'test')
with open("/home/hduser/test.yaml", 'r') as ymlfile:
    # FIX: yaml.load without an explicit Loader is deprecated and can
    # construct arbitrary objects; safe_load is sufficient for plain config
    cfg = yaml.safe_load(ymlfile)
yaycl_crypt.encrypt_yaml(conf, 'test')

# NOTE(review): the url is built by plain concatenation of path, database
# name, user and password with no separators — presumably the yaml values
# already embed the jdbc delimiters; verify against the config file
filteredDF.write.format(cfg['type'].get('conn_type')).options(
    url=cfg['url'].get('path') + cfg['url'].get('databaseName') +
        cfg['url'].get('user') + cfg['url'].get('password'),
    dbtable=cfg['dbtable'].get('name')).save()

newDF = spark.read.format(cfg['type'].get('conn_type')).options(
    url=cfg['url'].get('path') + cfg['url'].get('databaseName') +
        cfg['url'].get('user') + cfg['url'].get('password'),
    dbtable=cfg['dbtable'].get('name')).load()

'''writing into postgres '''
def conf(conf_dir):
    """Build and return a basic yaycl config object for *conf_dir*."""
    return yaycl.Config(conf_dir.strpath)