class Root:
    """CherryPy root application: one GameRandomizer page per platform."""

    # Auth token and platform metadata are loaded once, at class-definition time.
    token = read_file("token.txt")
    platform_list = read_file("platforms.txt")
    platforms = json.loads(platform_list)
    console_map = {}
    # Order platform ids by their mapped value so the link list is stable.
    sorted_platforms = sorted(platforms, key=platforms.get)
    for platform_id in sorted_platforms:
        if platform_id not in console_map:
            console_map[platform_id] = GameRandomizer(platform_id, token)

    @cherrypy.expose()
    def index(self, pid=None):
        """Serve the page for platform *pid* when known, else the link index.

        :param pid: optional platform id; unknown ids fall through to the index.
        :return: rendered HTML string.
        """
        if pid:
            if pid in Root.console_map:
                return Root.console_map[pid].index()
        links = make_console_links(Root.console_map, Root.sorted_platforms)
        index_html = read_file("index.html").replace("{links}", links)
        return index_html
def generate_data(src_file, tgt_file, output_file):
    """Pair source/target lines positionally, tokenize, and write JSON lines.

    A pair is dropped when either side tokenizes to nothing.
    """
    examples = []
    for src_line, tgt_line in zip(read_file(src_file), read_file(tgt_file)):
        src_tokens = src_line.strip().split()
        tgt_tokens = tgt_line.strip().split()
        if src_tokens and tgt_tokens:
            examples.append({'src': src_tokens, 'tgt': tgt_tokens})
    save_json_lines(examples, output_file)
def __init__(self, function_args):
    """Configure the OpenFaaS client: endpoint, env vars, labels, and
    optional basic-auth credentials read from mounted secret files."""
    self.endpoint = utils.get_environment_variable('OPENFAAS_ENDPOINT')
    self.openfaas_envvars = {'read_timeout': '90',
                             'write_timeout': '90'}
    self.openfaas_labels = {'com.openfaas.scale.zero': 'true'}
    self.set_function_args(function_args)
    # Basic auth is enabled only when both secret files are mounted.
    user_path = '/var/secrets/basic-auth-user'
    password_path = '/var/secrets/basic-auth-password'
    self.basic_auth = None
    if os.path.isfile(user_path) and os.path.isfile(password_path):
        self.basic_auth = (utils.read_file(user_path),
                           utils.read_file(password_path))
def get_user_script(self):
    """Return the user script as a base64 string.

    The event's 'script' field wins over the file named by the
    INIT_SCRIPT_PATH environment variable; '' when neither is present.
    """
    script = ""
    if utils.is_variable_in_environment('INIT_SCRIPT_PATH'):
        script_path = utils.get_environment_variable('INIT_SCRIPT_PATH')
        script = utils.utf8_to_base64_string(utils.read_file(script_path, 'rb'))
    # A script embedded in the event takes precedence over the env file.
    if utils.is_value_in_dict(self.lambda_instance.event, 'script'):
        script = self.lambda_instance.event['script']
    return script
def index(self, pid=None):
    """Serve the page for a known platform *pid*, else the console index."""
    if pid and pid in Root.console_map:
        return Root.console_map[pid].index()
    console_links = make_console_links(Root.console_map, Root.sorted_platforms)
    return read_file("index.html").replace("{links}", console_links)
def upload_file_to_s3(self, bucket_name, bucket_folder, file_path):
    """Upload the file at *file_path* to *bucket_name*.

    The object key is the file's base name, prefixed with *bucket_folder*
    (a '/' separator is inserted unless the folder already ends with one).
    An empty or missing folder uploads to the bucket root.
    """
    file_data = utils.read_file(file_path, 'rb')
    file_name = os.path.basename(file_path)
    if bucket_folder:
        # Avoid producing either "folder//name" or "foldername" keys.
        separator = "" if bucket_folder.endswith("/") else "/"
        file_key = "{0}{1}{2}".format(bucket_folder, separator, file_name)
    else:
        # BUG FIX: a falsy folder previously fell into the '/'-joining
        # branch and produced keys like "None/name" or "/name".
        file_key = file_name
    logger.info("Uploading file '{0}' to bucket '{1}' with key '{2}'".format(file_path, bucket_name, file_key))
    self.s3.upload_file(bucket_name, file_key, file_data)
def upload_file_to_S3_bucket(image_file, deployment_bucket, file_key):
    """Upload a container image file to S3, aborting the process when the
    unpacked image exceeds the maximum payload size."""
    if utils.get_tree_size(scar_temporal_folder) > MAX_S3_PAYLOAD_SIZE:
        error_msg = "Uncompressed image size greater than 250MB.\nPlease reduce the uncompressed image and try again."
        logger.error(error_msg)
        # Clean up the partial package before bailing out.
        utils.delete_file(zip_file_path)
        exit(1)
    logger.info("Uploading '%s' to the '%s' S3 bucket" % (image_file, deployment_bucket))
    image_bytes = utils.read_file(image_file, 'rb')
    S3().upload_file(deployment_bucket, file_key, image_bytes)
def __init__(self, command_prefix, self_bot) -> None:
    """Set up the bot: base Bot state, the player manager, commands, and
    the race/class/status lookup data loaded from asset files."""
    commands.Bot.__init__(self, command_prefix=command_prefix, self_bot=self_bot)
    self.command_prefix = command_prefix
    self.self_bot = self_bot
    self.manager = PlayerManager()
    self.list_names = []
    self.add_commands()
    # read_file is expected to yield (key, value) pairs here — TODO confirm.
    self.races = dict(read_file("src/assets/races"))
    self.classes = dict(read_file("src/assets/classes"))
    # NOTE(review): spellings ("Strenght", "Resistence") kept verbatim —
    # they look like status keys other code may match on.
    self.statusType = [
        "Strenght", "Dexterity", "Creativity", "Cleverness",
        "Charm", "Insight", "Resistence", "NuclearResistence"
    ]
def upload_file(self, folder_name=None, file_path=None, file_key=None):
    """Upload a file (or create a folder marker) in the input bucket.

    When *file_path* is given its contents become the object body;
    otherwise only the key is created.
    """
    bucket = self.properties['input_bucket']
    key = self.get_file_key(folder_name, file_path, file_key)
    kwargs = {'Bucket': bucket, 'Key': key}
    if file_path:
        kwargs['Body'] = utils.read_file(file_path, 'rb')
    if folder_name and not file_path:
        logger.info("Folder '{0}' created in bucket '{1}'".format(key, bucket))
    else:
        logger.info(
            "Uploading file '{0}' to bucket '{1}' from '{2}'".format(
                key, bucket, file_path))
    self.client.upload_file(**kwargs)
def get_payload(self):
    """Build the JSON invocation payload from the function properties.

    'c_args' takes precedence over 'run_script': each branch replaces the
    payload rather than merging into it. Returns '{}' when neither is set.
    """
    payload = {}
    if 'run_script' in self.properties:
        script_bytes = utils.read_file(self.properties['run_script'], 'rb')
        # base64-encode, then decode the bytes to str so the json lib can
        # serialize the data
        # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382
        payload = {"script": utils.utf8_to_base64_string(script_bytes)}
    if 'c_args' in self.properties:
        payload = {"cmd_args": json.dumps(self.properties['c_args'])}
    return json.dumps(payload)
def encrypt_data(filename_path, public_key_path):
    """Hybrid-encrypt the file at *filename_path* with an RSA public key.

    A fresh 16-byte AES session key encrypts the data (EAX mode); the
    session key itself is RSA/OAEP-encrypted. The output file
    '<basename>.bin' holds, in order: encrypted session key, nonce, tag,
    ciphertext — the layout decrypt_data reads back.
    """
    data = read_file(filename_path)
    logger.info(f"Encrypting file {filename_path}...")
    encrypted_filename = os.path.join(
        os.path.dirname(filename_path),
        # NOTE(review): split('.')[0] truncates names containing extra dots.
        f"{os.path.basename(filename_path).split('.')[0]}.bin",
    )
    with open(encrypted_filename, "wb") as file_out:
        recipient_key = RSA.import_key(read_file(public_key_path))
        session_key = get_random_bytes(16)
        # Encrypt the session key with the public RSA key
        cipher_rsa = PKCS1_OAEP.new(recipient_key)
        enc_session_key = cipher_rsa.encrypt(session_key)
        # Encrypt the data with the AES session key
        cipher_aes = AES.new(session_key, AES.MODE_EAX)
        ciphertext, tag = cipher_aes.encrypt_and_digest(data)
        # IDIOM FIX: plain loop instead of a list comprehension used
        # purely for its write() side effects.
        for segment in (enc_session_key, cipher_aes.nonce, tag, ciphertext):
            file_out.write(segment)
    logger.info(f"File saved in '{encrypted_filename}'.")
    logger.info("File successfully encrypted.")
def __init__(self):
    """Resolve the Kubernetes API host/port, the service-account token,
    and the CA certificate, with in-cluster fallbacks."""
    # API host/port from the standard in-cluster environment variables,
    # falling back to the cluster-internal defaults.
    self.kubernetes_service_host = (
        utils.get_environment_variable('KUBERNETES_SERVICE_HOST')
        or 'kubernetes.default')
    self.kubernetes_service_port = (
        utils.get_environment_variable('KUBERNETES_SERVICE_PORT')
        or '443')
    # Service-account bearer token mounted into the pod.
    self.kube_token = utils.read_file('/var/run/secrets/kubernetes.io/serviceaccount/token')
    # Verify TLS against the cluster CA when present, else disable verification.
    ca_path = '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt'
    self.cert_verify = ca_path if os.path.isfile(ca_path) else False
def set_function_code(self):
    """Package the deployment artifact and record it in the 'code'
    property: an S3 reference when a deployment bucket is configured,
    otherwise the inline zip bytes."""
    package_args = self.get_function_code_args()
    # Zip all the files and folders needed
    codezip.prepare_lambda_payload(**package_args)
    if 'DeploymentBucket' in package_args:
        self.properties['code'] = {"S3Bucket": package_args['DeploymentBucket'],
                                   "S3Key": package_args['FileKey']}
    else:
        zip_path = self.get_property("zip_file_path")
        self.properties['code'] = {"ZipFile": utils.read_file(zip_path, mode="rb")}
def set_properties(self, args):
    """Populate ``self.properties`` from the parsed CLI arguments.

    Merges the argparse namespace into the existing properties, then —
    for call types other than LS/PUT/GET and non-delete-all invocations —
    derives the function name, tags, environment, handler, and log group.
    INIT additionally packages the function code; RUN builds the
    invocation payload from ``run_script`` / ``c_args``.

    :param args: parsed argparse namespace; ``args.func.__name__`` names
        the CLI subcommand and determines the call type.
    """
    # Set the command line parsed properties
    self.properties = utils.merge_dicts(self.properties, vars(args))
    call_type = self.set_call_type(args.func.__name__)
    self.set_output_type()
    if ((call_type != CallType.LS) and (not self.delete_all()) and
            (call_type != CallType.PUT) and (call_type != CallType.GET)):
        if (call_type == CallType.INIT):
            if (not self.get_property("name")) or (
                    self.get_property("name") == ""):
                # No explicit name given: derive one from the image id or
                # the image file's base name, else a generic "function".
                func_name = "function"
                if self.get_property("image_id") != "":
                    func_name = self.get_property("image_id")
                elif self.get_property("image_file") != "":
                    func_name = self.get_property("image_file").split(
                        '.')[0]
                self.properties["name"] = self.create_function_name(
                    func_name)
            self.set_tags()
        self.check_function_name()
        function_name = self.get_property("name")
        validators.validate_function_name(function_name,
                                          self.get_property("name_regex"))
        self.set_environment_variables()
        # AWS Lambda handler/log-group naming conventions.
        self.properties["handler"] = function_name + ".lambda_handler"
        self.properties["log_group_name"] = '/aws/lambda/' + function_name
        if (call_type == CallType.INIT):
            self.set_function_code()
        if (call_type == CallType.RUN):
            if self.get_argument_value(args, 'run_script'):
                file_content = utils.read_file(
                    self.get_property("run_script"), 'rb')
                # We first code to base64 in bytes and then decode those bytes to allow json to work
                # https://stackoverflow.com/questions/37225035/serialize-in-json-a-base64-encoded-data#37239382
                parsed_script = utils.utf8_to_base64_string(file_content)
                self.set_property('payload', {"script": parsed_script})
            if self.get_argument_value(args, 'c_args'):
                parsed_cont_args = json.dumps(self.get_property("c_args"))
                self.set_property('payload', {"cmd_args": parsed_cont_args})
def launch_udocker_container(self):
    """Run the udocker container command, bounded by the remaining Lambda
    invocation time, and return its captured output when available.

    :return: contents of the container output file, or ``None`` when the
        file was never created.
    :raises subprocess.TimeoutExpired: re-raised after killing the
        process group when the container outlives the time budget.
    """
    remaining_seconds = lambda_instance.get_invocation_remaining_seconds()
    logger.info("Executing udocker container. Timeout set to {0} seconds".format(remaining_seconds))
    logger.debug("Udocker command: {0}".format(self.cmd_container_execution))
    # LEAK FIX: the output file handle was previously opened inline and
    # never closed; the 'with' releases it even on timeout.
    with open(self.container_output_file, "w") as output_file:
        with subprocess.Popen(self.cmd_container_execution,
                              stderr=subprocess.STDOUT,
                              stdout=output_file,
                              preexec_fn=os.setsid) as process:
            try:
                process.wait(timeout=remaining_seconds)
            except subprocess.TimeoutExpired:
                logger.info("Stopping process '{0}'".format(process))
                utils.kill_process(process)
                logger.warning("Container timeout")
                raise
    if os.path.isfile(self.container_output_file):
        return utils.read_file(self.container_output_file)
def set_function_code(self):
    """Build the deployment package and record its location under the
    'code' property: inline zip bytes by default, or an S3 reference when
    a deployment bucket is configured (uploading the zip there first)."""
    package_props = self.get_function_payload_props()
    # Zip all the files and folders needed
    FunctionPackageCreator(package_props).prepare_lambda_code()
    if 'DeploymentBucket' not in package_props:
        zip_bytes = utils.read_file(self.properties['zip_file_path'], mode="rb")
        self.properties['code'] = {"ZipFile": zip_bytes}
        return
    bucket = package_props['DeploymentBucket']
    self.aws_properties['s3']['input_bucket'] = bucket
    S3(self.aws_properties).upload_file(file_path=package_props['ZipFilePath'],
                                        file_key=package_props['FileKey'])
    self.properties['code'] = {"S3Bucket": bucket,
                               "S3Key": package_props['FileKey']}
def decrypt_data(filename_path, new_filename_path, private_key_path):
    """Decrypt a hybrid-encrypted file and save the plaintext.

    Input layout: RSA-encrypted session key, 16-byte nonce, 16-byte tag,
    then the ciphertext (the format produced by encrypt_data).
    """
    with open(filename_path, "rb") as file_in:
        logger.info(f"Decrypting file {filename_path}...")
        private_key = RSA.import_key(read_file(private_key_path))
        enc_session_key = file_in.read(private_key.size_in_bytes())
        nonce = file_in.read(16)
        tag = file_in.read(16)
        ciphertext = file_in.read(-1)
        # Recover the AES session key with the private RSA key, then
        # decrypt and authenticate the payload.
        session_key = PKCS1_OAEP.new(private_key).decrypt(enc_session_key)
        data = AES.new(session_key, AES.MODE_EAX, nonce).decrypt_and_verify(ciphertext, tag)
        decrypt_filename = extract_or_create_filename(new_filename_path, filename_path)
        save_file(decrypt_filename, data)
        logger.info("File successfully decrypted.")
META, SETTINGS, PROJECT = "meta.json", "settings.json", "timetravel"

utils.copy_settings(META, PROJECT, overwrite=True)
# keep any settings the user has already customized
utils.copy_settings(SETTINGS, PROJECT, overwrite=False)
metadata = utils.read_json(utils.path_settings(PROJECT).joinpath(META))
#endregion

setup(
    name=metadata['name'],
    version=metadata['version'],
    author=metadata['author'],
    description="Walltime utility script for collaboration.",
    keywords="python cli collaboration",
    long_description=utils.read_file("readme.md"),
    long_description_content_type="text/markdown",
    url="https://github.com/StefanGreve/timetravel",
    py_modules=[PROJECT],
    package_dir={'': 'src'},
    install_requires=['click', 'colorama', 'python-dateutil'],
    python_requires=">=3.6.1",
    classifiers=[
        "Natural Language :: English",
        "Environment :: Console",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.8",
        "Topic :: Utilities",
        "Development Status :: 4 - Beta",
        "Operating System :: OS Independent",
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
    ],
)
utils.copy_settings(META, PROJECT, overwrite=True)
# preserve existing user-defined settings
utils.copy_settings(SETTINGS, PROJECT, overwrite=False)
METADATA = utils.read_json(utils.path_settings(PROJECT).joinpath(META))
#endregion

setup(author=METADATA['author'],
      author_email=METADATA['author_email'],
      keywords="python cli utility weather",
      name=METADATA['name'],
      version=METADATA['version'],
      description=METADATA['description'],
      long_description=utils.read_file("README.md"),
      long_description_content_type="text/markdown",
      url=METADATA['url'],
      py_modules=[PROJECT],
      package_dir={'': 'src'},
      install_requires=['click', 'colorama', 'pyowm'],
      # BUG FIX: setuptools expects 'python_requires'; the previous
      # 'python_version' keyword is not a setup() argument and was ignored.
      python_requires=">=3.6",
      classifiers=[
          "Natural Language :: English",
          "Natural Language :: Japanese",
          "Natural Language :: German",
          "Programming Language :: Python",
          "Programming Language :: Python :: 3.8",
          "Topic :: Utilities",
          "Development Status :: 3 - Alpha",
          "Operating System :: OS Independent",
          "License :: OSI Approved :: GNU Public License v3 (GPLv3)"
      ])
def get_vocab(file):
    """Return the cluster vocabulary read from *file*.

    :param file: path passed straight through to ``read_file``.
    :return: whatever ``read_file`` yields for that path.
    """
    return read_file(file)
# console_map[platform_id] = GameRandomizer(platform_id, token) @cherrypy.expose() def index(self, pid=None): if pid: if pid in Root.console_map: return Root.console_map[pid].index() links = make_console_links(Root.console_map, Root.sorted_platforms) index_html = read_file("index.html").replace("{links}", links) return index_html if __name__ == "__main__": host = read_file("host.txt").strip() cherrypy.config.update({ 'server.socket_port': 8080, 'server.socket_host': host, 'response.timeout': 1600000 }) conf = { "/css": { "tools.staticdir.on": True, "tools.staticdir.dir": os.path.abspath("../bin/css"), }, '/styles.css': { 'tools.staticfile.on': True, 'tools.staticfile.filename':