import hashlib
import json
import math
import os
import random
import statistics
import time

from mfn_sdk import MfnClient

# Assumed to be provided elsewhere in this test-utilities module (not shown in
# this excerpt): run_command_return_output(), combine_output(), and the
# mfntestpassed / mfntestfailed result markers used by report().


class MFNTest():

    def __init__(self, test_name=None, timeout=None, workflow_filename=None, new_user=False):
        self._settings = self._get_settings()

        if new_user:
            random_str = str(random.randint(0, 10000)) + "_" + str(time.time())
            random_user = hashlib.sha256(random_str.encode()).hexdigest()
            random_user = "******" + random_user[0:31] + "@knix.io"
            print("User: " + random_user)
            # NOTE: the client construction with the generated user's
            # credentials was redacted in the source; MfnClient() falls back
            # to its configured settings.
            self._client = MfnClient()
        else:
            print("User: " + self._settings["mfn_user"])
            self._client = MfnClient()

        if workflow_filename is None:
            self._workflow_filename = self._settings["workflow_description_file"]
        else:
            self._workflow_filename = workflow_filename

        ind = self._workflow_filename.rfind("/")
        if ind != -1:
            self._workflow_folder = self._workflow_filename[:ind + 1]
        else:
            self._workflow_folder = "./"
        print("Workflow folder: " + self._workflow_folder)

        self._workflow_description = self._get_json_file(self._workflow_filename)

        if "name" in self._workflow_description:
            self._workflow_name = self._workflow_description["name"]
        else:
            self._workflow_name = self._workflow_filename[0:self._workflow_filename.rfind(".")]

        if test_name is not None:
            self._test_name = test_name
        else:
            self._test_name = self._workflow_filename

        if timeout is not None:
            self._settings["timeout"] = timeout

        self._log_clear_timestamp = int(time.time() * 1000.0 * 1000.0)

        # will be the deployed workflow object in self._client
        self._workflow = None
        self._deployment_error = ""

        self._workflow_resources = []

        self.upload_workflow()
        self.deploy_workflow()

    def _get_json_file(self, filename):
        json_data = {}
        if os.path.isfile(filename):
            with open(filename) as json_file:
                json_data = json.load(json_file)
        return json_data

    def _get_settings(self):
        settings = {}
        # read the default global settings file
        settings.update(self._get_json_file("../settings.json"))
        # read the test-specific settings
        settings.update(self._get_json_file("settings.json"))
        if len(settings) == 0:
            raise Exception("Empty settings")
        # default timeout (seconds) if neither settings nor the constructor provide one
        settings.setdefault("timeout", 60)
        return settings

    def _get_resource_info(self, resource_ref):
        #dir_list = next(os.walk('.'))[1]
        dir_list = next(os.walk(self._workflow_folder))[1]

        is_zip = False
        is_jar = False
        runtime = ""
        found = False

        if "zips" in dir_list:
            resource_filename = self._workflow_folder + "zips/" + resource_ref + ".zip"
            if os.path.isfile(resource_filename):
                found = True
                runtime = "Python 3.6"
                is_zip = True

        if not found:
            if "python" in dir_list:
                resource_filename = self._workflow_folder + "python/" + resource_ref + ".py"
                if os.path.isfile(resource_filename):
                    found = True
                    runtime = "Python 3.6"
            else:
                resource_filename = self._workflow_folder + resource_ref + ".py"
                if os.path.isfile(resource_filename):
                    found = True
                    runtime = "Python 3.6"

        if not found and "jars" in dir_list:
            resource_filename = self._workflow_folder + "jars/" + resource_ref + ".jar"
            if os.path.isfile(resource_filename):
                found = True
                runtime = "Java"
                is_jar = True

        if not found:
            if "java" in dir_list:
                resource_filename = self._workflow_folder + "java/" + resource_ref + ".java"
                if os.path.isfile(resource_filename):
                    found = True
                    runtime = "Java"
            else:
                resource_filename = self._workflow_folder + resource_ref + ".java"
                if os.path.isfile(resource_filename):
                    found = True
                    runtime = "Java"

        retval = {}
        retval["resource_filename"] = resource_filename
        retval["resource_runtime"] = runtime
        retval["is_zip"] = is_zip
        retval["is_jar"] = is_jar
        return retval

    def _get_resource_info_map(self, workflow_description=None, resource_info_map=None):
        if workflow_description is None:
            workflow_description = self._workflow_description
        if resource_info_map is None:
            resource_info_map = {}

        if "functions" in workflow_description:
            workflow_functions = workflow_description["functions"]
            for wf_function in workflow_functions:
                if "name" in wf_function:
                    resource_name = wf_function["name"]
                    resource_ref = resource_name
                    if "resource" in wf_function:
                        resource_ref = wf_function["resource"]
                    if resource_ref not in resource_info_map.keys():
                        resource_info = self._get_resource_info(resource_ref)
                        resource_info["resource_req_filename"] = "requirements/" + resource_ref + "_requirements.txt"
                        resource_info["resource_env_filename"] = "environment_variables/" + resource_ref + "_environment_variables.txt"
                        resource_info_map[resource_ref] = resource_info
        elif "States" in workflow_description:
            states = workflow_description["States"]
            for sname in states:
                state = states[sname]
                if "Resource" in state:
                    resource_name = state["Resource"]
                    if resource_name not in resource_info_map.keys():
                        resource_info = self._get_resource_info(resource_name)
                        resource_info["resource_req_filename"] = "requirements/" + resource_name + "_requirements.txt"
                        resource_info["resource_env_filename"] = "environment_variables/" + resource_name + "_environment_variables.txt"
                        resource_info_map[resource_name] = resource_info
                if "Type" in state and state["Type"] == "Parallel":
                    branches = state['Branches']
                    for branch in branches:
                        resource_info_map = self._get_resource_info_map(branch, resource_info_map)
                if "Type" in state and state["Type"] == "Map":
                    branch = state['Iterator']
                    #print(str(branch))
                    resource_info_map = self._get_resource_info_map(branch, resource_info_map)
                    #print(str(resource_info_map))
        else:
            print("ERROR: invalid workflow description.")
            assert False

        return resource_info_map

    def _delete_resource_if_existing(self, existing_resources, resource_name):
        for g in existing_resources:
            if g.name == resource_name:
                self._client.delete_function(g)
                print("deleted resource: " + resource_name)
                break

    def _create_and_upload_resource(self, resource_name, resource_info):
        print("Deploying resource: " + resource_name)

        resource_filename = resource_info["resource_filename"]
        is_zip = resource_info["is_zip"]
        is_jar = resource_info["is_jar"]
        resource_req_filename = resource_info["resource_req_filename"]
        resource_env_filename = resource_info["resource_env_filename"]
        resource_runtime = resource_info["resource_runtime"]

        self._workflow_resources.append(resource_name)

        try:
            # add the resource
            g = self._client.add_function(resource_name, runtime=resource_runtime)

            # upload the resource source
            print('Uploading file: ' + resource_filename)
            if is_zip or is_jar:
                g.upload(resource_filename)
            else:
                source_text = ''
                with open(resource_filename, 'r') as f:
                    source_text = f.read()
                g.source = {"code": source_text}

            # upload the resource requirements
            if os.path.isfile(resource_req_filename):
                with open(resource_req_filename, "r") as f:
                    reqs = f.read().strip()
                g.requirements = reqs
                #print("set requirements for function: " + resource_name + " " + reqs)

            # upload the resource environment variables
            if os.path.isfile(resource_env_filename):
                with open(resource_env_filename, "r") as f:
                    env_vars = f.read().strip()
                g.environment_variables = env_vars
                #print("set environment variables for function: " + resource_name + " " + env_vars)
        except Exception as e:
            print("ERROR: Could not create resource.")
            print(str(e))
            assert False

    def upload_workflow(self):
        self.undeploy_workflow()

        resource_info_map = self._get_resource_info_map()
        existing_resources = self._client.functions
        for resource_name in resource_info_map.keys():
            self._delete_resource_if_existing(existing_resources, resource_name)
            resource_info = resource_info_map[resource_name]
            self._create_and_upload_resource(resource_name, resource_info)

    def get_deployment_error(self):
        return self._deployment_error

    def deploy_workflow(self):
        try:
            wf = self._client.add_workflow(self._workflow_name)
            wf.json = json.dumps(self._workflow_description)
            wf.deploy(self._settings["timeout"])
            self._workflow = wf
            if self._workflow.status != "failed":
                print("MFN workflow " + self._workflow_name + " deployed.")
            else:
                print("MFN workflow " + self._workflow_name + " could not be deployed.")
                self._deployment_error = self._workflow.get_deployment_error()
        except Exception as e:
            print("ERROR: Could not deploy workflow.")
            raise e

    def undeploy_workflow(self):
        existing_workflows = self._client.workflows
        for wf in existing_workflows:
            if wf.name == self._workflow_name:
                if wf.status == "deployed":
                    wf.undeploy(self._settings["timeout"])
                    print("Workflow undeployed.")
                self._client.delete_workflow(wf)
                break

        existing_resources = self._client.functions
        for resource_name in self._workflow_resources:
            self._delete_resource_if_existing(existing_resources, resource_name)

        self._client.disconnect()

    def get_test_workflow_endpoints(self):
        if self._workflow.status == "deployed":
            return self._workflow.endpoints

    def execute(self, message, timeout=None, check_duration=False, run_async=False):
        if timeout is None:
            timeout = self._settings["timeout"]
        # 'async' is a reserved keyword in Python 3.7+; renamed to run_async
        if run_async:
            return self._workflow.execute_async(message, timeout)
        return self._workflow.execute(message, timeout, check_duration)

    def get_workflow_logs(self, num_lines=500):
        data = self._workflow.logs(ts_earliest=self._log_clear_timestamp, num_lines=num_lines)
        return data

    def clear_workflow_logs(self):
        self._log_clear_timestamp = int(time.time() * 1000.0 * 1000.0)

    def report(self, success, inp, expected, actual):
        short_inp = self._get_printable(inp)
        if success:
            print(self._test_name + " test " + mfntestpassed + " with input data:", short_inp)
        else:
            print(self._test_name + " test " + mfntestfailed + " with input data:",
                  short_inp + ' (result: ' + json.dumps(actual) + ', expected: ' + json.dumps(expected) + ')')

    def exec_only(self, inp):
        any_failed_tests = False
        try:
            rn = self.execute(json.loads(inp))
            return rn
        except Exception as e:
            any_failed_tests = True
            self.undeploy_workflow()
            print(str(e))
            raise e
        finally:
            time.sleep(2)
            if any_failed_tests:
                self._print_logs(self._workflow.logs())

    def exec_tests(self, testtuplelist, check_just_keys=False, check_duration=False, should_undeploy=True):
        any_failed_tests = False
        durations = []
        time.sleep(2)
        try:
            for tup in testtuplelist:
                current_test_passed = False
                inp, res = tup
                if check_duration:
                    rn, t_total = self.execute(json.loads(inp), check_duration=check_duration)
                else:
                    rn = self.execute(json.loads(inp))

                if check_duration:
                    durations.append(t_total)
                    #print("Total time to execute: " + str(t_total) + " (ms)")

                if check_just_keys:
                    if set(rn.keys()) == set(res.keys()):
                        current_test_passed = True
                else:
                    if rn == json.loads(res):
                        current_test_passed = True

                self.report(current_test_passed, inp, res, rn)
                any_failed_tests = any_failed_tests or (not current_test_passed)
                time.sleep(1)
        except Exception as e:
            print(str(e))
            raise e
        finally:
            time.sleep(2)
            if check_duration:
                print("------")
                print("Request/response latency statistics:")
                print("Number of executions: " + str(len(durations)))
                print("Average (ms): " + str(statistics.mean(durations)))
                print("Median (ms): " + str(statistics.median(durations)))
                print("Minimum (ms): " + str(min(durations)))
                print("Maximum (ms): " + str(max(durations)))
                print("Stdev (ms): " + str(statistics.stdev(durations)))
                print("PStdev (ms): " + str(statistics.pstdev(durations)))
                percentiles = [0.0, 50.0, 90.0, 95.0, 99.0, 99.9, 99.99, 100.0]
                self.print_percentiles(durations, percentiles)
                print("------")

            if any_failed_tests:
                self._print_logs(self._workflow.logs())

            if should_undeploy:
                self.undeploy_workflow()

    def _print_logs(self, logs):
        print(logs)
        for t in logs:
            if t == "timestamp":
                continue
            cur_log = logs[t]
            lines = cur_log.split("\n")
            for line in lines:
                print(line)
            print("------")

    def print_percentiles(self, data, percentiles):
        data.sort()
        for perc in percentiles:
            print(str(perc) + "th percentile (ms): " + str(self.percentile(data, perc / 100.0)))

    def percentile(self, data, percent):
        # linear interpolation between the two closest ranks
        k = (len(data) - 1) * percent
        f = math.floor(k)
        c = math.ceil(k)
        if f == c:
            return data[int(k)]
        d0 = data[int(f)] * (c - k)
        d1 = data[int(c)] * (k - f)
        return d0 + d1

    def _get_printable(self, text, max_len=50):
        if len(text) > max_len:
            return text[:max_len] + " ... (showing " + str(max_len) + "/" + str(len(text)) + " characters.)"
        return text

    def plot_latency_breakdown(self, num_last_executions=15):
        eidlist = self.extract_execution_ids(num_last_executions)
        eid_filename = "eidlist_" + self._test_name + ".txt"
        timestamps_filename = "timestamps_" + self._test_name + ".txt"
        eidlist = eidlist[len(eidlist) - num_last_executions:]
        with open(eid_filename, "w") as f:
            for eid in eidlist:
                f.write(eid + "\n")

        self.parse_metrics(eid_filename, timestamps_filename)

        cmd = "python3 ../plotmfnmetrics.py " + timestamps_filename
        output, error = run_command_return_output(cmd)

        # cleanup
        cmd = "rm esresult.json " + eid_filename + " " + timestamps_filename
        _, _ = run_command_return_output(cmd)

    def parse_metrics(self, eid_filename, timestamps_filename):
        cmd = "python3 ../mfnmetrics.py -eidfile " + eid_filename
        output, error = run_command_return_output(cmd)
        log_lines = combine_output(output, error)
        with open(timestamps_filename, "w") as f:
            for line in log_lines:
                f.write(line + "\n")

    def extract_execution_ids(self, num_last_executions, num_log_lines=2000):
        cmd = "python3 ../wftail.py -n " + str(num_log_lines) + " -wname " + self._workflow_name
        output, error = run_command_return_output(cmd)
        log_lines = combine_output(output, error)
        eidlist = []
        for line in log_lines:
            line = line.strip()
            if line == "":
                continue
            tokens = line.split(" ")
            eid = tokens[7]
            if eid != "[0l]":
                eid = eid[1:-1]
                eidlist.append(eid)
                #print(eid)
        return eidlist

    def exec_keys_check(self, testtuplelist):
        self.exec_tests(testtuplelist, check_just_keys=True)

    # compatibility with older tests
    def cleanup(self):
        return
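# ---------------------------------------------------------------------------
# Example usage of the harness above (a minimal sketch): the module name
# `mfn_test_utils` and the workflow file `wf_echo.json` are assumptions for
# illustration. exec_tests() expects each tuple as (input JSON string,
# expected-output JSON string), matching the json.loads() calls above.
# ---------------------------------------------------------------------------
import json

from mfn_test_utils import MFNTest  # assumed module name for this harness

# Point the harness at a workflow description; resources referenced by the
# workflow are resolved relative to the workflow file's folder.
test = MFNTest(test_name="echo test", workflow_filename="wf_echo.json")

# Each tuple: (input JSON string, expected output JSON string).
testtuplelist = [
    (json.dumps({"msg": "hello"}), json.dumps({"msg": "hello"})),
    (json.dumps([1, 2, 3]), json.dumps([1, 2, 3])),
]

# Runs each input through the deployed workflow, reports pass/fail per tuple,
# and undeploys the workflow at the end (should_undeploy defaults to True).
test.exec_tests(testtuplelist)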
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import logging

from mfn_sdk import MfnClient

c = MfnClient()

logging.basicConfig(level=logging.DEBUG)

fn = c.add_function("echo")
fn.source = {
    'code': """
def handle(event, context):
    context.log("Echoing event: "+str(event))
    return event
"""
}

workflow = c.add_workflow("echo_wf")
# NOTE: the tail of this description was truncated in the source; it is
# completed here minimally, with the single echo function routed to "end"
# as in the other workflow examples.
workflow.json = """{
  "name": "echo_wf",
  "entry": "echo",
  "functions": [
    {
      "name": "echo",
      "next": ["end"]
    }
  ]
}"""
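# To run the echo example end to end, the workflow can then be deployed and
# invoked (a sketch; the 60-second timeout is an arbitrary choice, and
# deploy()/execute()/undeploy() are the same calls the test harness uses).
workflow.deploy(60)

result = workflow.execute({"hello": "world"}, 60)
print(result)  # the echo function returns the event unchanged

workflow.undeploy(60)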
import os
import sys
import datetime
import logging
from zipfile import ZipFile

from mfn_sdk import MfnClient

logging.basicConfig(level=logging.DEBUG)

c = MfnClient('https://knix.io', '*****@*****.**', 'test123', proxies={})

""" This example uploads a given ZIP file to a function
"""

# Create a new function
g = c.add_function('custom')

# Create a zip file from the directory contents
zip_name = "custom_function.zip"
if os.path.exists(zip_name):
    os.remove(zip_name)
with ZipFile(zip_name, 'w') as zf:
    for root, dirs, files in os.walk('.'):
        for fn in files:
            if fn == zip_name:
                continue  # don't add the archive to itself
            zf.write(os.path.join(root, fn))

# upload the zip file
g.upload(zip_name)

""" Uploading the zip file is a combination of uploading 1MB file chunks
and metadata that includes the zip archive listing. The process can be
customized (e.g. archive listing); below is a manual version of chunking
and uploading the function ZIP file.
"""
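# The SDK's internal upload protocol is not reproduced in this excerpt, so the
# following only illustrates the chunking step described above: splitting the
# archive into 1MB base64-encoded pieces. The helper name and chunk handling
# are assumptions for illustration, not mfn_sdk API.
import base64

CHUNK_SIZE = 1024 * 1024  # 1MB, per the description above

def iter_b64_chunks(filename, chunk_size=CHUNK_SIZE):
    """Yield consecutive base64-encoded chunks of a file."""
    with open(filename, 'rb') as f:
        while True:
            chunk = f.read(chunk_size)
            if not chunk:
                break
            yield base64.b64encode(chunk).decode('ascii')

# e.g. count the chunks an upload of the archive would send
num_chunks = sum(1 for _ in iter_b64_chunks(zip_name))
print("would upload", num_chunks, "chunk(s)")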
""" trigger: a script that sets up a triggerable bucket and a function and workflow The workflow is associated with the triggerable bucket. Upon writing to the triggerable bucket, the workflow is executed. The function then writes the data to the general storage. The script tries to retrieve the data from the general storage. """ import base64 import time from mfn_sdk import MfnClient c = MfnClient() function = c.add_function("transform") function.code = """ def handle(event, context): context.log("Triggered "+str(event)) if 'key' in event and 'value' in event: context.put(event['key'], event['value']) return None """ workflow = c.add_workflow("workflow") workflow.json = """{ "name": "workflow", "entry": "transform", "functions": [
""" trigger: a script that sets up a triggerable bucket and a function and workflow The workflow is associated with the triggerable bucket. Upon writing to the triggerable bucket, the workflow is executed. The function then writes the data to the general storage. The script tries to retrieve the data from the general storage. """ import base64 import time from mfn_sdk import MfnClient c = MfnClient() function = c.add_function("react") function.code = """ def handle(event, context): context.log("Triggered "+str(event)) return None """ workflow = c.add_workflow("eventdriven_workflow") workflow.json = """{ "name": "eventdriven_workflow", "entry": "react", "functions": [ { "name": "react", "next": ["end"]