Example #1
def run_function_wait_result(
        py_fn,
        py_fn_args,
        py_fn_kwargs={},
        endpoint_id="3c3f0b4f-4ae4-4241-8497-d7339972ff4a",
        print_status=True):
    """
    Register and run a function with FuncX, wait for execution,
        and return results when they are available

    :param py_fn: Handle of Python function
    :param py_fn_args: List of positional args for py function
    :param py_fn_kwargs: Dict of keyword args for py function
    :param endpoint_id: ID of endpoint to run command on
        - must be configured in config.py
    :param print_status: Whether to print progress while waiting
    """
    fxc = FuncXClient()
    func_uuid = fxc.register_function(py_fn)
    res = fxc.run(*py_fn_args,
                  **py_fn_kwargs,
                  endpoint_id=endpoint_id,
                  function_id=func_uuid)
    while True:
        try:
            if print_status:
                print("Waiting for results...")
            time.sleep(FUNCX_SLEEP_TIME)
            return str(fxc.get_result(res), encoding="utf-8")
        except Exception as e:
            # Task not ready yet; funcX signals this with "waiting-for-..." states
            if "waiting-for-" in str(e):
                continue
            raise
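
A minimal usage sketch, hedged: it assumes FuncXClient, time, and FUNCX_SLEEP_TIME are importable as in the surrounding module, and that the remote function returns UTF-8 bytes (which the str(..., encoding="utf-8") call above requires); double is a hypothetical example function.

import time

from funcx.sdk.client import FuncXClient

FUNCX_SLEEP_TIME = 5  # assumption; normally defined in config.py


def double(x):
    # Hypothetical remote function; returns bytes so the caller can decode it
    return str(2 * x).encode("utf-8")


output = run_function_wait_result(double, [21])
print(output)  # "42" once the task completes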
Example #2
def register_container():
    from funcx.sdk.client import FuncXClient
    fxc = FuncXClient()
    from gladier_xpcs.tools.corr import eigen_corr
    cont_dir = '/eagle/APSDataAnalysis/XPCS_test/containers/'
    container_name = 'eigen_v2.simg'
    eigen_cont_id = fxc.register_container(location=cont_dir + container_name,
                                           container_type='singularity')
    corr_cont_fxid = fxc.register_function(eigen_corr, container_uuid=eigen_cont_id)
    return corr_cont_fxid
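
The returned id can then be invoked like any other funcX function. A hedged sketch; the endpoint UUID and the payload below are placeholders, not values from the original script:

from funcx.sdk.client import FuncXClient

fxc = FuncXClient()
corr_fxid = register_container()
xpcs_endpoint = '<xpcs-endpoint-uuid>'  # placeholder endpoint UUID
task_id = fxc.run({'hdf': '/path/to/data.hdf'},  # hypothetical eigen_corr payload
                  endpoint_id=xpcs_endpoint,
                  function_id=corr_fxid)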
Example #3
    def handle(self, *args, **options):
        if options['register']:
            fxc = FuncXClient()
            ep = fxc.register_function(process_hdfs,
                                       description="Process an hdf")
            self.stderr.write(f'FuncX function endpoint has been '
                              f'registered: {ep}')
            self.stderr.write('You need to add this somewhere manually!')
        elif options['test']:
            name = options['test']
            if not name:
                self.stderr.write('test needs the name of a search collection')
            bag = Bag.objects.filter(
                search_collection__name=options['test']).first()
            if bag:
                action = ReprocessingTask.new_action(bag, user=bag.user)
                action.save()
                rt = ReprocessingTask(bag=bag, action=action)
                rt.save()
                rt.action.start_flow()
                self.stdout.write(f'Started {action}')
            else:
                bags = [b.search_collection.name for b in Bag.objects.all()]
                self.stderr.write(f'No bag named {options["test"]}, please '
                                  f'use one of the following instead {bags}')
        elif options['check']:
            rts = ReprocessingTask.objects.filter(action__status='ACTIVE')
            if not rts:
                self.stderr.write('No Tasks to update.')
            for rt in rts:
                old = rt.action.status
                rt.action.update_flow()
                self.stdout.write(f'Updated {rt.bag.search_collection.name} '
                                  f'from "{old}" to "{rt.action.status}".')
        elif options.get('payload') is not None:
            raise NotImplementedError('This does not work yet...')
            # Unreachable until the NotImplementedError above is removed
            pl = self.get_task_or_list_all(options['payload']).action.payload
            plain_pl = deserialize_payload(pl['ProcessDataInput']['payload'])
            pprint(plain_pl)
        elif options.get('output') is not None:
            task = self.get_task_or_list_all(options['output'])
            if not task:
                return
            automate_output = task.action.cache['details']['output']
            outputs = [
                data['details'] for name, data in automate_output.items()
                if 'details' in data
            ]
            for output in outputs:
                if 'result' in output:
                    pprint(deserialize_payload(output['result']))
                elif 'exception' in output:
                    deserialize_payload(output['exception']).reraise()
Example #4
def run_real(payload):
    """Run a function with some raw json input.

    Note: the payload argument is unused; a fixed payload is sent below.
    """
    fxc = FuncXClient(funcx_service_address='https://dev.funcx.org/api/v1')

    # register a function
    func_id = fxc.register_function(test_func)
    ep_id = '60ad46e1-c912-468b-8674-4d582e9dc9ee'

    res = fxc.run({'name': 'real'}, function_id=func_id, endpoint_id=ep_id)

    print(res)

    return res
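
fxc.run returns a task id, not the function's output; a hedged retrieval loop (get_result raises while the task is still pending):

import time

from funcx.sdk.client import FuncXClient

fxc = FuncXClient(funcx_service_address='https://dev.funcx.org/api/v1')
task_id = run_real(None)  # payload is unused, as noted in the docstring
while True:
    try:
        print(fxc.get_result(task_id))
        break
    except Exception:
        time.sleep(2)  # still pending; retry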
Example #5
def register_funcx(task):
    """Register the function and the container with funcX.

    Parameters
    ----------
    task : dict
        A dict of the task to publish

    Returns
    -------
    str
        The funcX function id
    """

    # Get the funcX dependent token
    fx_token = task['dlhub']['funcx_token']
    # Create a client using this token
    fx_auth = globus_sdk.AccessTokenAuthorizer(fx_token)
    fxc = FuncXClient(fx_authorizer=fx_auth, use_offprocess_checker=False)
    description = f"A container for the DLHub model {task['dlhub']['shorthand_name']}"
    try:
        description = task['datacite']['descriptions'][0]['description']
    except (KeyError, IndexError):
        # It doesn't have a simple description
        pass
    # Register the container with funcX
    container_id = fxc.register_container(task['dlhub']['ecr_uri'],
                                          'docker',
                                          name=task['dlhub']['shorthand_name'],
                                          description=description)

    # Register a function
    funcx_id = fxc.register_function(dlhub_run,
                                     function_name=task['dlhub']['name'],
                                     container_uuid=container_id,
                                     description=description,
                                     public=True)

    # Whitelist the function on DLHub's endpoint
    # First create a new fxc client on DLHub's behalf
    fxc = FuncXClient(use_offprocess_checker=False)
    endpoint_uuid = '86a47061-f3d9-44f0-90dc-56ddc642c000'
    res = fxc.add_to_whitelist(endpoint_uuid, [funcx_id])
    print(res)
    return funcx_id
Example #6
def run_ser(payload):
    """Run a function with some raw json input.
    """
    fxc = FuncXClient(funcx_service_address='https://dev.funcx.org/api/v1')

    # register a function
    func_id = fxc.register_function(test_func)
    ep_id = '60ad46e1-c912-468b-8674-4d582e9dc9ee'
    payload = {
        'serialize': True,
        'payload': payload,
        'endpoint': ep_id,
        'func': func_id
    }

    res = fxc.post('submit', json_body=payload)
    res = res['task_uuid']
    print(res)

    return res
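
Unlike fxc.run, this submits a hand-built payload straight to the REST route, so serialization and task bookkeeping are explicit. A hedged status check for the returned task id, polling the same low-level tasks route that later snippets use (assumes FuncXClient imported as in the other examples):

fxc = FuncXClient(funcx_service_address='https://dev.funcx.org/api/v1')
task_id = run_ser({'name': 'serialized'})  # hypothetical payload
status = fxc.get(f"tasks/{task_id}")       # raw REST GET helper
print(status['status'])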
Example #7
def run_function_async(py_fn,
                       py_fn_args,
                       py_fn_kwargs={},
                       endpoint_id="3c3f0b4f-4ae4-4241-8497-d7339972ff4a"):
    """
    Asynchronously register and run a Python function on a FuncX endpoint

    :param py_fn: Handle of Python function
    :param py_fn_args: List of positional args for py function
    :param py_fn_kwargs: Dict of keyword args for py function
    :param endpoint_id: ID of endpoint to run command on
        - must be configured in config.py
    """
    # Use return value for Funcx polling
    fxc = FuncXClient()
    func_uuid = fxc.register_function(py_fn)
    res = fxc.run(*py_fn_args,
                  **py_fn_kwargs,
                  endpoint_id=endpoint_id,
                  function_id=func_uuid)
    return res
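
Only the task id comes back, so the caller does the polling; a hedged sketch reusing the "waiting-for-" pattern from Example #1 (my_fn is hypothetical):

import time

from funcx.sdk.client import FuncXClient


def my_fn(x):
    # Hypothetical payload function
    return x + 1


fxc = FuncXClient()
task_id = run_function_async(my_fn, [41])
while True:
    try:
        result = fxc.get_result(task_id)
        break
    except Exception as e:
        if "waiting-for-" in str(e):
            time.sleep(5)  # task still pending
            continue
        raise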
Example #8
exec_time3 = ((end3 - start3) * 1000)
print(exec_time3)
fxc.throttling_enabled = False
n = 2000
estimate = []


def matrix(ress):
    return ress


mat1 = ([1, 6, 5], [3, 4, 8], [2, 12, 3])
mat2 = ([3, 4, 6], [5, 6, 7], [6, 56, 7])
#overhead = []
start1 = time.time()
hello_function = fxc.register_function(matrix)
#ress = np.dot(mat1,mat2)
end1 = time.time()
registertime = ((end1 - start1) * 1000)
print("The register time is:", registertime)
index = 1
book = xlwt.Workbook()
sheet = book.add_sheet('data', cell_overwrite_ok=True)
sheet.write(0, 0, 'exec_time3')
sheet.write(0, 1, 'registertime')
sheet.write(0, 2, 'runtime')
sheet.write(0, 3, 'exec_time')
for i in range(0, n):
    # funcx-endpoint start Test
    # funcx-endpoint stop Test
    # funcx-endpoint start Test
    pass  # loop body truncated in this excerpt (see Example #20 for the full pattern)
Example #9
        with open(result_path, 'w') as f:
            print('problem loading processor instance for {}'.format(
                str(item)),
                  file=f)
            print(e, file=f)
            print('environment:', file=f)
            for key, value in os.environ.items():
                print('{}: {}'.format(key, value), file=f)
            print('hostname: {}'.format(
                subprocess.check_output('hostname', shell=True)),
                  file=f)

    if stageout_url.startswith('root://'):
        command = 'xrdcp {} {}'.format(result_path,
                                       os.path.join(stageout_url, subdir))
        subprocess.call(command, shell=True)
        os.unlink(result_path)

    return os.path.join(subdir, os.path.basename(result_path))


client = FuncXClient()
uuids = {}
for func in [process]:
    f = timeout(func)
    uuids[func.__name__] = client.register_function(f)

with open('data/function_uuids.json', 'w') as f:
    f.write(json.dumps(uuids, indent=4, sort_keys=True))
Example #10
from funcx.sdk.client import FuncXClient

fxc = FuncXClient()


def funcx_sum(items):
    return sum(items)


def funcx_KMeans(n_clusters, data):
    from sklearn.cluster import KMeans as sklearn_cluster_KMeans
    k_means = sklearn_cluster_KMeans(n_clusters=n_clusters)
    k_means.fit(data)
    return k_means, k_means.predict(data)


#func_uuid = fxc.register_function(funcx_sum,
#                                  description="A summation function")
#print(func_uuid)

func_uuid = fxc.register_function(
    funcx_KMeans, description="A wrapper for sklearn.cluster.KMeans")

print(func_uuid)
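
A hedged invocation sketch for the registered KMeans wrapper; the endpoint id is the public tutorial endpoint mentioned in a later example, and both the fitted model and the labels must be picklable to travel back through funcX:

import numpy as np

data = np.random.rand(30, 2)
task = fxc.run(3, data,
               endpoint_id='4b116d3c-1703-4f8f-9f6f-39921e5864df',
               function_id=func_uuid)
# Later: k_means, labels = fxc.get_result(task)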
Example #11

from funcx.sdk.client import FuncXClient
fxc = FuncXClient()


def funcx_test():
    while True:
        print("Viana")


func_uuid = fxc.register_function(funcx_test)
tutorial_endpoint = '70d29c21-66c3-4ba8-98fc-91490b522699'  # Public tutorial endpoint
res = fxc.run(endpoint_id=tutorial_endpoint, function_id=func_uuid)
funcx_test()
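
Note that funcx_test never returns, so fxc.get_result(res) would block on a task that never finishes, and the local funcx_test() call above loops forever as well. A terminating variant, as a hedged sketch:

def funcx_hello():
    return "Viana"


hello_uuid = fxc.register_function(funcx_hello)
res = fxc.run(endpoint_id=tutorial_endpoint, function_id=hello_uuid)
# fxc.get_result(res) eventually returns "Viana"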
Example #12
    # Connect to the MDML to query for data
    exp = mdml.experiment(params['experiment_id'], params['user'],
                          params['pass'], params['host'])
    # Grabbing the latest temperature value
    query = [{"device": "DATA1", "variables": ["temperature"], "last": 1}]
    res = exp.query(query, verify_cert=False)  # Running the query
    tempF = res['DATA1'][0]['temperature']
    tempC = (tempF - 32) * (5 / 9)
    return {'time': mdml.unix_time(True), 'tempC': tempC}


# Registering the function
if args.register:
    from funcx.sdk.client import FuncXClient
    fxc = FuncXClient()
    funcx_func_uuid = fxc.register_function(
        basic_analysis, description="Temperature conversion")
    print(f'funcX UUID: {funcx_func_uuid}')
else:  # Use the most recent function funcx ID (manually put here after running --register once)
    funcx_func_uuid = "1712a2fc-cc40-4b2c-ae44-405d58f78c5d"  # Sept 16th 2020

# Now that the function is ready for use, we need to start an experiment to use it with
import sys
import time
sys.path.insert(1, '../')  # using local mdml_client
import mdml_client as mdml

exp = mdml.experiment("TEST", args.username, args.password, args.host)
exp.add_config(auto=True)
exp.send_config()
time.sleep(1)
Example #13

def prepare_workspace(data):
    import pyhf

    return pyhf.Workspace(data)


if __name__ == "__main__":
    # locally get pyhf pallet for analysis
    if not Path("1Lbb-pallet").exists():
        download("https://doi.org/10.17182/hepdata.90607.v3/r3", "1Lbb-pallet")
    with open("1Lbb-pallet/BkgOnly.json") as bkgonly_json:
        bkgonly_workspace = json.load(bkgonly_json)

    # Use privately assigned endpoint id
    with open("endpoint_id.txt") as endpoint_file:
        pyhf_endpoint = str(endpoint_file.read().rstrip())

    fxc = FuncXClient()

    # Register function and execute on worker node
    prepare_func = fxc.register_function(prepare_workspace)
    prepare_task = fxc.run(bkgonly_workspace,
                           endpoint_id=pyhf_endpoint,
                           function_id=prepare_func)

    # Wait for worker to finish and retrieve results
    workspace = None
    while not workspace:
        try:
            workspace = fxc.get_result(prepare_task)
        except Exception as excep:
            print(f"prepare: {excep}")
            sleep(10)
Example #14
          'w') as f:
    f.write(config)

# Start the endpoint
endpoint_name = args.endpoint_name
cmd = "funcx-endpoint start {}".format(endpoint_name)
try:
    subprocess.call(cmd, shell=True)
except Exception as e:
    print(e)
print("Started the endpoint {}".format(args.endpoint_id))
print("Wating 60 seconds for the endpoint to start")
time.sleep(60)

fxc = FuncXClient()
func_uuid = fxc.register_function(dlhub_test, description="A sum function")
print("The functoin uuid is {}".format(func_uuid))

fxs = FuncXSerializer()


def test(tasks=1,
         data=[1],
         timeout=float('inf'),
         endpoint_id=None,
         function_id=None,
         poll=0.1):

    start = time.time()
    res = fxc.run(data, endpoint_id=endpoint_id, function_id=function_id)
    print("Task ID: {}".format(res))
Example #15
# In[73]:

local_ep = "4325781a-fcfc-4dac-9017-aa5bf97db85b"
theta_ep = "f3d6b327-d262-43a2-96da-0dbf1f5468b2"
cooley_ep = "2bf4b19b-eaec-42b2-a191-b1542f3cc868"

fxc = FuncXClient()

# In[70]:


def hello_world(name):
    return f"Hello, {name}"


hello_func = fxc.register_function(hello_world,
                                   description="Test hello world.")
print(hello_func)

# In[71]:

name = "Ryan"
res = fxc.run(name=name, endpoint_id=local_ep, function_id=hello_func)

# In[72]:

fxc.get_result(res)

# In[74]:

name = "Cooley"
res = fxc.run(name=name, endpoint_id=cooley_ep, function_id=hello_func)
Example #16
def main(args):
    if args.config_file is not None:
        with open(args.config_file, "r") as infile:
            config = json.load(infile)

    backend = args.backend

    pallet_path = Path(config["input_prefix"]).joinpath(config["pallet_name"])

    # locally get pyhf pallet for analysis
    if not pallet_path.exists():
        download(config["pallet_url"], pallet_path)

    analysis_name = config["analysis_name"]
    analysis_prefix_str = "" if analysis_name is None else f"{analysis_name}_"
    if config["analysis_dir"] is not None:
        pallet_path = pallet_path.joinpath(config["analysis_dir"])

    with open(pallet_path.joinpath(
            f"{analysis_prefix_str}BkgOnly.json")) as bkgonly_json:
        bkgonly_workspace = json.load(bkgonly_json)

    # Initialize funcX client
    fxc = FuncXClient()
    fxc.max_requests = 200

    with open("endpoint_id.txt") as endpoint_file:
        pyhf_endpoint = str(endpoint_file.read().rstrip())

    # register functions
    prepare_func = fxc.register_function(prepare_workspace)
    infer_func = fxc.register_function(infer_hypotest)

    # execute background only workspace
    prepare_task = fxc.run(bkgonly_workspace,
                           backend,
                           endpoint_id=pyhf_endpoint,
                           function_id=prepare_func)

    # Read patchset in while background only workspace running
    with open(pallet_path.joinpath(
            f"{analysis_prefix_str}patchset.json")) as patchset_json:
        patchset = pyhf.PatchSet(json.load(patchset_json))

    workspace = None
    while not workspace:
        try:
            workspace = fxc.get_result(prepare_task)
        except Exception as excep:
            print(f"prepare: {excep}")
            sleep(10)

    print("--------------------")
    print(workspace)

    # execute patch fits across workers and retrieve them when done
    n_patches = len(patchset.patches)
    tasks = {}
    for patch_idx in range(n_patches):
        patch = patchset.patches[patch_idx]
        task_id = fxc.run(
            workspace,
            patch.metadata,
            [patch.patch],
            backend,
            endpoint_id=pyhf_endpoint,
            function_id=infer_func,
        )
        tasks[patch.name] = {"id": task_id, "result": None}

    while count_complete(tasks.values()) < n_patches:
        for task in tasks.keys():
            if not tasks[task]["result"]:
                try:
                    result = fxc.get_result(tasks[task]["id"])
                    print(
                        f"Task {task} complete, there are {count_complete(tasks.values())+1} results now"
                    )
                    tasks[task]["result"] = result
                except Exception as excep:
                    print(f"inference: {excep}")
                    sleep(15)

    print("--------------------")
    print(tasks.values())
Example #17

print(exec_time3)
fxc.throttling_enabled = False
high = 10
low = 5
m, n = 100, 100
a = (high - low) * np.random.rand(m, n) + low


def complex(a):
    return a


n = 2000
estimate = []
start1 = time.time()
hello_function = fxc.register_function(complex)
end1 = time.time()
registertime = ((end1 - start1) * 1000)
print("The register time is:", registertime)
index = 1
book = xlwt.Workbook()
sheet = book.add_sheet('data', cell_overwrite_ok=True)
sheet.write(0, 0, 'exec_time3')
sheet.write(0, 1, 'registertime')
sheet.write(0, 2, 'runtime')
sheet.write(0, 3, 'exec_time')
for i in range(0, n):
    #start1 = time.time()
    #hello_function = fxc.register_function(matrix)
    #end1 = time.time()
    #registertime = ((end1 - start1) * 1000)
    pass  # loop body truncated in this excerpt
Example #18
class TestTutorial:
    def __init__(
        self,
        fx_auth,
        search_auth,
        openid_auth,
        endpoint_id,
        func,
        expected,
        args=None,
        timeout=15,
        concurrency=1,
        tol=1e-5,
    ):
        self.endpoint_id = endpoint_id
        self.func = func
        self.expected = expected
        self.args = args
        self.timeout = timeout
        self.concurrency = concurrency
        self.tol = tol
        self.fxc = FuncXClient(
            fx_authorizer=fx_auth,
            search_authorizer=search_auth,
            openid_authorizer=openid_auth,
        )
        self.func_uuid = self.fxc.register_function(self.func)

        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            "%(asctime)s %(name)s:%(lineno)d [%(levelname)s]  %(message)s")
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)

    def run(self):
        try:
            submissions = []
            for _ in range(self.concurrency):
                task = self.fxc.run(self.args,
                                    endpoint_id=self.endpoint_id,
                                    function_id=self.func_uuid)
                submissions.append(task)

            time.sleep(self.timeout)

            unfinished = copy.deepcopy(submissions)
            while True:
                unfinished[:] = [
                    task for task in unfinished
                    if self.fxc.get_task(task)["pending"]
                ]
                if not unfinished:
                    break
                time.sleep(self.timeout)

            success = 0
            for task in submissions:
                result = self.fxc.get_result(task)
                if abs(result - self.expected) > self.tol:
                    # logger.error, not .exception: there is no active exception here
                    self.logger.error(
                        f"Difference for task {task}. "
                        f"Returned: {result}, Expected: {self.expected}")
                else:
                    success += 1

            self.logger.info(
                f"{success}/{self.concurrency} tasks completed successfully")
        except KeyboardInterrupt:
            self.logger.info("Cancelled by keyboard interruption")
        except Exception as e:
            self.logger.exception(f"Encountered exception: {e}")
            raise
Example #19
def test_batch1(a, b, c=2, d=2):
    return a + b + c + d


def test_batch2(a, b, c=2, d=2):
    return a * b * c * d


def test_batch3(a, b, c=2, d=2):
    return a + 2 * b + 3 * c + 4 * d


funcs = [test_batch1, test_batch2, test_batch3]
func_ids = []
for func in funcs:
    func_ids.append(fx.register_function(func, description='test'))

ep_id = '4b116d3c-1703-4f8f-9f6f-39921e5864df'
print("FN_UUID : ", func_ids)

start = time.time()
task_count = 5
batch = fx.create_batch()
for func_id in func_ids:
    for i in range(task_count):
        batch.add(i,
                  i + 1,
                  c=i + 2,
                  d=i + 3,
                  endpoint_id=ep_id,
                  function_id=func_id)
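
The excerpt stops before the batch is submitted. A hedged sketch of the remaining steps, assuming the funcx SDK's batch interface of this era (batch_run returning a list of task ids, get_batch_result a dict keyed by task id):

import time

task_ids = fx.batch_run(batch)
results = {}
while len(results) < len(task_ids):
    time.sleep(2)
    for task_id, info in fx.get_batch_result(task_ids).items():
        if not info.get('pending') and 'result' in info:
            results[task_id] = info['result']
print("Got {} results in {:.2f}s".format(len(results), time.time() - start))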
Example #20
estimate = []
#overhead = []

index = 1
book = xlwt.Workbook()
sheet = book.add_sheet('data', cell_overwrite_ok=True)
sheet.write(0, 0, 'exec_time3')
sheet.write(0, 1, 'registertime')
sheet.write(0, 2, 'runtime')
sheet.write(0, 3, 'exec_time')
for i in range(0, n):
    #funcx-endpoint start Test
    #funcx-endpoint stop Test
    #funcx-endpoint start Test
    start1 = time.time()
    hello_function = fxc.register_function(car)
    enh = ImageEnhance.Contrast(im)
    end1 = time.time()
    registertime = ((end1 - start1) * 1000)
    print("The register time is:", registertime)
    start2 = time.time()
    # res = fxc.run(items, endpoint_id='7601789e-8569-413f-be3e-e573d04c5799', function_id=sum_function)
    res = fxc.run(enh,
                  endpoint_id='d4b5b300-d12b-40d2-acff-fa54cb7dcfb2',
                  function_id=hello_function)
    end2 = time.time()
    runtime = ((end2 - start2) * 1000)
    print("The runtime is:", runtime)
    # get the raw json response
    start = time.time()
    result = fxc.get(f"tasks/{res}")
Example #21
    parser.add_argument(
        "--proxy",
        action="store_true",
        help="Use proxy store to pass inputs",
    )
    parser.add_argument(
        "--redis-port",
        type=int,
        default=59465,
        help="If not None, use Redis backend",
    )
    args = parser.parse_args()

    fxc = FuncXClient()

    double_uuid = fxc.register_function(app_double)
    sum_uuid = fxc.register_function(app_sum)

    if args.proxy:
        store = ps.store.init_store(
            "redis",
            hostname="127.0.0.1",
            port=args.redis_port,
        )

    batch = fxc.create_batch()
    for _ in range(args.num_arrays):
        x = np.random.rand(args.size, args.size)
        if args.proxy:
            x = store.proxy(x)
        batch.add(x, endpoint_id=args.funcx_endpoint, function_id=double_uuid)
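
With --proxy, each array is replaced by a small proxy that workers resolve from the Redis store on first access, so the funcX request payload stays small regardless of --size. A hedged continuation that submits the batch (app_sum would be chained over the doubled results in the same way):

    double_tasks = fxc.batch_run(batch)  # submit; returns a list of task ids
    print(f"Submitted {len(double_tasks)} double tasks")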
Example #22
    query = [{
        "device": "DEVICE_A",
        "variables": [],
        "last": 1
    }]
    exp = mdml.experiment('TEST','test','testtest','merfpoc.egs.anl.gov')
    dat = exp.query(query)
    print(dat)
    row = dat['DEVICE_A'][0]
    var_sum = (float(row['variable1']) + float(row['variable2']) +
               float(row['variable3']) + float(row['variable4']) +
               float(row['variable5']))
    return str(var_sum)

if args.register:
    from funcx.sdk.client import FuncXClient
    fxc = FuncXClient()
    funcx_func_uuid = fxc.register_function(sum_vars,
        description="Sum 5 variables")
    print(f'FuncX function UUID: {funcx_func_uuid}')
else:
    funcx_func_uuid = '2b5b472c-8f04-4dec-bc03-fe7ed0717cfa'

funcx_endp_id = "a62a830a-5cd1-42a8-a4a8-a44fa552c899" # merf.egs.anl.gov endpoint
# funcx_endp_id = "2895306b-569f-4ec9-815a-bcab73ea32f7" # 146.137.10.50 endpoint
# funcx_endp_id = "4b116d3c-1703-4f8f-9f6f-39921e5864df" # public tutorial endpoint

reset = False
try:
    i = 1
    while True:
        # Send 5 datapoints and then an analyses
        while i < 6:
            # Create random data
Example #23
    chunks = np.array_split(smiles, n_splits)
    feats = np.concatenate(pool.map(compute_features, chunks))
    result = invoke_model(feats, smiles)
    result.update(other_cols)

    # Measure the end time
    end_time = datetime.utcnow().isoformat()
    return {
        'start': start_time,
        'result': result,
        'end': end_time,
        'core_count': core_count,
        'hostname': hostname
    }


# Test run
print(inference_function(['C', 'CCCCC'], identifier=[1, 2]))

# Make the client
fxc = FuncXClient()

# Register and save the function
func_uuid = fxc.register_function(
    inference_function,
    description="Infer toxicity based on Tox21 with Deepchem's Graph Convolution")
print(f'Registered function as {func_uuid}')
with open('func_uuid.json', 'w') as fp:
    json.dump(func_uuid, fp)
Example #24

import requests
from funcx.sdk.client import FuncXClient

pyhf_endpoint = 'a727e996-7836-4bec-9fa2-44ebf7ca5302'

fxc = FuncXClient()
fxc.max_requests = 200


def prepare_workspace(data):
    import pyhf
    w = pyhf.Workspace(data)
    return w


prepare_func = fxc.register_function(prepare_workspace)


def infer_hypotest(w, metadata, doc):
    import pyhf
    import time

    tick = time.time()
    m = w.model(patches=[doc],
                modifier_settings={
                    "normsys": {
                        "interpcode": "code4"
                    },
                    "histosys": {
                        "interpcode": "code4p"
                    },
Example #25

estimate = []
#overhead = []

index = 1
book = xlwt.Workbook()
sheet = book.add_sheet('data', cell_overwrite_ok=True)
sheet.write(0, 0, 'exec_time3')
sheet.write(0, 1, 'registertime')
sheet.write(0, 2, 'runtime')
sheet.write(0, 3, 'exec_time')
for i in range(0, n):
    # funcx-endpoint start Test
    # funcx-endpoint stop Test
    # funcx-endpoint start Test
    start1 = time.time()
    hello_function = fxc.register_function(hello_world)
    event = ("Hello World")
    end1 = time.time()
    registertime = ((end1 - start1) * 1000)
    print("The register time is:", registertime)
    start2 = time.time()
    # res = fxc.run(items, endpoint_id='7601789e-8569-413f-be3e-e573d04c5799', function_id=sum_function)
    res = fxc.run(event,
                  endpoint_id='a4c93d82-58e0-4062-aa97-be34f6734e88',
                  function_id=hello_function)
    end2 = time.time()
    runtime = ((end2 - start2) * 1000)
    print("The runtime is:", runtime)
    # get the raw json response
    start = time.time()
    result = fxc.get(f"tasks/{res}")
    while result['status'] != 'success':
        time.sleep(1)
        result = fxc.get(f"tasks/{res}")  # re-poll; otherwise the loop never exits