def handler(event, context):
    """Set the provisioned-instance count for a function alias and confirm it.

    The event is a JSON string with 'serviceName', 'functionName', 'alias'
    and an optional 'provisionCount' (defaults to 1).  Returns the provision
    config reported back by Function Compute after the update.
    """
    logger.info("Process event: %s", event)
    request = json.loads(event)
    svc = request['serviceName']
    fn = request['functionName']
    alias = request['alias']
    target = int(request.get('provisionCount', 1))

    # Build an FC client against the internal endpoint using the
    # temporary credentials supplied by the runtime context.
    credentials = context.credentials
    endpoint = 'https://{}.{}-internal.fc.aliyuncs.com'.format(
        context.account_id, context.region)
    fc = fc2.Client(
        endpoint=endpoint,
        accessKeyID=credentials.access_key_id,
        accessKeySecret=credentials.access_key_secret,
        securityToken=credentials.security_token)

    # Apply the new provision count, then read it back for confirmation.
    fc.put_provision_config(svc, alias, fn, target)
    config = fc.get_provision_config(svc, alias, fn).data
    logger.info("Resource: %s, target: %d, current: %d",
                config['resource'], config['target'], config['current'])
    return config
def __init__(self, aliyun_fc_config, storage_config):
    """Initialize the Aliyun Function Compute backend client.

    Args:
        aliyun_fc_config: dict providing 'user_agent', 'public_endpoint',
            'access_key_id', 'access_key_secret' and optionally 'service'.
        storage_config: accepted for interface parity with other backends.
    """
    logger.debug("Creating Aliyun Function Compute client")
    # Logging counts as "active" for any effective level other than WARNING.
    self.log_active = logger.getEffectiveLevel() != logging.WARNING
    self.name = 'aliyun_fc'
    self.config = aliyun_fc_config
    self.is_lithops_worker = is_lithops_worker()
    self.version = 'lithops_{}'.format(__version__)
    self.user_agent = aliyun_fc_config['user_agent']
    # Fall back to the package default when no service name is configured.
    self.service_name = aliyun_fc_config.get('service', aliyunfc_config.SERVICE_NAME)
    self.endpoint = aliyun_fc_config['public_endpoint']
    self.access_key_id = aliyun_fc_config['access_key_id']
    self.access_key_secret = aliyun_fc_config['access_key_secret']
    logger.debug(f"Set Aliyun FC Service to {self.service_name}")
    logger.debug(f"Set Aliyun FC Endpoint to {self.endpoint}")
    self.fc_client = fc2.Client(
        endpoint=self.endpoint,
        accessKeyID=self.access_key_id,
        accessKeySecret=self.access_key_secret)
    logger.info(COMPUTE_CLI_MSG.format('Aliyun Function Compute'))
def __init__(self, aliyun_fc_config, storage_config):
    """Initialize the Aliyun Function Compute FaaS backend.

    Args:
        aliyun_fc_config: dict with 'user_agent', 'public_endpoint',
            'access_key_id', 'access_key_secret', 'role_arn' and
            optionally 'service'.
        storage_config: accepted for interface parity with other backends.
    """
    logger.debug("Creating Aliyun Function Compute client")
    self.name = 'aliyun_fc'
    self.type = 'faas'
    self.config = aliyun_fc_config
    self.user_agent = aliyun_fc_config['user_agent']
    self.endpoint = aliyun_fc_config['public_endpoint']
    self.access_key_id = aliyun_fc_config['access_key_id']
    self.access_key_secret = aliyun_fc_config['access_key_secret']
    self.role_arn = aliyun_fc_config['role_arn']
    # The region is the second dot-separated component of the endpoint.
    self.region = self.endpoint.split('.')[1]
    # Default service name is namespaced by the first four characters of
    # the access key id to keep it unique per account.
    key_prefix = self.access_key_id[0:4].lower()
    self.default_service_name = f'{aliyunfc_config.SERVICE_NAME}_{key_prefix}'
    self.service_name = aliyun_fc_config.get('service', self.default_service_name)
    logger.debug(f"Set Aliyun FC Service to {self.service_name}")
    logger.debug(f"Set Aliyun FC Endpoint to {self.endpoint}")
    self.fc_client = fc2.Client(
        endpoint=self.endpoint,
        accessKeyID=self.access_key_id,
        accessKeySecret=self.access_key_secret)
    logger.info(COMPUTE_CLI_MSG.format('Aliyun Function Compute'))
def __init__(self, *args, **kwargs):
    """Read trigger-test settings from the environment and build the FC client."""
    super(TestService, self).__init__(*args, **kwargs)
    env = os.environ
    self.access_key_id = env['ACCESS_KEY_ID']
    self.access_key_secret = env['ACCESS_KEY_SECRET']
    self.endpoint = env['ENDPOINT']
    self.account_id = env['ACCOUNT_ID']
    self.region = env['REGION']
    self.code_bucket = env['CODE_BUCKET']
    self.invocation_role = env['INVOCATION_ROLE']
    self.invocation_role_sls = env['INVOCATION_ROLE_SLS']
    self.log_project = env['LOG_PROJECT']
    self.log_store = env['LOG_STORE']

    def suffix():
        # Eight random lowercase letters to avoid name collisions between runs.
        return ''.join(random.choice(string.ascii_lowercase) for _ in range(8))

    self.service_name = 'test_trigger_service' + str(uuid.uuid4())
    self.function_name = 'test_trigger_function' + suffix()
    self.http_function_name = 'test_http_function' + suffix()
    self.trigger_name = 'test_trigger' + suffix()
    self.client = fc2.Client(
        endpoint=self.endpoint,
        accessKeyID=self.access_key_id,
        accessKeySecret=self.access_key_secret,
    )
def __init__(self, *args, **kwargs):
    """Build an FC client from the ENDPOINT / ACCESS_KEY_* environment variables."""
    super(TestService, self).__init__(*args, **kwargs)
    env = os.environ
    self.client = fc2.Client(
        endpoint=env['ENDPOINT'],
        accessKeyID=env['ACCESS_KEY_ID'],
        accessKeySecret=env['ACCESS_KEY_SECRET'],
    )
def test_sts(self):
    """Invoke a function using temporary STS credentials instead of account keys."""
    suffix = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    helloWorld = 'test_invoke_hello_world_' + suffix
    logging.info('create function: {0}'.format(helloWorld))
    self.client.create_function(
        self.serviceName, helloWorld,
        handler='main.my_handler', runtime='python2.7',
        codeZipFile='test/hello_world/hello_world.zip')

    # Assume the STS role to obtain short-lived credentials.
    sts_client = AliyunSDK.AcsClient(
        os.environ['ACCESS_KEY_ID'], os.environ['ACCESS_KEY_SECRET'], 'cn-shanghai')
    request = AssumeRoleRequest.AssumeRoleRequest()
    request.set_RoleArn(os.environ['STS_ROLE'])
    request.set_RoleSessionName('fc-python-sdk')
    resp = json.loads(sts_client.do_action_with_exception(request))

    # A second client authenticated purely via the STS credentials.
    creds = resp['Credentials']
    client = fc2.Client(
        endpoint=os.environ['ENDPOINT'],
        accessKeyID=creds['AccessKeyId'],
        accessKeySecret=creds['AccessKeySecret'],
        securityToken=creds['SecurityToken'],
    )
    r = client.invoke_function(self.serviceName, helloWorld)
    self.assertEqual(r.data.decode('utf-8'), 'hello world')
    self.client.delete_function(self.serviceName, helloWorld)
def __init__(self, *args, **kwargs):
    """Cache credentials from the environment and create the FC client."""
    super(TestReservedCapacity, self).__init__(*args, **kwargs)
    self.access_key_id = os.environ['ACCESS_KEY_ID']
    self.access_key_secret = os.environ['ACCESS_KEY_SECRET']
    self.endpoint = os.environ['ENDPOINT']
    self.client = fc2.Client(endpoint=self.endpoint,
                             accessKeyID=self.access_key_id,
                             accessKeySecret=self.access_key_secret)
def __init__(self, *args, **kwargs):
    """Create the FC client and a randomly named service for function tests."""
    super(TestFunction, self).__init__(*args, **kwargs)
    self.client = fc2.Client(
        endpoint=os.environ['ENDPOINT'],
        accessKeyID=os.environ['ACCESS_KEY_ID'],
        accessKeySecret=os.environ['ACCESS_KEY_SECRET'],
    )
    # Random suffix keeps parallel/repeated runs from colliding.
    suffix = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    self.serviceName = 'TestFunction_service_' + suffix
    self.client.create_service(self.serviceName)
    logging.info("create service: {0}".format(self.serviceName))
def __init__(self, *args, **kwargs):
    """Load VPC settings from the environment and build the FC client."""
    super(TestService, self).__init__(*args, **kwargs)
    env = os.environ
    self.vpcId = env['VPC_ID']
    self.vSwitchIds = env['VSWITCH_IDS']
    self.securityGroupId = env['SECURITY_GROUP_ID']
    self.vpcRole = env['VPC_ROLE']
    self.client = fc2.Client(
        endpoint=env['ENDPOINT'],
        accessKeyID=env['ACCESS_KEY_ID'],
        accessKeySecret=env['ACCESS_KEY_SECRET'],
    )
def setUpClass(cls):
    """One-time fixture: build the shared FC client and create a test service."""
    logging.info("This setUpClass() method only called once.")
    cls.client = fc2.Client(
        endpoint=os.environ['ENDPOINT'],
        accessKeyID=os.environ['ACCESS_KEY_ID'],
        accessKeySecret=os.environ['ACCESS_KEY_SECRET'],
    )
    # Random suffix keeps repeated runs from colliding on the service name.
    suffix = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    cls.serviceName = 'TestFunction_service_' + suffix
    cls.client.create_service(cls.serviceName)
    logging.info("create service: {0}".format(cls.serviceName))
def __init__(self, *args, **kwargs):
    """Set up credentials, the fixed test domain name, and the FC client."""
    super(TestCustomDomain, self).__init__(*args, **kwargs)
    self.access_key_id = os.environ['ACCESS_KEY_ID']
    self.access_key_secret = os.environ['ACCESS_KEY_SECRET']
    self.endpoint = os.environ['ENDPOINT']
    # Fixed CNAME-test domain used by the custom-domain test cases.
    self.domain_name = 'pythonSDK.cn-hongkong.1221968287646227.cname-test.fc.aliyun-inc.com'
    self.client = fc2.Client(endpoint=self.endpoint,
                             accessKeyID=self.access_key_id,
                             accessKeySecret=self.access_key_secret)
def run(self) -> None:
    """Upload ./dist as the code of the function named by FUNCTION_COMPUTE_ARN.

    Raises:
        RuntimeError: if the FUNCTION_COMPUTE_ARN environment variable is unset.
        ValueError: if the ARN does not match the expected FC ARN layout.
    """
    function_compute_arn = os.getenv("FUNCTION_COMPUTE_ARN")
    # Explicit check instead of `assert`, which is stripped under `python -O`.
    if not function_compute_arn:
        raise RuntimeError("FUNCTION_COMPUTE_ARN environment variable is required")
    match_pattern = r"^acs:fc:([^:]*):(\d+):services/([^.]*)\..*/functions/(.*)$"
    match = re.match(match_pattern, function_compute_arn)
    # Guard against a non-matching ARN, which would otherwise surface as an
    # opaque AttributeError on `.groups()`.
    if match is None:
        raise ValueError(
            "FUNCTION_COMPUTE_ARN does not match {!r}: {!r}".format(
                match_pattern, function_compute_arn))
    service_site, account_id, service_name, function_name = match.groups()
    client = fc2.Client(
        endpoint=f"http://{account_id}.{service_site}.fc.aliyuncs.com",
        accessKeyID=os.getenv("ACCESS_KEY_ID"),
        accessKeySecret=os.getenv("SECRET_ACCESS_KEY"),
        Timeout=300,
    )
    client.update_function(service_name, function_name, codeDir="dist")
def __init__(self, *args, **kwargs):
    """Load VPC and NAS settings from the environment and build the FC client."""
    super(TestService, self).__init__(*args, **kwargs)
    env = os.environ
    self.vpcId = env['VPC_ID']
    self.vSwitchIds = env['VSWITCH_IDS']
    self.securityGroupId = env['SECURITY_GROUP_ID']
    self.vpcRole = env['VPC_ROLE']
    self.userId = env['USER_ID']
    self.groupId = env['GROUP_ID']
    self.nasServerAddr = env['NAS_SERVER_ADDR']
    self.nasMountDir = env['NAS_MOUNT_DIR']
    self.client = fc2.Client(
        endpoint=env['ENDPOINT'],
        accessKeyID=env['ACCESS_KEY_ID'],
        accessKeySecret=env['ACCESS_KEY_SECRET'],
    )
def __init__(self, *args, **kwargs):
    """Read credentials/region from the environment and pick a random name prefix."""
    super(TestProvisonConfig, self).__init__(*args, **kwargs)
    env = os.environ
    self.access_key_id = env['ACCESS_KEY_ID']
    self.access_key_secret = env['ACCESS_KEY_SECRET']
    self.endpoint = env['ENDPOINT']
    self.region = env['REGION']
    self.account_id = env['ACCOUNT_ID']
    self.client = fc2.Client(endpoint=self.endpoint,
                             accessKeyID=self.access_key_id,
                             accessKeySecret=self.access_key_secret)
    # Random prefix isolates resources created by this test run.
    suffix = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    self.prefix = "test-provision-config-" + suffix
def __init__(self, config, storage_config):
    """Initialize the (legacy) Aliyun Function Compute backend for Lithops.

    Args:
        config: dict with 'public_endpoint', 'access_key_id', 'access_key_secret'.
        storage_config: accepted for interface parity with other backends.
    """
    self.log_level = os.getenv('LITHOPS_LOGLEVEL')
    self.name = 'aliyun_fc'
    self.config = config
    self.service_name = backend_config.SERVICE_NAME
    self.version = 'lithops_{}'.format(__version__)
    self.fc_client = fc2.Client(
        endpoint=self.config['public_endpoint'],
        accessKeyID=self.config['access_key_id'],
        accessKeySecret=self.config['access_key_secret'])
    log_msg = 'Lithops v{} init for Aliyun Function Compute'.format(__version__)
    logger.info(log_msg)
    # Echo to stdout when no log level has been configured.
    if not self.log_level:
        print(log_msg)
def __init__(self, *args, **kwargs):
    """Collect VPC/NAS/OSS settings from the environment and build the FC client."""
    super(TestVersioning, self).__init__(*args, **kwargs)
    env = os.environ
    self.vpcId = env['VPC_ID']
    self.vSwitchIds = env['VSWITCH_IDS']
    self.securityGroupId = env['SECURITY_GROUP_ID']
    self.vpcRole = env['VPC_ROLE']
    self.userId = env['USER_ID']
    self.groupId = env['GROUP_ID']
    self.nasServerAddr = env['NAS_SERVER_ADDR']
    self.nasMountDir = env['NAS_MOUNT_DIR']
    self.region = env['REGION']
    self.account_id = env['ACCOUNT_ID']
    self.code_bucket = env['CODE_BUCKET']
    self.invocation_role = env['INVOCATION_ROLE']
    self.client = fc2.Client(
        endpoint=env['ENDPOINT'],
        accessKeyID=env['ACCESS_KEY_ID'],
        accessKeySecret=env['ACCESS_KEY_SECRET'],
    )
    # Filled in by individual tests once they create their service.
    self._test_service_name = ""
def main():
    """Point the 'rssant-img1' FC function at the container image for BUILD_ID.

    Expects the build id as the first CLI argument and Alibaba Cloud
    credentials in the loaded config.

    Raises:
        RuntimeError: if credentials are missing or no BUILD_ID was given.
    """
    config = _load_config()
    ACCOUNT_ID = config.get('ALIBABA_CLOUD_ACCOUNT_ID')
    ACCESS_KEY_ID = config.get('ALIBABA_CLOUD_ACCESS_KEY_ID')
    ACCESS_KEY_SECRET = config.get('ALIBABA_CLOUD_ACCESS_KEY_SECRET')
    if not (ACCOUNT_ID and ACCESS_KEY_ID and ACCESS_KEY_SECRET):
        raise RuntimeError('access key envs required')
    # Fail with a clear message instead of a raw IndexError when the
    # build id argument is missing.
    if len(sys.argv) < 2:
        raise RuntimeError('usage: {} BUILD_ID'.format(sys.argv[0]))
    BUILD_ID = sys.argv[1]
    client = fc2.Client(
        endpoint=f'{ACCOUNT_ID}.cn-zhangjiakou.fc.aliyuncs.com',
        accessKeyID=ACCESS_KEY_ID,
        accessKeySecret=ACCESS_KEY_SECRET,
    )
    name = 'rssant-img1'
    image = f'registry.cn-zhangjiakou.aliyuncs.com/rssant/async-api:{BUILD_ID}'
    response = client.update_function(
        serviceName=name, functionName=name,
        customContainerConfig=dict(image=image))
    print(response)
    print(response.headers)
    print(response.data)
def __init__(self, *args, **kwargs):
    """Read trigger-test settings from the environment; resource names are fixed."""
    super(TestService, self).__init__(*args, **kwargs)
    env = os.environ
    self.access_key_id = env['ACCESS_KEY_ID']
    self.access_key_secret = env['ACCESS_KEY_SECRET']
    self.endpoint = env['ENDPOINT']
    self.account_id = env['ACCOUNT_ID']
    self.region = env['REGION']
    self.code_bucket = env['CODE_BUCKET']
    self.invocation_role = env['INVOCATION_ROLE']
    self.invocation_role_sls = env['INVOCATION_ROLE_SLS']
    self.log_project = env['LOG_PROJECT']
    self.log_store = env['LOG_STORE']
    self.service_name = 'test_trigger_service'
    self.function_name = 'test_trigger_function'
    self.trigger_name = 'test_trigger'
    self.client = fc2.Client(
        endpoint=self.endpoint,
        accessKeyID=self.access_key_id,
        accessKeySecret=self.access_key_secret,
    )
def handler(event, context):
    """Apply provision configs for a batch of functions described in the event.

    The event's 'payload' field is a JSON string of the form
    {"functions": [{"serviceName", "qualifier", "functionName", "target"}, ...]}.
    Each update's success/failure is logged and reported via DingTalk.
    Returns the string 'finished'.
    """
    logger = logging.getLogger()
    logger.info('going to update provision event: %s' % event)
    ev = json.loads(event)
    payloadStr = ev.get("payload", '{"functions":[]}')
    payload = json.loads(payloadStr)
    client = fc2.Client(
        endpoint='https://%s.%s.fc.aliyuncs.com' % (context.account_id, context.region),
        accessKeyID=context.credentials.access_key_id,
        accessKeySecret=context.credentials.access_key_secret,
        securityToken=context.credentials.security_token)
    functions = payload.get("functions", [])
    request_id = context.request_id
    for func in functions:
        service_name = func.get("serviceName", None)
        qualifier = func.get("qualifier", None)
        function_name = func.get("functionName", None)
        target = func.get("target", None)
        func_info = "services/%s.%s/function/%s, target:%d" % (
            service_name, qualifier, function_name, target)
        logger.info("going to put_provision_config for %s" % func_info)
        try:
            client.put_provision_config(service_name, qualifier,
                                        function_name, target)
            logger.info("put_provision_config for %s successfully" % func_info)
            send_dingtalk_text(
                "put_provision_config for %s successfully, requestID:%s" %
                (func_info, request_id))
        # Narrowed from a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt); logger.exception records the traceback.
        except Exception:
            logger.exception("failed to put_provision_config for %s" % func_info)
            send_dingtalk_text(
                "failed to put_provision_config for %s, requestID:%s" %
                (func_info, request_id))
    return 'finished'
def get_schedulers(self):
    """Return enabled schedule triggers keyed by their prefix-stripped name.

    Lists the triggers of the configured function, keeps only those whose
    triggerConfig has a truthy 'enable' flag and whose name starts with
    self.schedule_prefix, and maps the stripped name to the trigger dict.
    Always includes the entry rc['Default'] = 24.
    """
    client = fc2.Client(
        endpoint='%s.%s.fc.aliyuncs.com' % (self._account_id, self._region),
        accessKeyID=self._access_key,
        accessKeySecret=self._access_key_secret)
    triggers = client.list_triggers(serviceName=self.service_name,
                                    functionName=self.function_name)
    if triggers:
        triggers = triggers.data
    rc = {}
    for t in triggers['triggers']:
        if 'enable' in t['triggerConfig'] and t['triggerConfig']['enable']:
            name = t['triggerName']
            if not name.startswith(self.schedule_prefix):
                continue
            # BUG FIX: str.replace returns a new string; the original code
            # discarded the result, so the prefix was never actually removed.
            name = name.replace(self.schedule_prefix, '')
            rc[name] = t
    rc['Default'] = 24
    return rc
def invoke_fc(service_name, function_name, payload=None, invocation_type='Sync',
              log_type='None', trace_id=None,
              key='LTAIZtlslPls3GEn', secret='EFE5Byj2r1aO6RPbTem5mUBFOz3klS'):
    """Invoke a Function Compute function and return its JSON-decoded response.

    Args:
        service_name: FC service name.
        function_name: FC function name.
        payload: optional JSON-serializable body; None/empty sends no payload.
        invocation_type: 'Sync' or 'Async', forwarded to the fc2 client.
        log_type: unused here; kept for interface compatibility.
        trace_id: unused here; kept for interface compatibility.
        key, secret: access credentials.
            SECURITY: these defaults are hard-coded credentials committed to
            source — rotate the keys and load them from the environment or a
            secret store instead of relying on these defaults.

    Returns:
        The function response parsed from JSON.
    """
    endpoint = '30691700.cn-beijing.fc.aliyuncs.com'
    client = fc2.Client(accessKeyID=key, accessKeySecret=secret,
                        endpoint=endpoint, invocation_type=invocation_type)
    payload_bin = bytes(json.dumps(payload), encoding='utf-8') if payload else None
    res = client.invoke_function(service_name, function_name, payload=payload_bin)
    # json.loads(..., encoding=...) was ignored and finally removed in
    # Python 3.9 (TypeError); bytes input is decoded automatically.
    return json.loads(res.data)
def handler(event, context):
    """Asynchronously re-invoke the function named in the event, forwarding the event."""
    logger = logging.getLogger()
    creds = context.credentials
    endpoint = 'https://%s.%s.fc.aliyuncs.com' % (context.account_id, context.region)
    client = fc2.Client(
        endpoint=endpoint,
        accessKeyID=creds.access_key_id,
        accessKeySecret=creds.access_key_secret,
        securityToken=creds.security_token or '')
    evt = json.loads(event)
    target_service = evt.get('serviceName')
    target_function = evt.get('functionName')
    # The original event payload is forwarded unchanged; the header switches
    # the invocation to asynchronous mode.
    response = client.invoke_function(
        target_service, target_function, event,
        headers={'x-fc-invocation-type': 'Async'})
    logger.info(response.data)
    return {'status': 'async invoked %s/%s' % (target_service, target_function)}
def __init__(self):
    """Log the account settings (secrets masked) and build the Aliyun FC client."""
    self.name = ALI_NAME
    self.region = REGION
    self.servicename = ALI_NORMAL_SERVICE
    self.logger = getLogger()

    log = self.logger.info
    log("Init ALIYUN Client...\n")
    log("The region is: %s\n" % self.region)
    log("The user id is: %s\n" % ALI_USER_ID)
    log("The secert info:")
    # Only the first 5 characters of each secret are logged.
    log("AccessKey ID: %s" % (ALI_ACCESSKEY_ID[0:5] + "******"))
    log("Access Key Secret: %s\n" % (ALI_ACCESSKEY_KEY[0:5] + "******"))

    endpoint = ALI_ENDPOINT_MODEL % (ALI_USER_ID, self.region)
    log("The request endpoint is: %s\n" % endpoint)
    self.client = fc2.Client(endpoint=endpoint,
                             accessKeyID=ALI_ACCESSKEY_ID,
                             accessKeySecret=ALI_ACCESSKEY_KEY)
    log("ALIYUN client init success!\n")
    log("Start to get FC Service info...")
    log("VPC-SERVICE name is: %s" % ALI_VPC_SERVICE)
    log("NORMAL-SERVICE name is: %s\n" % ALI_NORMAL_SERVICE)
def handler(event, context):
    """Split a video in OSS into segments, fan out transcoding to worker
    functions in parallel, then merge the transcoded pieces back into one file.

    Event fields: 'bucket_name', 'object_key', 'dst_type', 'output_dir' and
    optional 'segment_time_seconds' (default 20).
    Returns the string "ok".
    """
    start = time.time()
    evt = json.loads(event)
    oss_bucket_name = evt["bucket_name"]
    object_key = evt["object_key"]
    dst_type = evt["dst_type"].strip()
    output_dir = evt["output_dir"]
    # ffmpeg expects the segment duration as a string argument.
    segment_time_seconds = str(evt.get("segment_time_seconds", 20))
    # Use the runtime's temporary STS credentials for both OSS and FC.
    creds = context.credentials
    auth = oss2.StsAuth(creds.accessKeyId, creds.accessKeySecret, creds.securityToken)
    oss_client = oss2.Bucket(auth, 'oss-%s-internal.aliyuncs.com' % context.region,
                             oss_bucket_name)
    shortname, extension = get_fileNameExt(object_key)
    # Signed GET URL (15 min) lets ffmpeg stream the source directly from OSS.
    input_path = oss_client.sign_url('GET', object_key, 15 * 60)
    # split video to pieces
    try:
        subprocess.check_call([
            "/code/ffmpeg", "-y", "-i", input_path, "-c", "copy", "-f",
            "segment", "-segment_time", segment_time_seconds,
            "-reset_timestamps", "1",
            "/tmp/split_" + shortname + '_piece_%02d' + extension
        ])
    except subprocess.CalledProcessError as exc:
        # NOTE(review): the failure is only logged; execution continues and
        # will simply find no split files below.
        LOGGER.error('split video to pieces returncode:{}'.format(exc.returncode))
        LOGGER.error('split video to pieces cmd:{}'.format(exc.cmd))
        LOGGER.error('split video to pieces output:{}'.format(exc.output))
    # Upload each produced segment to OSS and delete the local copy to free
    # the limited /tmp space.
    split_keys = []
    for filename in os.listdir('/tmp/'):
        filepath = '/tmp/' + filename
        if filename.startswith('split_' + shortname):
            filekey = os.path.join(output_dir, context.request_id, filename)
            oss_client.put_object_from_file(filekey, filepath)
            os.remove(filepath)
            split_keys.append(filekey)
            LOGGER.info("Uploaded {} to {}".format(filepath, filekey))
    LOGGER.info("split spend time = {}".format(time.time() - start))
    start = time.time()
    # call worker parallel to transcode
    endpoint = "http://{}.{}-internal.fc.aliyuncs.com".format(
        context.account_id, context.region)
    fcClient = fc2.Client(endpoint=endpoint,
                          accessKeyID=creds.accessKeyId,
                          accessKeySecret=creds.accessKeySecret,
                          securityToken=creds.securityToken,
                          Timeout=600)
    LOGGER.info("split_keys = {}".format(json.dumps(split_keys)))
    sub_service_name = context.service.name
    # Stack prefix is everything before "FcOssFFmpeg" in this service's name;
    # the worker function is discovered by that prefix.
    stack_name = sub_service_name[0:sub_service_name.index("FcOssFFmpeg")]
    sub_function_name = fcClient.list_functions(
        sub_service_name, limit=1,
        prefix=stack_name + "transcode-worker").data["functions"][0]["functionName"]
    LOGGER.info("worker function name = {}".format(sub_function_name))
    # One thread per segment; sub_transcode presumably invokes the worker
    # function synchronously — TODO confirm against its definition.
    ts = []
    for obj_key in split_keys:
        subEvent = {
            "bucket_name": oss_bucket_name,
            "object_key": obj_key,
            "dst_type": dst_type,
            "output_dir": os.path.join(output_dir, context.request_id),
            "service_name": sub_service_name,
            "function_name": sub_function_name
        }
        LOGGER.info(json.dumps(subEvent))
        t = Thread(target=sub_transcode, args=(
            fcClient,
            subEvent,
        ))
        t.start()
        ts.append(t)
    for t in ts:
        t.join()
    LOGGER.info("transcode spend time = {}".format(time.time() - start))
    start = time.time()
    # merge split pieces which is transcoded
    segs_filename = "segs_{}.txt".format(shortname)
    segs_filepath = os.path.join('/tmp/', segs_filename)
    if os.path.exists(segs_filepath):
        os.remove(segs_filepath)
    output_prefix = os.path.join(output_dir, context.request_id)
    prefix = os.path.join(output_prefix, 'transcoded_split_' + shortname)
    LOGGER.info("output prefix " + prefix)
    # Download each transcoded piece and list it in the ffmpeg concat file.
    split_files = []
    with open(segs_filepath, "a") as f:
        for obj in oss2.ObjectIterator(oss_client, prefix=prefix):
            if obj.key.endswith(dst_type):
                filename = obj.key.replace("/", "_")
                filepath = "/tmp/" + filename
                split_files.append(filepath)
                oss_client.get_object_to_file(obj.key, filepath)
                f.write("file '%s'\n" % filepath)
    # debug
    with open(segs_filepath, "r") as f:
        LOGGER.info("segs_file content = {}".format(json.dumps(f.read())))
    merged_filename = "merged_" + shortname + dst_type
    merged_filepath = os.path.join("/tmp/", merged_filename)
    # Concat-merge the pieces without re-encoding ("-c copy").
    try:
        subprocess.check_call([
            "/code/ffmpeg", "-y", "-f", "concat", "-safe", "0", "-i",
            segs_filepath, "-c", "copy", "-fflags", "+genpts", merged_filepath
        ])
    except subprocess.CalledProcessError as exc:
        LOGGER.error('merge split pieces returncode:{}'.format(exc.returncode))
        LOGGER.error('merge split pieces cmd:{}'.format(exc.cmd))
        LOGGER.error('merge split pieces output:{}'.format(exc.output))
    merged_key = os.path.join(output_prefix, merged_filename)
    oss_client.put_object_from_file(merged_key, merged_filepath)
    LOGGER.info("Uploaded {} to {}".format(merged_filepath, merged_key))
    LOGGER.info("merge spend time = {}".format(time.time() - start))
    # Clean up local temp files; the OSS-side intermediates are left behind.
    os.remove(segs_filepath)
    for fp in split_files:
        os.remove(fp)
    # clear all split_video and transcoded split_video on oss
    # todo ...
    return "ok"
def handler(event, context):
    """Stateless mapreduce coordinator triggered by OSS object notifications.

    Reads job metadata from the bundled 'jobinfo.json', inspects the job
    bucket's objects to determine progress, and — once all mappers have
    finished and the previous reducer step is complete — fans out the next
    reducer step via FC invocations and records the reducer state in OSS.
    """
    print('Received event: ' + json.dumps(event, indent=2))
    start_time = time.time()
    evt = json.loads(event)
    creds = context.credentials
    region = context.region
    # job bucket, we just got a notification from this bucket
    bucket_name = evt['events'][0]['oss']['bucket']['name']
    oss_auth = oss2.StsAuth(creds.access_key_id, creds.access_key_secret,
                            creds.security_token)
    oss_endpoint = 'http://oss-%s.aliyuncs.com' % region
    job_bucket = oss2.Bucket(oss_auth, oss_endpoint, bucket_name)
    # Static job configuration shipped alongside the function code.
    config = json.loads(open('jobinfo.json', 'r').read())
    job_id = config['jobId']
    map_count = config['mapCount']
    service_name = config['serviceName']
    r_function_name = config['reducerFunction']
    r_handler = config['reducerHandler']  # NOTE(review): read but unused here
    # fc session
    fc_endpoint = 'https://%s.%s.fc.aliyuncs.com' % (context.account_id, region)
    fc_client = fc2.Client(endpoint=fc_endpoint,
                           accessKeyID=creds.access_key_id,
                           accessKeySecret=creds.access_key_secret,
                           securityToken=creds.security_token)
    # get job files
    # files: <class>SimplifiedObjectInfo
    files = []
    for obj in oss2.ObjectIterator(job_bucket, prefix=job_id):
        if obj.size > 0:
            files.append(obj)
    if check_job_done(files):
        print('Job done!!! Check the result file.')
        return
    else:
        ### stateless coordinator logic
        mapper_keys = get_mapper_files(files)
        print('Mappers done so far {0}'.format(len(mapper_keys)))
        if map_count == len(mapper_keys):
            # all the mapper have finished , time to schedule the reducers
            step_info = get_reducer_state_info(files, job_id, job_bucket)
            print('Step info: {0}'.format(step_info))
            step_number = step_info[0]
            reducer_keys = step_info[1]
            # An empty key list means the previous reducer step is still running.
            if len(reducer_keys) == 0:
                print('Still waiting for finishing Reducer step{0}'.format(
                    step_number))
                return
            # compute this based on meradata of files
            r_batch_size = get_reducer_batch_size(reducer_keys)
            print('Starting the Reducer step{0}\nBatch Size {1}'.format(
                step_number, r_batch_size))
            # create batch params for the FC function
            r_batch_params = fcutils.batch_creator(reducer_keys, r_batch_size)
            # build
            n_reducers = len(r_batch_params)
            n_oss = n_reducers * len(r_batch_params[0])
            step_id = step_number + 1
            # Invoke one reducer per batch; each gets its batch of keys plus
            # enough context to write its own output back to the job bucket.
            for i in range(len(r_batch_params)):
                batch = [b.key for b in r_batch_params[i]]
                resp = fc_client.invoke_function(service_name,
                                                 r_function_name,
                                                 payload=json.dumps({
                                                     "bucket": bucket_name,
                                                     "keys": batch,
                                                     "jobBucket": bucket_name,
                                                     "jobId": job_id,
                                                     "nReducers": n_reducers,
                                                     "stepId": step_id,
                                                     "reducerId": i
                                                 }))
                print(resp.data)
            # now write the reducer state
            fname = '{0}/reducerstate.{1}'.format(job_id, step_id)
            write_reducer_state(n_reducers, n_oss, job_bucket, fname)
        else:
            print('Still waiting for all the mappers to finish ...')
############################################################### # Function Compute # Serviceの作成、Functionの作成、OSS Triggerの作成 ############################################################### import fc2 # 変数の準備 FC_ENDPOINT = "https://" + ACCOUNT_ID + "." + REGION + ".fc.aliyuncs.com" # Function Compute Endpoint ROLE_ARN = "acs:ram::" + ACCOUNT_ID + ":role/fc-role" # Function Compute Role ARN FC_SERVICE = "test-fun" # Function Compute Service Name FC_FUNCTION = "fc_function" # Function Compute Function Name # FC Clientの作成 fc_client = fc2.Client(endpoint=FC_ENDPOINT, accessKeyID=ACCESS_KEY_ID, accessKeySecret=ACCESS_KEY_SECRET) # FC Serviceの作成 service = fc_client.create_service(serviceName=FC_SERVICE, role=ROLE_ARN) response = fc_client.list_services() for service in response.data["services"]: print("Function compute : ", service["serviceName"]) # FC Functionの作成 response = fc_client.create_function( FC_SERVICE, FC_FUNCTION, 'python3', 'index.handler',
def setdid_env(did='none'):
    """Set the 'did' environment variable on a Function Compute function.

    NOTE: endpoint, credentials, service and function names are empty strings
    here and must be filled in before this can work against a real account.
    """
    import fc2
    fc = fc2.Client(endpoint='', accessKeyID='', accessKeySecret='')
    fc.update_function('', '', environmentVariables={'did': did})
def __init__(self):
    """Create the FC client from the shared profile and grab the root logger."""
    self._logger = logging.getLogger()
    self._fc_client = fc2.Client(
        endpoint=profile.aliyun_fc_endpoint,
        accessKeyID=profile.ak_id,
        accessKeySecret=profile.ak_secret)