def make_flow(obj, token, name, tags, namespace, max_workers, workflow_timeout, is_project):
    """Build and return a StepFunctions deployment object for *obj.flow*.

    Opens the flow's datastore in write mode, verifies it is S3-backed
    (a hard requirement for AWS Step Functions), attaches the AWS Batch
    decorator to every step, packages the flow's code, uploads the
    package, and wires everything into a StepFunctions instance.

    Raises:
        MetaflowException: if the configured datastore is not S3.
    """
    flow_datastore = obj.datastore(obj.flow.name,
                                   mode='w',
                                   metadata=obj.metadata,
                                   event_logger=obj.event_logger,
                                   monitor=obj.monitor)
    # Step Functions state machines can only fetch code/artifacts from S3.
    if flow_datastore.TYPE != 's3':
        raise MetaflowException("AWS Step Functions requires --datastore=s3.")

    # Every step runs on AWS Batch, so force-attach the Batch decorator
    # before the per-step decorators are initialized.
    decorators._attach_decorators(obj.flow, [BatchDecorator.name])
    decorators._init_step_decorators(
        obj.flow, obj.graph, obj.environment, obj.datastore, obj.logger)

    # Package the user code and upload it so remote tasks can download it.
    obj.package = MetaflowPackage(
        obj.flow, obj.environment, obj.echo, obj.package_suffixes)
    code_package_url = flow_datastore.save_data(
        obj.package.sha, TransformableObject(obj.package.blob))

    return StepFunctions(name,
                         obj.graph,
                         obj.flow,
                         obj.package,
                         code_package_url,
                         token,
                         obj.metadata,
                         obj.datastore,
                         obj.environment,
                         obj.event_logger,
                         obj.monitor,
                         tags=tags,
                         namespace=namespace,
                         max_workers=max_workers,
                         username=get_username(),
                         workflow_timeout=workflow_timeout,
                         is_project=is_project)
def _save_package_once(cls, datastore, package):
    """Upload *package* to *datastore* at most once, caching on the class.

    The resulting URL and sha are memoized as class attributes
    (``cls.package_url`` / ``cls.package_sha``) so repeated calls reuse
    the first upload instead of saving the package again.
    """
    # Already uploaded during an earlier call — nothing to do.
    if cls.package_url is not None:
        return
    cls.package_url = datastore.save_data(
        package.sha, TransformableObject(package.blob))
    cls.package_sha = package.sha