Example #1
0
 def __init__( self, application ):
     """
     Register the command-line options for creating a Spark cluster: the number of
     slaves, the instance types for slaves and master, and an optional EBS volume size.

     :param application: the application object passed through to the superclass
     """
     super( CreateSparkCluster, self ).__init__( application )
     self.option( '--num-slaves', '-s', metavar='NUM',
                  type=int, default=1,
                  help='The number of slaves to start.' )
     # We want --instance-type for the slaves and --master-instance-type for the master and we
     # want --master-instance-type to default to the value of --instance-type. The dest names
     # are therefore deliberately swapped: --instance-type stores into 'slave_instance_type'
     # while --master-instance-type stores into 'instance_type' (the dest the superclass uses).
     super( CreateSparkCluster, self ).option(
         '--instance-type', '-t', metavar='TYPE', dest='slave_instance_type',
         default=SparkBox.recommended_instance_type( ),
         help='The type of EC2 instance to launch for the slaves, e.g. t2.micro, '
              'm3.small, m3.medium, or m3.large etc. ' )
     self.option( '--master-instance-type', metavar='TYPE', dest='instance_type',
                  help='The type of EC2 instance to launch for the master, e.g. t2.micro, '
                       'm3.small, m3.medium, or m3.large etc. The default is the instance type '
                       'used for the slaves.' )
     # type=int so a value given on the command line is parsed to an int, matching the
     # integer default (without it, argparse-style parsing would store a string).
     self.option( '--ebs-volume-size', metavar='GB', type=int, default=0,
                  help='The size in GB of an EBS volume to be attached to each node for '
                       'persistent data such as that backing HDFS. By default HDFS will be '
                       'backed by instance store (ephemeral) only, or the root volume for '
                       'instance types that do not offer instance store.' )