Example #1
def test_adam_optimizer_options():
    # Generate a random training DataFrame and serialize a small TensorFlow graph
    processed = generate_random_data()
    mg = build_graph(create_random_model)
    # Pack the Adam hyperparameters into the config SparkAsyncDL accepts via optimizerOptions
    options = build_adam_config(learning_rate=0.1, beta1=0.85, beta2=0.98, epsilon=1e-8)
    spark_model = SparkAsyncDL(
        inputCol='features',
        tensorflowGraph=mg,
        tfInput='x:0',
        tfLabel='y:0',
        tfOutput='outer/Sigmoid:0',
        tfOptimizer='adam',
        tfLearningRate=.1,
        iters=25,
        partitions=4,
        predictionCol='predicted',
        labelCol='label',
        optimizerOptions=options
    )
    handle_assertions(spark_model, processed)
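The helper functions here (generate_random_data, create_random_model, handle_assertions) are assumed to be defined in the surrounding test module; the sparkflow entry points themselves come from the library, as in this minimal import sketch:

from sparkflow.graph_utils import build_graph, build_adam_config
from sparkflow.tensorflow_async import SparkAsyncDL

build_adam_config packages the Adam hyperparameters into the configuration object that SparkAsyncDL consumes through its optimizerOptions parameter.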
Example #2
    # create spark session and train with final_df
    spark = SparkSession.builder \
            .appName(task+'flow') \
            .getOrCreate()

    # sc.stop() ## stop?

    mg = build_graph(small_model)
    # Assemble the feature columns and one-hot encode the label column
    va = VectorAssembler(inputCols=final_df.columns[1:151],
                         outputCol='features')
    encoded = OneHotEncoder(inputCol='result',
                            outputCol='labels',
                            dropLast=False)
    adam_config = build_adam_config(learning_rate=0.001,
                                    beta1=0.9,
                                    beta2=0.999)

    # Train the serialized graph asynchronously, reusing the Adam config built above
    spark_model = SparkAsyncDL(inputCol='features',
                               tensorflowGraph=mg,
                               tfInput='x:0',
                               tfLabel='y:0',
                               tfOutput='out:0',
                               tfLearningRate=.001,
                               iters=20,
                               predictionCol='predicted',
                               labelCol='labels',
                               verbose=1,
                               optimizerOptions=adam_config)

    ckptpath = os.path.join(ckptdir, task)
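
Example #2 stops just before training. A minimal continuation, assuming final_df, task, and ckptdir are defined earlier in the script, would chain the assembler, encoder, and SparkAsyncDL stages into a standard pyspark.ml Pipeline and persist the fitted result under the checkpoint path:

    # Chain feature assembly, label encoding, and asynchronous training
    # (requires: from pyspark.ml import Pipeline)
    pipeline = Pipeline(stages=[va, encoded, spark_model])
    fitted_pipeline = pipeline.fit(final_df)

    # Save the fitted pipeline under the per-task checkpoint path
    fitted_pipeline.write().overwrite().save(ckptpath)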