def saveToCassandra(dstream, keyspace, table, columns=None, write_conf=None, row_format=None):
    """Save the contents of a DStream to a Cassandra table via the JVM helper.

    Parameters:
        dstream: the pyspark DStream whose RDDs are written out.
        keyspace: name of the target Cassandra keyspace.
        table: name of the target Cassandra table.
        columns: optional list of column names to save; when None, all
            columns are written (null is passed to the JVM side).
        write_conf: optional write-configuration object; its ``__dict__`` is
            converted to a Java object, or null when not given.
        row_format: optional row-format hint forwarded unchanged to the
            Scala helper.
    """
    sc = dstream._sc
    gateway = sc._gateway

    # Instantiate the Scala-side bridge reflectively through the context
    # class loader of the current JVM thread.
    loader = sc._jvm.java.lang.Thread.currentThread().getContextClassLoader()
    helper = loader.loadClass("pyspark_cassandra.PythonHelper").newInstance()

    # Convert the Python-side options into their Java representations;
    # both fall back to null so the connector applies its own defaults.
    java_write_conf = as_java_object(gateway, write_conf.__dict__) if write_conf else None
    java_columns = as_java_array(gateway, "String", columns) if columns else None

    helper.saveToCassandra(
        dstream._jdstream, keyspace, table, java_columns, java_write_conf, row_format
    )
def saveToCassandra(dstream, keyspace, table, columns=None, row_format=None,
                    keyed=None, write_conf=None, **write_conf_kwargs):
    """Save the contents of a DStream to a Cassandra table.

    Parameters:
        dstream: the pyspark DStream whose RDDs are written out.
        keyspace: name of the target Cassandra keyspace.
        table: name of the target Cassandra table.
        columns: optional list of column names to save; when None, all
            columns are written (null is passed to the JVM side).
        row_format: optional row-format hint forwarded to the helper.
        keyed: optional flag forwarded to the helper (semantics defined by
            the Scala side — not visible from here).
        write_conf: optional WriteConf instance used as a base configuration.
        **write_conf_kwargs: individual write-configuration overrides merged
            into ``write_conf`` via ``WriteConf.build``.

    Returns:
        Whatever the JVM-side ``saveToCassandra`` call returns.
    """
    ctx = dstream._ssc._sc
    gateway = ctx._gateway

    # Merge the explicit WriteConf with any keyword overrides, then turn
    # the resulting settings into a Java map for the Scala helper.
    conf = WriteConf.build(write_conf, **write_conf_kwargs)
    java_write_conf = as_java_object(gateway, conf.settings())

    # Columns go across as a Java String[]; null means "all columns".
    java_columns = as_java_array(gateway, "String", columns) if columns else None

    return helper(ctx).saveToCassandra(
        dstream._jdstream, keyspace, table, java_columns,
        row_format, keyed, java_write_conf,
    )