def code():
    """Submit the blocked ALS join job over the generated ALS input."""
    jar_dir = "%s/flink-jobs/target" % generators.ALS.repo.get_absolute_path()
    run_jar(
        jar_dir,
        "flink-jobs-*.jar",
        args=[
            "master", 15, 1, 10, 100, "rand",
            "%s/als-temp/" % get_hdfs_address(),
            als_in,
            als_out,
        ],
        clazz="com.github.projectflink.als.ALSJoinBlocking",
    )
def code():
    """Run the DataflowWordCount example through the flink-dataflow runner."""
    # "--" is the Flink 0.8 way of specifying options to user programs.
    job_args = [
        "--",
        "--input=%s" % wordcount_in,
        "--output=%s" % self.wordcount_out,
    ]
    run_jar(
        "%s/target/" % self.repo.get_absolute_path(),
        "flink-dataflow-*-SNAPSHOT.jar",
        args=job_args,
        clazz="com.dataartisans.flink.dataflow.examples.DataflowWordCount",
    )
def code():
    """Run the StreamingPipeline example of the flink-dataflow runner."""
    jar_dir = "%s/target/" % self.repo.get_absolute_path()
    run_jar(
        jar_dir,
        "flink-dataflow-*-SNAPSHOT.jar",
        args=[wordcount_in, self.wordcount_out],
        clazz="com.dataartisans.flink.dataflow.examples.StreamingPipeline",
    )
def code():
    """Run the bundled WindowWordCount streaming example."""
    run_jar(
        "%s/examples/streaming/" % get_flink_dist_path(),
        "WindowWordCount.jar",
        # Trailing 10000 is a numeric parameter of the example
        # (presumably the window size — confirm against the example's usage).
        args=[self.wordcount_in, self.wordcount_out, 10000],
        clazz="org.apache.flink.streaming.examples.windowing.WindowWordCount",
    )
def code():
    """Run the bundled batch WordCount example with --input/--output flags."""
    jar_dir = "%s/examples/batch/" % get_flink_dist_path()
    run_jar(
        jar_dir,
        "WordCount.jar",
        args=["--input", self.wordcount_in, "--output", self.wordcount_out],
        clazz="org.apache.flink.examples.java.wordcount.WordCount",
    )
def code():
    """Upload and run the 0.8-incubating WordCount jar on HDFS paths."""
    wc_input = "hdfs://%s:50040/generated-wc.txt" % env.master
    wc_output = "hdfs://%s:50040/tmp/wc-out/" % env.master
    run_jar(
        path="experiments/wordcount_files/",
        jar_name="flink-java-examples-0.8-incubating-SNAPSHOT-WordCount.jar",
        args=[wc_input, wc_output],
        upload=True,
    )
def code():
    """Generate the ALS data set from the configured matrix parameters."""
    generation_params = [
        self.num_rows,
        self.num_cols,
        self.mean_entry,
        self.variance_entry,
        self.mean_num_row_entries,
        self.variance_num_row_entries,
        self.out_path,
    ]
    run_jar(
        "%s/flink-jobs/target" % self.repo.get_absolute_path(),
        "flink-jobs-*.jar",
        args=generation_params,
        clazz="com.github.projectflink.als.ALSDataGeneration",
    )
def code():
    """Generate lineitem records (Avro job) with scale/parallelism flags."""
    flags = [
        "-s", self.scale_factor,   # scale factor
        "-p", self.parallelism,    # parallelism
        "-o", self.out,            # output location
    ]
    run_jar(
        "%s/flink-jobs/target" % self.repo.get_absolute_path(),
        "flink-jobs-*.jar",
        args=flags,
        clazz="com.github.projectflink.avro.GenerateLineitems",
    )
def code():
    """Upload and run the 0.8-incubating WordCount jar against HDFS data."""
    input_path = "hdfs://%s:50040/generated-wc.txt" % env.master
    output_path = "hdfs://%s:50040/tmp/wc-out/" % env.master
    run_jar(
        path="experiments/wordcount_files/",
        jar_name="flink-java-examples-0.8-incubating-SNAPSHOT-WordCount.jar",
        args=[input_path, output_path],
        upload=True,
    )
def code():
    """Kick off the blocked ALS join over the generated ALS input."""
    als_temp = "%s/als-temp/" % get_hdfs_address()
    run_jar(
        "%s/flink-jobs/target" % generators.ALS.repo.get_absolute_path(),
        "flink-jobs-*.jar",
        # "master" plus the numeric values are job parameters of
        # ALSJoinBlocking — confirm their meaning against that class.
        args=["master", 15, 1, 10, 100, "rand", als_temp, als_in, als_out],
        clazz="com.github.projectflink.als.ALSJoinBlocking",
    )
def code():
    """Run the DataflowWordCount via the flink-dataflow runner jar."""
    jar_dir = "%s/target/" % self.repo.get_absolute_path()
    run_jar(
        jar_dir,
        "flink-dataflow-*-SNAPSHOT.jar",
        args=[
            "--",  # Flink 0.8 way of specifying options to user programs
            "--input=%s" % wordcount_in,
            "--output=%s" % self.wordcount_out,
        ],
        clazz="com.dataartisans.flink.dataflow.examples.DataflowWordCount",
    )
def code():
    """Run the Grep job over the input with a fixed list of search words."""
    run_jar(
        "~/flink-perf/flink-jobs/target",
        "flink-jobs-*.jar",
        args=[grep_in, grep_out, "these", "are", "test", "words"],
        clazz="com.github.projectflink.grep.GrepJob",
    )
def code():
    """Generate a text data set with the configured dop, path and size."""
    jar_dir = "%s/flink-jobs/target" % self.repo.get_absolute_path()
    run_jar(
        jar_dir,
        "flink-jobs-*.jar",
        args=[self.dop, self.out_path, self.size_gb],
        clazz="com.github.projectflink.generators.Text",
    )
def code():
    """Run the streaming WordCount example from the Flink distribution."""
    run_jar(
        "%s/examples/streaming/" % get_flink_dist_path(),
        "WordCount.jar",
        args=["--input", self.wordcount_in, "--output", self.wordcount_out],
        clazz="org.apache.flink.streaming.examples.wordcount.WordCount",
    )
def code():
    """Run the GoogleStreamingPipeline example via the flink-dataflow runner.

    NOTE(review): the original clazz was
    "com.dataartisans.flink.dataflow.GoogleStreamingPipeline.examples",
    which is not a valid fully-qualified class name (the class segment
    precedes the package's ``examples`` segment and the final segment is
    lowercase). Corrected to match the package layout used by the sibling
    jobs (``...examples.StreamingPipeline``, ``...examples.DataflowWordCount``).
    """
    run_jar(
        "%s/target/" % self.repo.get_absolute_path(),
        "flink-dataflow-*-SNAPSHOT.jar",
        args=[wordcount_in, self.wordcount_out],
        clazz="com.dataartisans.flink.dataflow.examples.GoogleStreamingPipeline",
    )
def code():
    """Run the pipeline, choosing the implicit- or explicit-combine class."""
    job_class = (
        self.implicit_clazz if self.implicit_combine else self.explicit_clazz
    )
    run_jar(
        "%s/target/" % self.repo.get_absolute_path(),
        "flink-dataflow-*-SNAPSHOT.jar",
        args=[wordcount_in, self.wordcount_out],
        clazz=job_class,
    )
def code():
    """Run the streaming WordCount built in the flink-staging source tree."""
    jar_dir = (
        "%s/flink-staging/flink-streaming/flink-streaming-examples/target/"
        % get_flink_path()
    )
    run_jar(
        jar_dir,
        "flink-streaming-*-WordCount.jar",
        args=[self.wordcount_in, self.wordcount_out],
        clazz="org.apache.flink.streaming.examples.wordcount.WordCount",
    )
def code():
    """Run the WindowWordCount streaming example from the distribution."""
    jar_dir = "%s/examples/streaming/" % get_flink_dist_path()
    # Trailing 10000 is a numeric parameter of the example
    # (presumably the window size — confirm).
    run_jar(
        jar_dir,
        "WindowWordCount.jar",
        args=[self.wordcount_in, self.wordcount_out, 10000],
        clazz="org.apache.flink.streaming.examples.windowing.WindowWordCount",
    )
def code():
    """Run the batch java WordCount example jar.

    NOTE(review): the original passed ``[wordcount_out, wordcount_in]`` —
    the result path before the text path. The Flink java WordCount example
    expects ``<input> <output>``, and every other WordCount invocation in
    this file passes the input first; reversed order would read from the
    output path and overwrite the input. Corrected to input-then-output.
    """
    run_jar(
        "%s/examples/" % get_flink_dist_path(),
        "flink-java-*WordCount.jar",
        args=[wordcount_in, wordcount_out],
        clazz="org.apache.flink.examples.java.wordcount.WordCount",
    )
def code():
    """Run the WordCount pipeline with the configured combine variant."""
    if self.implicit_combine:
        chosen = self.implicit_clazz
    else:
        chosen = self.explicit_clazz
    run_jar(
        "%s/target/" % self.repo.get_absolute_path(),
        "flink-dataflow-*-SNAPSHOT.jar",
        args=[wordcount_in, self.wordcount_out],
        clazz=chosen,
    )
def code():
    """Run the Grep job with a fixed set of search words."""
    search_words = ["these", "are", "test", "words"]
    run_jar(
        "~/flink-perf/flink-jobs/target",
        "flink-jobs-*.jar",
        args=[grep_in, grep_out] + search_words,
        clazz="com.github.projectflink.grep.GrepJob",
    )
def code():
    """Run the Avro CompareJob over out_path and in_path."""
    jar_dir = "%s/flink-jobs/target" % generators.Avro.repo.get_absolute_path()
    run_jar(
        jar_dir,
        "flink-jobs-*.jar",
        args=[self.out_path, self.in_path],
        clazz="com.github.projectflink.avro.CompareJob",
    )