Spark operator example, Java implementation (reduce action)

package day06;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import java.util.Arrays;
public class MyAction_Java {
    public static void myReduce(JavaSparkContext jsc){
        JavaRDD<String> RDD1 = jsc.parallelize(Arrays.asList("aa", "aa", "bb", "cc", "dd"));
        JavaRDD<Integer> RDD2 = jsc.parallelize(Arrays.asList(1,2,3));
      /*  Integer reduce = RDD2.reduce(new Function2<Integer, Integer, Integer>() {
            public Integer call(Integer integer, Integer integer2) throws Exception {
                return integer + integer2;
            }
        });*/
        String  reduce= RDD1.reduce(new Function2<String, String, String>() {
            public String call(String s, String s2) throws Exception {
                return s + s2;
            }
        });
        System.out.print(reduce);
    }
    public static void main(String[] args){
        SparkConf conf = new SparkConf().setMaster("local").setAppName("MyAction_Java ");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        myReduce(jsc);
        jsc.stop();
    }
}

Related reading

Reposted from blog.csdn.net/wjn19921104/article/details/80230348