[Study notes]
Setting up a Spark development environment on Windows 7 with Eclipse and Java 1.8 — a JavaRDD "hello world" example: create a plain Java project in Eclipse Oxygen, then add spark-assembly-1.6.1-hadoop2.6.0.jar to the project's build path.
package com; // NOTE(review): original package declaration was garbled ("COM Package;") — confirm the real package name

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaDoubleRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

/**
 * Minimal Spark (1.6.x) Java example: parallelize a list of doubles into an
 * RDD, map each element through a {@link Function} that adds 2, collect the
 * results back to the driver, and print them.
 */
public class CollectTest {

    public static void main(String[] args) {
        // Local-mode Spark context; no cluster required.
        SparkConf conf = new SparkConf().setAppName("testCollect").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Double> values = Arrays.asList(1.0, 4.0, 3.0, 7.0, 5.0);
            // Distribute the list across 2 partitions.
            JavaDoubleRDD doubleRdd = sc.parallelizeDoubles(values, 2);
            // Function<Double, Double>: the two type parameters are the input
            // type and the return type of call().
            JavaRDD<Double> mapRdd = doubleRdd.map(new Function<Double, Double>() {
                @Override
                public Double call(Double in) throws Exception {
                    return in + 2; // shift each element by 2
                }
            });
            // collect() pulls the mapped values back to the driver.
            List<Double> douList = mapRdd.collect();
            for (Double d : douList) {
                System.out.println("D:" + d);
            }
        } finally {
            // Always release the SparkContext, even if the job fails.
            sc.close();
        }
    }
}
Article reproduced from the original post: https://blog.csdn.net/qq_44596980/article/details/93384494