取两个数据集的差集,结果 RDD 的分区数与 subtract 前面的 RDD 的分区数一致。
1. java
package transformations;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import java.util.Arrays;
/**
 * Demonstrates {@code RDD.subtract}: returns the elements of the first (left)
 * RDD that are not present in the second. The resulting RDD keeps the
 * partition count of the left-hand RDD, as the printed output shows.
 *
 * @Author yqq
 * @Date 2021/12/09 18:03
 * @Version 1.0
 */
public class SubtractTest {
    public static void main(String[] args) {
        JavaSparkContext context = new JavaSparkContext(
                new SparkConf()
                        .setMaster("local")
                        .setAppName("subtract")
        );
        try {
            context.setLogLevel("Error");
            // Left RDD has 2 partitions, right RDD has 3; the subtract result
            // should report 2 partitions (it follows the left-hand RDD).
            JavaRDD<String> rdd = context.parallelize(Arrays.asList("a", "b", "c", "e", "f"), 2);
            JavaRDD<String> rdd1 = context.parallelize(Arrays.asList("a", "b", "g", "h", "f"), 3);
            JavaRDD<String> subtract = rdd.subtract(rdd1);
            System.out.println("rdd partition length = " + rdd.getNumPartitions());
            System.out.println("rdd1 partition length = " + rdd1.getNumPartitions());
            System.out.println("subtract partition length = " + subtract.getNumPartitions());
            subtract.foreach(e -> System.out.print(e + "\t"));
        } finally {
            // Always release the local Spark resources, even if the job fails.
            context.stop();
        }
    }
}
2. scala
package transformation
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demonstrates `RDD.subtract`: keeps the elements of `rdd` that do not occur
 * in `rdd1`. The result RDD inherits the partitioning of the left-hand RDD.
 *
 * @Author yqq
 * @Date 2021/12/09 18:09
 * @Version 1.0
 */
object SubtractTest {
  def main(args: Array[String]): Unit = {
    val context = new SparkContext(
      new SparkConf()
        .setMaster("local")
        .setAppName("subtract")
    )
    try {
      context.setLogLevel("Error")
      val rdd: RDD[String] = context.parallelize(Array[String]("a", "b", "c", "e", "f"))
      val rdd1: RDD[String] = context.parallelize(Array[String]("a", "b", "g", "h", "f"))
      val value: RDD[String] = rdd.subtract(rdd1)
      // Mirror the Java example: show that the result keeps the left RDD's
      // partition count, which is what this section sets out to demonstrate.
      println(s"rdd partition length = ${rdd.getNumPartitions}")
      println(s"rdd1 partition length = ${rdd1.getNumPartitions}")
      println(s"subtract partition length = ${value.getNumPartitions}")
      value.foreach(print)
    } finally {
      // Stop the local SparkContext to release resources even on failure.
      context.stop()
    }
  }
}