mapValues:针对 K,V 格式的 RDD,该函数只对其中的 value 做操作(key 保持不变),返回的仍是 K,V 格式的 RDD。
1. java
package transformations;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;
import java.util.Arrays;
/**
 * Demonstrates the mapValues transformation on a (K, V) pair RDD:
 * the supplied function is applied to each value only, keys are left
 * untouched, and the result is again a (K, V) RDD.
 *
 * @Author yqq
 * @Date 2021/12/10 00:25
 * @Version 1.0
 */
public class MapValueTest {
    public static void main(String[] args) {
        JavaSparkContext context = new JavaSparkContext(
                new SparkConf()
                        .setMaster("local")      // single-threaded local mode, no cluster needed
                        .setAppName("mapValue")
        );
        // Hide INFO/WARN logs so the job's own output is easy to read.
        context.setLogLevel("Error");
        try {
            context.parallelizePairs(Arrays.asList(
                    new Tuple2<>("科比", 10),
                    new Tuple2<>("詹姆斯", 11),
                    new Tuple2<>("乔丹", 12),
                    new Tuple2<>("保罗", 13),
                    new Tuple2<>("威斯布鲁克", 14)
            )).mapValues(e -> e + 100)           // only the value changes; keys are preserved
              .foreach(e -> System.out.println(e));
        } finally {
            // Always stop the context so the local Spark application shuts down cleanly,
            // even if the job above throws.
            context.stop();
        }
    }
}
2. scala
package transformation
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demonstrates the mapValues transformation on a (K, V) pair RDD:
 * the supplied function is applied to each value only; keys are preserved
 * and the result is again a (K, V) RDD (here the value type changes
 * from Int to String).
 *
 * @Author yqq
 * @Date 2021/12/10 00:35
 * @Version 1.0
 */
object MapValueTest {
  def main(args: Array[String]): Unit = {
    val context = new SparkContext(
      new SparkConf()
        .setAppName("MapValue")
        .setMaster("local") // single-threaded local mode, no cluster needed
    )
    // Hide INFO/WARN logs so the job's own output is easy to read.
    context.setLogLevel("Error")
    try {
      context.parallelize(Array[(String, Int)](
        ("科比", 10),
        ("詹姆斯", 11),
        ("乔丹", 12),
        ("保罗", 13),
        ("威斯布鲁克", 14)
      )).mapValues(e => e + "NBA") // value becomes e.g. "10NBA"; keys unchanged
        .foreach(println)
    } finally {
      // Always stop the context so the local Spark application shuts down cleanly,
      // even if the job above throws.
      context.stop()
    }
  }
}