import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by liupeng on 2017/6/17.
*/
object A_countByKey {
  // Point Spark at a local Hadoop installation (Windows-only workaround).
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")
  Logger.getLogger("org").setLevel(Level.ERROR)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("countByKey_test").setMaster("local")
    val sc = new SparkContext(conf)
    // Prepare some sample data.
    val nameList = List(("A", 1), ("A", 2), ("B", 1))
    val data = sc.parallelize(nameList)
    // Only available on RDDs of type (K, V); returns a Map of (K, Long)
    // pairs giving the number of elements for each key.
    val num = data.countByKey()
    for (x <- num) {
      println(x)
    }
    sc.stop()
  }
}
Output:
(B,1)
(A,2)
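For comparison, countByKey() is essentially a mapValues/reduceByKey pipeline whose per-key counts are collected to the driver, so it should only be used when the number of distinct keys is small. The sketch below (not part of the original post; the object name CountByKeyEquivalent is made up for illustration) computes the same counts but keeps them as a distributed RDD:

import org.apache.spark.{SparkConf, SparkContext}

object CountByKeyEquivalent {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("countByKey_equivalent").setMaster("local")
    val sc = new SparkContext(conf)
    val data = sc.parallelize(List(("A", 1), ("A", 2), ("B", 1)))

    // Replace each value with a count of 1, then sum the counts per key
    // on the executors; the result stays distributed instead of being
    // pulled back to the driver as a Map.
    val counts = data
      .mapValues(_ => 1L)
      .reduceByKey(_ + _)

    counts.collect().foreach(println)  // prints (B,1) and (A,2)
    sc.stop()
  }
}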