hello,102
java,122
fucking,444
world,144
hello,4444
mdq,898
xyh,443
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object Test02 {

  /** Word-count "top N" job.
    *
    * Reads lines of the form "word,count" from E:\input\wordcount.txt,
    * sums the counts per word, sorts the totals in descending order,
    * and writes the resulting (word, total) pairs to output1/result.txt.
    */
  def main(args: Array[String]): Unit = {
    // NOTE(review): fixed curly quotes, invalid placeholder syntax
    // `(.split(","))` and `reduceByKey(+_)`, and unescaped backslashes
    // in the Windows path — the original did not compile.
    val conf = new SparkConf().setAppName("topn").setMaster("local[2]")
    val sc = new SparkContext(conf)
    try {
      sc.textFile("E:\\input\\wordcount.txt")
        .map(_.split(","))                            // "word,count" -> Array(word, count)
        .map(parts => (parts(0), parts(1).toInt))     // keyed (word, count) pairs
        .reduceByKey(_ + _)                           // total count per word
        .map { case (word, total) => (total, word) }  // swap so the count is the sort key
        .sortByKey(ascending = false)                 // descending by total
        .map { case (total, word) => (word, total) }  // swap back to (word, total)
        .saveAsTextFile("output1/result.txt")
    } finally {
      // Always release the local Spark context, even if the job fails.
      sc.stop()
    }
  }
}