WordCount in Spark

spark-shell --master yarn
The shell pre-creates a SparkContext and exposes it as the variable sc.
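
If no YARN cluster is at hand, the same session can be run in local mode instead (an alternative launch, not from the original post):

spark-shell --master local[*]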
// read the input file from HDFS into an RDD of lines
val lineRDD = sc.textFile("/test/input/test")
// collect() pulls the entire RDD back to the driver; fine for a small test file
lineRDD.collect().foreach(println)
// split each line into individual words (a space delimiter is assumed)
val wordRDD = lineRDD.flatMap(_.split(" "))
wordRDD.collect().foreach(println)
// pair each word with an initial count of 1
val wordcountRDD = wordRDD.map(x => (x, 1))
wordcountRDD.collect().foreach(println)
// sum the counts for each word
val resultRDD = wordcountRDD.reduceByKey((x, y) => x + y)
resultRDD.collect().foreach(println)
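
For reference, the same reduction reads more tersely with Scala's placeholder syntax; the behavior is identical:

val resultRDD = wordcountRDD.reduceByKey(_ + _)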
// sort the results alphabetically (sortByKey orders by the key, i.e. the word)
val orderedRDD = resultRDD.sortByKey()
orderedRDD.collect().foreach(println)
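
The steps above can also be chained into a single expression and, if a different ordering is more useful, sorted by count rather than by word; this is a sketch, and the output path is hypothetical, not from the original post:

val result = sc.textFile("/test/input/test")
  .flatMap(_.split(" "))
  .map(word => (word, 1))
  .reduceByKey(_ + _)
  .sortBy(_._2, ascending = false)  // highest counts first
result.saveAsTextFile("/test/output/wordcount")  // hypothetical output path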

Original post: https://www.cnblogs.com/timlong/p/9939671.html