import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

object DStream_RDDqueue {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RDDQueueStream").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(1)) // batch interval: one second

    // Queue of RDDs that feeds the stream; SynchronizedQueue lets the driver
    // thread enqueue while Spark Streaming dequeues one RDD per batch.
    val rddQueue = new mutable.SynchronizedQueue[RDD[Int]]()
    val queueStream = ssc.queueStream(rddQueue)

    // Key each number by its remainder mod 5 and count occurrences per batch
    val result = queueStream.map(x => (x % 5, 1)).reduceByKey(_ + _)
    result.print(1000) // print up to 1000 records of each batch

    ssc.start()
    // Push an RDD of 1 to 100 (2 partitions) every 2 seconds; use a bounded
    // loop so that ssc.stop() below is actually reachable
    for (_ <- 1 to 10) {
      rddQueue += ssc.sparkContext.makeRDD(1 to 100, 2)
      Thread.sleep(2000)
    }
    ssc.stop()
  }
}
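// Note: mutable.SynchronizedQueue has been deprecated since Scala 2.11. A
// minimal alternative sketch, following the pattern in Spark's own QueueStream
// example, is a plain mutable.Queue with enqueues synchronized on the queue
// itself; it would replace the queue declaration and feeder loop above:
//
//   val rddQueue = new mutable.Queue[RDD[Int]]()
//   val queueStream = ssc.queueStream(rddQueue)
//   ...
//   for (_ <- 1 to 10) {
//     // synchronize with Spark Streaming's consumer thread before enqueueing
//     rddQueue.synchronized {
//       rddQueue += ssc.sparkContext.makeRDD(1 to 100, 2)
//     }
//     Thread.sleep(2000)
//   }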