Using Spark to compute the total read count of each author's published articles

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object spark_test {
  def main(args: Array[String]) {

    // Spark configuration and context for a local run
    val conf = new SparkConf().setAppName("name").setMaster("local")
    val sc = new SparkContext(conf)

    // Each line of tap.txt is space-separated; field 1 is the author, field 2 the read count
    val line = sc.textFile("/Users/lihu/Desktop/crawle/tap.txt", 1)

    // Map each line to (author, read count), then sum the counts per author
    val reduceData = line
      .map(l => (l.split(" ")(1), l.split(" ")(2).toInt))
      .reduceByKey(_ + _)

    // Print the total read count for each author
    reduceData.collect().foreach(println)

    sc.stop()
  }
}
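
For readers without the local tap.txt file, here is a minimal self-contained sketch of the same aggregation on an in-memory RDD. The sample data and the field layout (author in the second field, read count in the third) are assumptions, since the post does not show the contents of the input file.

import org.apache.spark.{SparkConf, SparkContext}

object spark_test_demo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("demo").setMaster("local")
    val sc = new SparkContext(conf)

    // Hypothetical sample lines: date, author, read count, separated by spaces
    val sample = Seq(
      "2017-02-06 Alice 120",
      "2017-02-06 Bob 80",
      "2017-02-07 Alice 30")

    sc.parallelize(sample)
      .map { line =>
        val fields = line.split(" ")
        (fields(1), fields(2).toInt)   // (author, read count)
      }
      .reduceByKey(_ + _)              // total read count per author
      .collect()
      .foreach(println)                // prints (Alice,150) and (Bob,80)

    sc.stop()
  }
}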
Original post: https://www.cnblogs.com/sunyaxue/p/6367792.html