Consider the code below. When developing a Spark word count in Eclipse, the compiler cannot find the reduceByKey(_ + _) operation because an import is missing. Adding import org.apache.spark.SparkContext._ solves it; using import org.apache.spark._ directly also works.
package com.scala.spark.wordcount

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
// Brings the implicit conversion into scope that adds reduceByKey
// (and the other PairRDDFunctions) to an RDD of (key, value) pairs.
import org.apache.spark.SparkContext._

object WordCount2 extends App {
  // "local[*]" runs the job locally; replace with your cluster master URL.
  val conf = new SparkConf().setAppName("WordCount").setMaster("local[*]")
  val sc = new SparkContext(conf)
  val lines = sc.textFile("/home/streamsadmin/data/WT06557-dex5sr")
  // println(lines.count())
  val b = lines.flatMap(_.split(","))  // split each line on commas
  val c = b.map(x => (x, 1))           // pair every word with the count 1
  val d = c.reduceByKey(_ + _)         // sum the counts per word
  println(d.count())                   // number of distinct words
}
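For context, reduceByKey is not defined on RDD itself but on PairRDDFunctions; in Spark versions before 1.3 the implicit conversion that does the wrapping (rddToPairRDDFunctions) lives in the SparkContext companion object, which is exactly what the import pulls in (Spark 1.3 and later resolve these implicits automatically, so the explicit import is only needed on older versions). Below is a minimal self-contained sketch that isolates the issue; the object name ImportCheck and the sample data are made up for illustration:

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._ // provides the implicit rddToPairRDDFunctions

object ImportCheck extends App {
  val sc = new SparkContext(new SparkConf().setAppName("ImportCheck").setMaster("local[*]"))
  val pairs = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
  // Without the SparkContext._ import, Spark versions before 1.3 fail to compile here with:
  //   value reduceByKey is not a member of org.apache.spark.rdd.RDD[(String, Int)]
  val summed = pairs.reduceByKey(_ + _)
  summed.collect().foreach(println) // prints (a,3) and (b,3)
  sc.stop()
}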