老师您好,spark streaming测试代码在spark shell里面可以正常运行,但是写成.scala文件,用scalac编译显示error: object apache is not a member of package org。我直接在linux服务器上写的程序,没有安装idea。
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
/**
 * Streaming word count over a TCP text source.
 *
 * Connects to host "hadoopmaster" port 9999, splits each received line on
 * single spaces, and prints per-batch word counts every 3 seconds.
 *
 * Fixes relative to the original:
 *  - Curly quotes (“ ”) replaced with ASCII quotes — curly quotes are not
 *    valid Scala string delimiters and cause compile errors.
 *  - `.split(" ")` and `( + _)` restored to the placeholder forms
 *    `_.split(" ")` and `(_ + _)`.
 *  - Master set to "local[2]": a receiver-based source (socketTextStream)
 *    occupies one thread, so "local" (one thread) would leave no thread
 *    for processing and the job would receive data but never compute.
 */
object NetworkWordCount {
  def main(args: Array[String]): Unit = {
    // `conf`, not `sc` — this is a SparkConf; `sc` conventionally means SparkContext.
    val conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    // 3-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(3))
    val lines = ssc.socketTextStream("hadoopmaster", 9999)
    val words = lines.flatMap(_.split(" "))
    val wordCounts = words.map(word => (word, 1)).reduceByKey(_ + _)
    wordCounts.print()
    ssc.start()
    ssc.awaitTermination() // block until the streaming job is stopped or fails
  }
}