Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.$scope()Lscala/xml/TopScope$;


Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.$scope()Lscala/xml/TopScope$;
	at org.apache.spark.ui.jobs.AllJobsPage.(AllJobsPage.scala:39)
	at org.apache.spark.ui.jobs.JobsTab.(JobsTab.scala:38)
	at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:67)
	at org.apache.spark.ui.SparkUI.(SparkUI.scala:84)
	at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:221)
	at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:163)
	at org.apache.spark.SparkContext.(SparkContext.scala:452)
	at Spark_day01.SparkWC$.main(SparkWC.scala:18)
	at Spark_day01.SparkWC.main(SparkWC.scala)

解決策:spark-coreのバージョン依存に問題が発生し、元のpom.xmlは次のとおりです.
 <dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.10</artifactId>
    <version>2.1.0</version>
 </dependency>

次のように変更します.
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.1.1</version>
</dependency>


問題が解決する...ソースコードを添付:
package Spark_day01

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Classic Spark word-count driver.
 *
 * Usage: SparkWC <inputPath> <outputPath>
 *   - reads text from args(0)
 *   - writes (word, count) pairs, sorted by count descending, to args(1)
 */
object SparkWC {

  def main(args: Array[String]): Unit = {

    // Fail fast with a clear message instead of an
    // ArrayIndexOutOfBoundsException after the SparkContext is already up.
    require(args.length >= 2, "Usage: SparkWC <inputPath> <outputPath>")

    // Configure the application.
    // master options:
    //   local     — run with a single worker thread
    //   local[3]  — run with 3 worker threads
    //   local[*]  — run with as many threads as available cores
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("spark-word-count")

    // Entry point to the Spark cluster; must be stopped when done.
    val sc: SparkContext = new SparkContext(conf)

    try {
      // Read the input file(s); each element is one line of text.
      val lines: RDD[String] = sc.textFile(args(0))

      // Split each line into words on single spaces.
      val words: RDD[String] = lines.flatMap(_.split(" "))

      // word => (word, 1) so counts can be summed per key.
      val tuples: RDD[(String, Int)] = words.map((_, 1))

      // Sum the 1s for each distinct word.
      val reduced: RDD[(String, Int)] = tuples.reduceByKey(_ + _)

      // Sort by count, highest first.
      val sorted: RDD[(String, Int)] = reduced.sortBy(_._2, ascending = false)

      // Persist the result (triggers the whole computation).
      sorted.saveAsTextFile(args(1))
    } finally {
      // Always release cluster resources, even if an action above fails.
      sc.stop()
    }

  }

}