NoSuchMethodError: org.apache.spark.rdd.RDD.mapPartitionsInternal$default$2()Z


A maddening Spark SQL DataFrame problem.
Solution: the versions!!! A NoSuchMethodError on an internal RDD method such as mapPartitionsInternal almost always means the Spark jars on the classpath do not match the version the code was compiled against (for example, spark-sql and spark-core pulled in from different releases); aligning everything to a single Spark version makes the error go away.
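
For reference, a minimal sketch of what "aligning the versions" can look like in an sbt build. The Scala and Spark version numbers below are placeholders, not the ones used in this project; the point is that every Spark module shares one version value:

    // build.sbt -- keep all Spark modules on exactly the same version
    scalaVersion := "2.11.12"

    val sparkVersion = "2.3.2"  // single source of truth for every Spark artifact

    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
      "org.apache.spark" %% "spark-sql"  % sparkVersion % "provided"
    )

With the versions aligned, the DataFrame experiments below run: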
    import org.apache.spark.sql.types.IntegerType  // needed for the cast() calls below

    val testDF = test_util.readFile_2(sqlContext)
    // Alternative: register a temp table and query it with SQL instead
//    testDF.registerTempTable("testTable")
//    val queryDF = sqlContext.sql("select * from testTable")
    testDF.show()
    testDF.printSchema()

    testDF.columns.foreach(println)  // list the column names

    // Approach 1: cast cnt to int with selectExpr
    val df2 = testDF.selectExpr("cast(cnt as int) cnt", "weathersit")

    // Approach 2: cast via withColumn, then select the new column
//    val df2 = testDF.withColumn("cntTemp", testDF.col("cnt").cast(IntegerType)).select("cntTemp", "weathersit")

    // Approach 3: chained selects (the first select drops cnt, so the cast can no longer resolve it)
//    val testDF2 = testDF.select("weathersit").select(testDF.col("cnt").cast(IntegerType))

    df2.printSchema()
//    df2.show(10)

    // group by weather and take the max count; cnt has to be numeric here,
    // otherwise max() compares the values as strings
    df2.groupBy("weathersit").max("cnt").show()

    // Approach 4: cast a single column directly
    val df3 = testDF.select(testDF.col("cnt").cast(IntegerType)) //.select("weathersit").select("instant")
    df3.printSchema()
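
For context, here is a self-contained sketch of the same cast-then-aggregate pattern. The SparkSession setup and the inline sample data are assumptions for illustration only; they are not the original test_util input:

    // Self-contained sketch (assumed setup and sample data, not the original test_util input)
    import org.apache.spark.sql.SparkSession

    object CastAndGroupBySketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("cast-and-groupby-sketch")
          .master("local[*]")
          .getOrCreate()
        import spark.implicits._

        // cnt arrives as a string, as it would when reading a CSV without schema inference
        val testDF = Seq(
          ("1", "16"), ("1", "40"), ("2", "32"), ("3", "13")
        ).toDF("weathersit", "cnt")

        // cast cnt to int, then aggregate
        val df2 = testDF.selectExpr("cast(cnt as int) cnt", "weathersit")
        df2.printSchema()
        df2.groupBy("weathersit").max("cnt").show()

        spark.stop()
      }
    }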