Converting between DataFrames and RDDs in Spark SQL
1. RDD to DataFrame
   - Creating a DataFrame with a case class
   - Creating a DataFrame with a StructType
   - Creating a DataFrame from JSON
2. DataFrame to RDD
   - df.rdd
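Both text-file examples below read F:\input.txt and split each line on a single space into a name and an integer age, so the file is assumed to hold lines such as (hypothetical sample data):

Alice 25
Bob 30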
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object TestDataFrame {
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration
    val conf = new SparkConf().setAppName("TestDataFrame").setMaster("local")
    // 2. Spark context
    val sc = new SparkContext(conf)
    // 3. Spark SQL context
    val ssc = new SQLContext(sc)
    // 4. Build an RDD of the People case class, then convert it to a DataFrame;
    //    ssc.implicits._ supplies the implicit toDF conversion for RDDs of case classes
    val peopleRDD = sc.textFile("F:\\input.txt")
      .map(line => People(line.split(" ")(0), line.split(" ")(1).trim.toInt))
    import ssc.implicits._
    val df = peopleRDD.toDF()
    // Register the DataFrame as a temporary table so it can be queried with SQL
    df.registerTempTable("people")
    ssc.sql("select * from people where age > 23").show()
    // 5. Stop the Spark context
    sc.stop()
  }
}

case class People(var name: String, var age: Int)
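To check what schema toDF inferred from the People case class, call printSchema on the DataFrame (a quick sanity check, not part of the original listing; with this case class it prints roughly the following):

df.printSchema()
// root
//  |-- name: string (nullable = true)
//  |-- age: integer (nullable = false)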
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType}

object TestDataFrame2 {
  def test2(): Unit = {
    // 1. Spark configuration
    val conf = new SparkConf().setAppName("TestDataFrame").setMaster("local")
    // 2. Spark context
    val sc = new SparkContext(conf)
    // 3. Spark SQL context
    val ssc = new SQLContext(sc)
    // 4. Build an RDD[Row], describe its layout with a StructType,
    //    and combine the two into a DataFrame
    val peopleRDD = sc.textFile("F:\\input.txt").map(line =>
      Row(line.split(" ")(0), line.split(" ")(1).trim.toInt))
    // Schema: a nullable string column "name" and a nullable integer column "age"
    val structType: StructType = StructType(
      StructField("name", StringType, true) ::
      StructField("age", IntegerType, true) :: Nil
    )
    val df: DataFrame = ssc.createDataFrame(peopleRDD, structType)
    df.registerTempTable("people")
    ssc.sql("select * from people").show()
    // 5. Stop the Spark context
    sc.stop()
  }
}
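The StructType route pays off when the columns are only known at run time. A minimal sketch, assuming the names arrive as a space-separated string (schemaString is a hypothetical input, and every column is treated as a string for simplicity):

val schemaString = "name age" // hypothetical: column list supplied at run time
val fields = schemaString.split(" ").map(name => StructField(name, StringType, nullable = true))
val schema = StructType(fields)
// schema can now be passed to ssc.createDataFrame(rowRDD, schema) as above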
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.DataFrame

object TestDataFrame3 {
  def test3(): Unit = {
    // 1. Spark configuration
    val conf = new SparkConf().setAppName("TestDataFrame").setMaster("local")
    // 2. Spark context
    val sc = new SparkContext(conf)
    // 3. Spark SQL context
    val ssc = new SQLContext(sc)
    // 4. Read a JSON file directly into a DataFrame; the schema is inferred from the data
    val df: DataFrame = ssc.read.json("F:\\json.json")
    df.registerTempTable("people")
    ssc.sql("select * from people").show()
    // 5. Stop the Spark context
    sc.stop()
  }
}
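Note that read.json expects the JSON Lines format, one complete JSON object per line, rather than a single pretty-printed array. A file matching the schema of the earlier examples might look like (hypothetical sample data):

{"name":"Alice","age":25}
{"name":"Bob","age":30}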
2. DataFrame to RDD

The reverse conversion needs no helper at all: every DataFrame exposes its rows as an RDD[Row] through the rdd method.

df.rdd
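Each Row is then read back by field position (or by name via getAs). A minimal sketch, reusing df and the People case class from the first example (peopleRDD2 is a hypothetical name):

// df.rdd yields an RDD[org.apache.spark.sql.Row]
val rowRDD = df.rdd
// Rebuild the case-class RDD by extracting each field by position
val peopleRDD2 = rowRDD.map(row => People(row.getString(0), row.getInt(1)))
peopleRDD2.collect().foreach(println)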