ビッグデータシリーズ第5課:scala基礎

3589 ワード

第1段階:Spark streaming、spark sql、kafka、sparkカーネル原理(大規模なプロジェクト経験が必要);
第2段階:sparkが運行する各種環境、各種故障の解決、性能の最適化(sparkカーネル、運行原理に精通する);
第3段階:ストリーム処理、機械学習がトップであり、まず前の2段階の内容を把握する必要がある.
王家林先生のゼロ基礎向けの解説に従い、実戦を重視して学習すれば、Sparkの達人になれます!
第一部学習ノート
/**
 * Scala implicits study notes (reconstructed — original comment text was
 * garbled by extraction):
 * 1. Implicit conversion functions (implicit def) — the core mechanism.
 * 2. Implicit parameters (implicit val + implicit parameter lists).
 */
object Essay {
  def main(args: Array[String]): Unit = {
    // ---- Part 1: implicit conversion functions ----
    // An implicit conversion function (e.g. a fileToRichFile view) lets the
    // compiler silently convert a value to another type when a member lookup
    // would otherwise fail.

    // NOTE(review): these two defs mirror Spark's own RDD implicit views.
    // RDD / SequenceFileRDDFunctions / OrderedRDDFunctions / Writable are not
    // imported in this excerpt — presumably Spark imports are assumed.
    implicit def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
      rdd: RDD[(K, V)]) = new SequenceFileRDDFunctions(rdd)
    implicit def rddToOrderedRDDFunctions[K : Ordering : ClassTag, V: ClassTag](
      rdd: RDD[(K, V)]) =
    new OrderedRDDFunctions[K, V, (K, V)](rdd)
    
    class Person(val name: String) // `val` makes `name` a public read-only field
    //class Engineer(val name: String,val salary : Double)
    //new Person("Spark").code
    class Engineer(val name: String,val salary : Double){
      def code = println("Coding ....")
    }
    //def toCode(p:Person){
     // p.code
    //}
    // Implicit view Person -> Engineer: lets a Person be passed where an
    // Engineer (or its members, e.g. `code`) is needed.
    implicit def person2Enginner(p: Person):Engineer ={
      new Engineer(p.name,1000)
    }
    def toCode(p:Person){
      p.code
    }
    // Person has no `code` member; the compiler inserts person2Enginner here.
    toCode(new Person("Scala"))
    // ---- Part 2: implicit parameters ----
    // A parameter list marked `implicit` is filled in automatically by the
    // compiler from any matching `implicit val` visible in scope.
    class Level(val level : Int)
    def toWorker(name : String)(implicit l : Level)
    = println(name + " : " + l.level)
    
    implicit val level = new Level(8)
    toWorker("Spark") // prints "Spark : 8" — `level` supplied implicitly
    
  }
      implicit val default:String = "Flink"
// NOTE(review): the closing brace of `object Essay` below is commented out —
// almost certainly a paste error; the stray `}` much later in this file is
// what currently balances it. Left untouched to avoid shifting the breakage.
//}
 
object Param {
  /** Prints `content` prefixed by `language`, e.g. "Scala:Spark".
    * The second parameter list is implicit, so callers may either pass it
    * explicitly or let the compiler find an implicit String in scope.
    */
  def print(content: String)(implicit language: String): Unit =
    println(s"$language:$content")
}
object Implicit_Parameters {
  /** Demonstrates the two ways to satisfy an implicit parameter list:
    * passing the argument explicitly, or importing an implicit value.
    */
  def main(args: Array[String]): Unit = {
    // Explicitly supply the implicit `language` argument.
    Param.print("Spark")("Scala")

    // Import an implicit String so the compiler supplies it automatically.
    import Context_Implicits._
    Param.print("Hadoop")
  }
}
/** Enrichment wrapper giving a [[File]] a `read` method that returns the
  * file's entire contents as one String.
  */
class RicherFile(val file: File) {
  /** Reads the whole file into a String.
    * Fix: the original left the `Source` open (resource leak); it is now
    * closed in a `finally` block.
    */
  def read: String = {
    val src = Source.fromFile(file.getPath())
    try src.mkString finally src.close()
  }
}
 
// File subclass used to demonstrate implicit views resolved via the companion
// object (the companion is part of the implicit search scope for this type).
class File_Implicits( path: String) extends File(path)

object File_Implicits {
  // Implicit view File -> RicherFile, giving every File a `read` member.
  // Fix: added the explicit result type — implicit defs without one are
  // fragile and produce a compiler warning in Scala 2.13.
  implicit def file2RicherFile(file: File): RicherFile = new RicherFile(file)
}

object Implicits_Internals {
  /** Reads "content.txt" through the implicit File -> RicherFile view that
    * the compiler finds in File_Implicits's companion object.
    */
  def main(args: Array[String]): Unit = {
    val file = new File_Implicits("content.txt")
    // `read` is not a member of File; file2RicherFile is inserted implicitly.
    println(file.read)
  }
}
   // Actors improve on raw Java threads for concurrency: communication is by
   // message passing rather than shared mutable state and locks.
      // Akka is likewise built on the Actor model.
      // Spark 1.6.0 reportedly still used actor-based RPC internally.
/*    class HiActor extends Actor{
      def act(){ //  run
        while(true){
          while(true){
            case name:String => println(name)
          }
        }
      }
    }*/
    /** Actor that prints every String message it receives.
      * (Old `scala.actors` style: `act` is the actor's message loop,
      * analogous to Thread.run.)
      */
    class HiActor extends Actor {
      def act(): Unit =
        while (true)
          receive { // blocks until a matching message arrives
            case msg: String => println(msg)
          }
    }
    // Create and start the actor, then send it a message.
    val actor = new HiActor
    actor.start() // begins running act() concurrently
    actor ! "Spark" // `!` is an asynchronous send; the actor prints "Spark"
  // NOTE(review): the brace below closes a scope opened outside this snippet
  // (effectively the unclosed `object Essay` above) — a paste artifact.
  }
  
  // Master and Worker would communicate via Actors using case-class messages.
  
  // Case classes make good messages: immutable and pattern-matchable.
  case class Basic(name : String,age : Int)
  case class Worker(name: String ,age : Int)
  /** Actor that logs Basic and Worker messages it receives.
    * (Old `scala.actors` style: `act` loops forever, `receive`
    * pattern-matches on the next message in the mailbox.)
    */
  class basicActor extends Actor {
    def act(): Unit =
      while (true)
        receive {
          case Basic(name, age)  => println("Basic information " + name + " " + age)
          case Worker(name, age) => println("Worker information " + name + " " + age)
        }
  }
  // Instantiate and start the actor, then send it a Worker message.
  val b = new basicActor
  b.start()
  
  
  b ! Worker("Spark", 7) // matches the Worker case; prints its fields
  
  
  // `!?` is the synchronous send variant: it blocks until the actor replies.
  
  // TODO: futures — `!!` sends asynchronously and returns a Future reply.
  
  // Spark's DAGScheduler / Master / Worker were built on this messaging model.