Scala notes - functions

Knowledge points: functions, anonymous functions, higher-order functions, partial functions

 

Test code:

package demo.scala

object FuncTest {

  def main(args: Array[String]): Unit = {
    //    //1. try-catch
    //    try {
    //      1 / 0
    //    } catch {
    //      case e: ArithmeticException => println("Divisor cannot be 0")
    //      case e: Exception => println(e.getMessage)
    //    } finally {
    //      //always executed, no matter what
    //      println("finally ...")
    //    }

    //    //2. Using a case class in pattern matching
    //    testMatch(Student("li"))

    //    //Assign a method to a variable, two ways of writing it
    //    val sayHelloObj = sayHello _
    //    val sayHelloObj1 = sayHello(_)
    //    sayHelloObj("hello")

    //    //An anonymous function can be assigned to a variable
    //    var obj = (name: String) => {
    //      println(name)
    //    }
    //    obj("hello")


    //3. The higher-order function map applies the same operation to every element of the collection
    val list = List(1, 2, 3)
    //map takes an anonymous function of the form y = f(x)
    println(list.map((x: Int) => x * 2))

    //The parameter type can be inferred, so it can be omitted
    println(list.map((x) => x * 2))

    //If the anonymous function has only one parameter, you can omit the parentheses
    println(list.map(x => x * 2))

    //The placeholder _ stands for each element of the list (recommended style)
    println(list.map(_ * 2))
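    //all four map calls above print List(2, 4, 6)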

    //4. filter: keep only the elements that satisfy the predicate
    println(list.filter(_ > 2))

    //5. take: take the first n elements
    println(list.take(2))

    //6. reduce combines element x with element y as x + y, then combines that result with the next element, left to right; in other words it sums all elements of the list
    list.reduce((x, y) => x + y)
    list.reduce(_ + _)
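    //for List(1, 2, 3) this evaluates ((1 + 2) + 3) = 6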

    //print the intermediate steps
    list.reduce((x, y) => {
      println(x + "," + y)
      x + y
    })

    //7. min, max, sum, count
    println(list.min)
    println(list.max)
    println(list.sum)
    println(list.count(_ >= list.min))

    //8. flatten: flatten a nested list by one level
    val list2 = List(List(1, 2), List(3, 4))
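    //flattening list2 gives List(1, 2, 3, 4)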
    println(list2.flatten)

    //9. flatMap: map then flatten
    println(list2.flatMap(_.map(_ * 2))) //result: List(2, 4, 6, 8)
    println(list2.map(_.map(_ * 2))) //result: List(List(2, 4), List(6, 8))
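    //flatMap is equivalent to map followed by flatten
    println(list2.map(_.map(_ * 2)).flatten) //result: List(2, 4, 6, 8)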

    // partial function
    println(getScore("zhangsan"))
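    //"zhangsan" falls through to the default case and gives 60; a known key hits its own case
    println(getScore("math")) //result: 95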
  }

  // ordinary method definition
  def add1(x: Int, y: Int) = {
    x + y
  }

  // define a method by assigning it an anonymous function
  def add2 = (x: Int, y: Int) => {
    x + y
  }
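  //both can be called the same way: add1(1, 2) and add2(1, 2) each return 3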

  def sayHello(mess: String): Unit = {
    println(mess)
  }


  class Person

  //a case class can be instantiated without new; case classes are used a lot in Spark SQL
  case class Teacher(name: String) extends Person

  case class Student(name: String) extends Person

  def testMatch(person: Person): Unit = {
    //case class is used in pattern matching
    person match {
      case Teacher("wang") => println("this is teacher")
      case Student("lisi") => println("student lisi")
      case Student(name) => println("this is student")
      case _ => println("other")
    }
  }
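  //e.g. testMatch(Teacher("wang")) prints "this is teacher"; testMatch(Student("li")) matches the Student(name) case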

  //Partial function: a block of case clauses wrapped in curly braces, without a match, is a partial function
  def getScore: PartialFunction[String, Int] = {
    case "math" => 95
    case "english" => 98
    case _ => 60
  }
}
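
Supplementary example: getScore above ends with a wildcard case, so it is actually defined for every input. Below is a minimal sketch of a genuinely partial function, using isDefinedAt and the collection method collect; the object name PartialDemo and the val score are illustrative names only, not part of the test code above.

object PartialDemo {

  //defined only for "math" and "english"
  val score: PartialFunction[String, Int] = {
    case "math" => 95
    case "english" => 98
  }

  def main(args: Array[String]): Unit = {
    println(score.isDefinedAt("math"))    //true
    println(score.isDefinedAt("history")) //false

    //collect applies the partial function only to the elements it is defined for
    println(List("math", "history", "english").collect(score)) //List(95, 98)
  }
}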

 
