org.apache.spark.sql.hive.thriftserver.SparkSQLOperationManager#newExecuteStatementOperation (single session)

In single-session mode every JDBC connection shares the same SQLContext, so the UDFs listed in the config file only need to be registered once. The udfNotInited flag guards that one-time registration, which happens lazily when the first statement arrives.

  // Flipped to false once the UDFs have been registered, so the config file
  // is read only once for the shared single-session context.
  var udfNotInited = true

  override def newExecuteStatementOperation(
      parentSession: HiveSession,
      statement: String,
      confOverlay: JMap[String, String],
      async: Boolean): ExecuteStatementOperation = synchronized {
    val sqlContext = sessionToContexts.get(parentSession.getSessionHandle)
    require(sqlContext != null, s"Session handle: ${parentSession.getSessionHandle} has not been" +
      " initialized or has already been closed.")

    if (udfNotInited) {
      val configFilePath = sqlContext.sparkContext.conf.getOption("spark.app.confpath")
        .getOrElse("/sparklib/conf/udf.config")
      logInfo(s"newExecuteStatementOperation configFilePath: $configFilePath")
      UdfLoadUtils.udfRegister(configFilePath, sqlContext.sparkSession)
      udfNotInited = false
    }

    val conf = sqlContext.sessionState.conf
    val runInBackground = async && conf.getConf(HiveUtils.HIVE_THRIFT_SERVER_ASYNC)
    val operation = new SparkExecuteStatementOperation(parentSession, statement, confOverlay,
      runInBackground)(sqlContext, sessionToActivePool)
    handleToOperation.put(operation.getHandle, operation)
    logDebug(s"Created Operation for $statement with session=$parentSession, " +
      s"runInBackground=$runInBackground")
    operation
  }
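
UdfLoadUtils is the author's own helper and its source is not shown in the post. Below is a minimal sketch of what such a loader could look like, assuming the config file holds one "functionName,fully.qualified.UdfClass" pair per line and that the classes are Hive UDFs already on the classpath (both assumptions are mine, not the author's):

import scala.io.Source
import org.apache.spark.sql.SparkSession

object UdfLoadUtils {
  // Reads "name,className" pairs and registers each class as a Hive
  // temporary function on the given SparkSession.
  def udfRegister(configFilePath: String, spark: SparkSession): Unit = {
    val source = Source.fromFile(configFilePath)
    try {
      source.getLines()
        .map(_.trim)
        .filter(line => line.nonEmpty && !line.startsWith("#"))
        .foreach { line =>
          val Array(name, className) = line.split(",").map(_.trim)
          spark.sql(s"CREATE TEMPORARY FUNCTION $name AS '$className'")
        }
    } finally {
      source.close()
    }
  }
}

Because spark.app.confpath is a custom key, it has to be supplied when the thrift server is launched, e.g. --conf spark.app.confpath=/sparklib/conf/udf.config; otherwise the hard-coded default /sparklib/conf/udf.config is used.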

org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager#openSession

In openSession the per-session branch is commented out: instead of creating a fresh context via sqlContext.newSession() for multi-session mode, every connection is handed the one shared sqlContext, which is why the single registration in newExecuteStatementOperation above is sufficient.


  override def openSession(
      protocol: TProtocolVersion,
      username: String,
      passwd: String,
      ipAddress: String,
      sessionConf: java.util.Map[String, String],
      withImpersonation: Boolean,
      delegationToken: String): SessionHandle = {
    val sessionHandle =
      super.openSession(protocol, username, passwd, ipAddress, sessionConf, withImpersonation,
        delegationToken)
    val session = super.getSession(sessionHandle)
    HiveThriftServer2.listener.onSessionCreated(
      session.getIpAddress, sessionHandle.getSessionId.toString, session.getUsername)
  //  println(s" open session : single ${sqlContext.conf.hiveThriftServerSingleSession}  ")
    val ctx = sqlContext
//    val ctx = if (sqlContext.conf.hiveThriftServerSingleSession) {
//      sqlContext
//    } else {
//       sqlContext.newSession()
//   }
//    if (!sqlContext.conf.hiveThriftServerSingleSession) {
//      val configFilePath = ctx.sparkContext.conf.getOption("spark.app.confpath")
//        .getOrElse("/sparklib/conf/udf.config")
//      println(s"openSession  configFilePath: $configFilePath ")
//      UdfLoadUtils.udfRegister(configFilePath, ctx.sparkSession)
//    }
    ctx.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
    if (sessionConf != null && sessionConf.containsKey("use:database")) {
      ctx.sql(s"use ${sessionConf.get("use:database")}")
    }
    sparkSqlOperationManager.sessionToContexts.put(sessionHandle, ctx)
    sessionHandle
  }
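
The use:database entry is what the Hive JDBC driver puts into sessionConf when the connection URL names a database, so the whole flow can be smoke-tested with a plain JDBC client. A sketch, where the host, port, database, and the my_upper function name are placeholders of mine rather than values from the post:

import java.sql.DriverManager

object ThriftSmokeTest {
  def main(args: Array[String]): Unit = {
    // "/mydb" in the URL reaches openSession as sessionConf("use:database"),
    // which the override above turns into a "use mydb" statement.
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/mydb", "hive", "")
    val stmt = conn.createStatement()
    // my_upper stands in for a function listed in udf.config; in single-session
    // mode it is registered lazily by the first statement that arrives.
    val rs = stmt.executeQuery("SELECT my_upper('spark')")
    while (rs.next()) {
      println(rs.getString(1))
    }
    rs.close()
    stmt.close()
    conn.close()
  }
}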
