spark-sql: "java.lang.NoSuchFieldError: out" exception resolution

Exception

        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:847)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:922)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:931)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.NoSuchFieldError: out
        at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:221)
        at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:127)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:314)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:433)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:326)
        at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
        at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:219)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:219)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:219)
        at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
        ... 59 more
[ERROR] spark.sql hql error
Exception in thread "main" org.apache.spark.sql.AnalysisException: java.lang.NoSuchFieldError: out;
        at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106)
        at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:218)
        at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:138)
        at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:126)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.org$apache$spark$sql$hive$HiveSessionStateBuilder$$externalCatalog(HiveSessionStateBuilder.scala:39)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$1.apply(HiveSessionStateBuilder.scala:54)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$1.apply(HiveSessionStateBuilder.scala:54)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:90)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:90)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.databaseExists(SessionCatalog.scala:243)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.org$apache$spark$sql$catalyst$catalog$SessionCatalog$$requireDbExists(SessionCatalog.scala:177)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.getTableMetadata(SessionCatalog.scala:432)
        at org.apache.spark.sql.catalyst.catalog.CatalogUtils$.getMetaData(ExternalCatalogUtils.scala:265)

Spark application

package org.example.spark;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

import org.apache.spark.sql.SparkSession;

public class JavaSparkHiveExample {

    public static void main(String[] args) {
        long start = System.currentTimeMillis();

        // args[0] carries the SQL statement Base64-encoded so it survives shell quoting;
        // decode it explicitly as UTF-8 rather than relying on the platform default.
        byte[] decodedBytes = Base64.getDecoder().decode(args[0]);
        String sql = new String(decodedBytes, StandardCharsets.UTF_8);
        System.out.println("sql:" + sql);

        SparkSession spark = SparkSession
                .builder()
                // args[1] ("true"/"false") is passed through to this CDP-specific setting.
                .config("spark.sql.hive.loadStagingDirectory.enabled", args[1])
                .appName("Java Spark Hive Example")
                .enableHiveSupport()
                .getOrCreate();

        spark.sql(sql);
        spark.stop();

        long end = System.currentTimeMillis();
        System.out.println("cost time:" + (end - start));
    }
}
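
For completeness, a minimal sketch of how the Base64 argument could be produced before submitting the job; the SQL text and the helper class name here are illustrative, not from the original post:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

// Hypothetical helper: encodes an HQL statement for use as args[0] above.
public class EncodeSqlArg {
    public static void main(String[] args) {
        String sql = "SELECT * FROM default.some_table"; // illustrative statement
        String encoded = Base64.getEncoder()
                .encodeToString(sql.getBytes(StandardCharsets.UTF_8));
        // Pass this value as the first program argument to JavaSparkHiveExample;
        // the second argument toggles spark.sql.hive.loadStagingDirectory.enabled.
        System.out.println(encoded);
    }
}

The job itself is then launched with spark-submit --class org.example.spark.JavaSparkHiveExample, passing the encoded string and a true/false flag as the two program arguments.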

Cause

The root cause is a version mismatch: the Spark version in the CDP cluster is 2.4.7 (a Cloudera build), while the Java project depends on upstream Spark 2.4.0. A NoSuchFieldError arises when code compiled against one version references a field that does not exist in the classes actually loaded at runtime; here the failing access happens inside HiveClientImpl.newState, as the stack trace shows.
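
One quick way to surface a mismatch like this is to compare the version declared in the POM with what the cluster actually runs: spark-submit --version prints the latter, or the application itself can log it. A minimal sketch (the class and app name are illustrative):

import org.apache.spark.sql.SparkSession;

public class PrintSparkVersion {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("Print Spark Version")
                .getOrCreate();
        // When submitted to the cluster, this prints the Spark version on the
        // runtime classpath (e.g. the CDP build), which may differ from the
        // version the project was compiled against.
        System.out.println("runtime spark version: " + spark.version());
        spark.stop();
    }
}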


Solution

Modify the Java project dependencies as follows:

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_2.11</artifactId>
      <version>2.4.7.7.1.7.2000-305</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.11</artifactId>
      <version>2.4.7.7.1.7.2000-305</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.11</artifactId>
      <version>2.4.7.7.1.7.2000-305</version>
    </dependency>
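
Cloudera-versioned artifacts such as these are not published to Maven Central, so the Cloudera repository typically has to be declared as well; the URL below is the commonly used public Cloudera repository, but verify it for your environment:

    <repositories>
      <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
      </repository>
    </repositories>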

Matching the project's Spark dependencies to the cluster build solves the problem. There is also a more adaptable solution.
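
The original post does not spell that approach out here, but one common pattern (an assumption on my part, not the author's stated fix) is to mark the Spark artifacts as provided, so the application compiles against the API but picks up the cluster's own jars at runtime through spark-submit:

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_2.11</artifactId>
      <version>2.4.7.7.1.7.2000-305</version>
      <!-- provided: compile against this API, but use the cluster's jars at runtime -->
      <scope>provided</scope>
    </dependency>

With provided scope the application jar carries no Spark classes of its own, so a cluster upgrade does not force a rebuild against an exactly matching version, as long as the APIs the application uses remain compatible.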

Source: blog.csdn.net/yy_diego/article/details/132023661