有 Java 编程相关的问题?

你可以在下面搜索框中键入要查询的问题!

maven java.lang.NoClassDefFoundError org/apache/spark/sql/hive/HiveContext

我试图使用配置单元上下文,但出现以下错误:

exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/hive/HiveContext

有什么建议吗?

package com.mapr.examples;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.*;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import static org.apache.spark.sql.functions.*;

import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

public class SparkSQLHiveContextExample {
    /**
     * Entry point: creates a Spark context and demonstrates where a
     * {@link HiveContext} would be constructed for Hive-backed SQL.
     *
     * <p>NOTE(review): the reported {@code NoClassDefFoundError} on
     * {@code org/apache/spark/sql/hive/HiveContext} is a <em>runtime</em>
     * classpath problem — the code compiles against spark-hive, but the
     * spark-hive jar is presumably absent when the job actually runs.
     * Fix the deployment (fat jar via maven-shade-plugin, or supply the
     * spark-hive jar to spark-submit), not this class. TODO confirm how
     * the job is launched.
     *
     * @param args command-line arguments (unused)
     * @throws Exception propagated from Spark startup/shutdown
     */
    public static void main(String[] args) throws Exception {
        SparkConf sparkConf = new SparkConf().setAppName("SparkSQLHiveContextExample");

        // try-with-resources: JavaSparkContext implements Closeable, and
        // close() simply delegates to stop(). The original called stop()
        // and then close(), which is redundant, and would leak the context
        // if an exception were thrown before reaching stop(). This form
        // guarantees shutdown on every path.
        try (JavaSparkContext ctx = new JavaSparkContext(sparkConf)) {
            // Once the runtime classpath includes spark-hive, this works:
            // HiveContext hiveContext = new HiveContext(ctx.sc());
            // DataFrame tables = hiveContext.sql("SHOW TABLES");
        }
    }
}

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.mapr.sample</groupId>
    <artifactId>SparkSQLExample</artifactId>
    <version>1.0-SNAPSHOT</version>

<dependencies>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>
    <!-- spark-hive provides org.apache.spark.sql.hive.HiveContext. It is on the
         compile classpath here, so a NoClassDefFoundError at runtime means the
         jar is missing when the job executes: build a fat jar with the
         maven-shade-plugin, or hand the spark-hive jar to spark-submit via its
         "jars" option. -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-hive_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-catalyst_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>1.2.2</version>
    </dependency>
    <!-- Removed maven-compiler-plugin from this section: it is a build plugin,
         not a library dependency, and is already configured correctly below
         under build/plugins. Declaring a plugin as a dependency puts plugin
         internals on the application classpath and does nothing for the build. -->
</dependencies>


    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.3.2</version>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>

共 (1) 个答案

  1. # 1 楼答案

    添加此依赖项后,您能再试一次吗

    应该行得通

    maven link