2017-08-29 3 views
0

Ich verwende folgende pom.xml, um mein Projekt zu erstellen. Wenn ich den resultierenden Behälter mit der Funkenabgabe führe, gibt es den folgenden Fehler.java.lang.NoClassDefFoundError: org/apache/spark/streaming/kafka/KafkaUtils

java.lang.NoClassDefFoundError: org/Apache/Funken/Streaming/kafka/KafkaUtils

Allerdings, wenn ich --packages „org.apache.spark umfassen: Funken Streaming-kafka-0-8-assembly_2 .11: 2.0.2 "Option läuft es wie gewohnt. Ich verstehe nicht, warum ich diese Option bereitstellen muss, wenn meine Abhängigkeit als Teil von pom.xml enthalten ist.

http://maven.apache.org/maven-v4_0_0.xsd "> 4.0.0 test.project ede-Produkte-UIS-Funken-Streaming jar 1.0-SNAPSHOT ede-Produkte -uis-Funken-Streaming

<properties> 
    <spark.streaming.mainclass>test.project.spark.streaming.StreamReader 
    </spark.streaming.mainclass> 
    <scala.version>2.11.8</scala.version> 
    <scala.major.minor.version>2.11</scala.major.minor.version> 
    <spark.version>2.0.2</spark.version> 
    <java.major.minor.version>1.8</java.major.minor.version> 
    <libthrift.version>0.9.0</libthrift.version> 
</properties> 

<repositories> 
    <repository> 
     <id>scala-tools.org</id> 
     <name>Scala-Tools Maven2 Repository</name> 
     <url>http://scala-tools.org/repo-releases</url> 
    </repository> 
    <repository> 
     <id>confluent</id> 
     <url>http://packages.confluent.io/maven/</url> 
    </repository> 
</repositories> 

<dependencies> 
    <dependency> 
     <groupId>org.apache.spark</groupId> 
     <artifactId>spark-core_${scala.major.minor.version}</artifactId> 
     <version>${spark.version}</version> 
     <scope>provided</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.spark</groupId> 
     <artifactId>spark-hive_${scala.major.minor.version}</artifactId> 
     <version>${spark.version}</version> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.spark</groupId> 
     <artifactId>spark-streaming_${scala.major.minor.version}</artifactId> 
     <version>${spark.version}</version> 
     <scope>provided</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.spark</groupId> 
     <artifactId>spark-streaming-kafka-0-8_${scala.major.minor.version}</artifactId> 
     <version>${spark.version}</version> 
     <scope>provided</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.spark</groupId> 
     <artifactId>spark-streaming_${scala.major.minor.version}</artifactId> 
     <version>${spark.version}</version> 
     <type>test-jar</type> 
     <scope>test</scope> 
    </dependency> 
    <!-- https://mvnrepository.com/artifact/com.typesafe.play/play-json_2.10 --> 
    <dependency> 
     <groupId>com.typesafe.play</groupId> 
     <artifactId>play-json_2.10</artifactId> 
     <version>2.4.0-M1</version> 
    </dependency> 
    <dependency> 
     <groupId>org.parboiled</groupId> 
     <artifactId>parboiled-java</artifactId> 
     <version>1.0.2</version> 
     <scope>test</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.thrift</groupId> 
     <artifactId>libthrift</artifactId> 
     <version>${libthrift.version}</version> 
     <exclusions> 
      <exclusion> 
       <groupId>org.apache.httpcomponents</groupId> 
       <artifactId>httpclient</artifactId> 
      </exclusion> 
      <exclusion> 
       <groupId>org.apache.httpcomponents</groupId> 
       <artifactId>httpcore</artifactId> 
      </exclusion> 
     </exclusions> 
    </dependency> 
</dependencies> 
<build> 
    <sourceDirectory>src/main/scala</sourceDirectory> 
    <testSourceDirectory>src/test/scala</testSourceDirectory> 
    <plugins> 
     <plugin> 
      <groupId>org.scala-tools</groupId> 
      <artifactId>maven-scala-plugin</artifactId> 
      <executions> 
       <execution> 
        <goals> 
         <goal>compile</goal> 
         <goal>testCompile</goal> 
        </goals> 
       </execution> 
      </executions> 
      <configuration> 
       <scalaVersion>${scala.version}</scalaVersion> 
       <args> 
        <arg>-target:jvm-${java.major.minor.version}</arg> 
       </args> 
      </configuration> 
     </plugin> 
     <plugin> 
      <artifactId>maven-shade-plugin</artifactId> 
      <version>2.4</version> 
      <executions> 
       <execution> 
        <phase>package</phase> 
        <goals> 
         <goal>shade</goal> 
        </goals> 
        <configuration> 
         <finalName>ede-products-uis-spark-streaming</finalName> 
         <shadedArtifactAttached>false</shadedArtifactAttached> 
         <artifactSet> 
          <includes> 
           <include>*:*</include> 
          </includes> 
         </artifactSet> 
         <filters> 
          <filter> 
           <artifact>*:*</artifact> 
           <excludes> 
            <exclude>META-INF/*.SF</exclude> 
            <exclude>META-INF/*.DSA</exclude> 
            <exclude>META-INF/*.RSA</exclude> 
           </excludes> 
          </filter> 
         </filters> 
         <transformers> 
          <transformer 
            implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/> 
          <transformer 
            implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer"> 
           <resource>reference.conf</resource> 
          </transformer> 
          <transformer 
            implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer"> 
           <resource>log4j.properties</resource> 
          </transformer> 
          <transformer 
            implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> 
           <mainClass>${spark.streaming.mainclass}</mainClass> 
          </transformer> 
         </transformers> 
         <relocations> 
          <relocation> 
           <pattern>org.eclipse.jetty</pattern> 
           <shadedPattern>org.spark-project.jetty</shadedPattern> 
           <includes> 
            <include>org.eclipse.jetty.**</include> 
           </includes> 
          </relocation> 
          <relocation> 
           <pattern>com.google.common</pattern> 
           <shadedPattern>org.spark-project.guava</shadedPattern> 
           <excludes> 
            <exclude>com/google/common/base/Absent*</exclude> 
            <exclude>com/google/common/base/Function</exclude> 
            <exclude>com/google/common/base/Optional*</exclude> 
            <exclude>com/google/common/base/Present*</exclude> 
            <exclude>com/google/common/base/Supplier</exclude> 
           </excludes> 
          </relocation> 
         </relocations> 
        </configuration> 
       </execution> 
      </executions> 
      <configuration> 
       <shadedArtifactAttached>false</shadedArtifactAttached> 
       <artifactSet> 
        <includes> 
         <include>*:*</include> 
        </includes> 
       </artifactSet> 
       <filters> 
        <filter> 
         <artifact>*:*</artifact> 
         <excludes> 
          <exclude>META-INF/*.SF</exclude> 
          <exclude>META-INF/*.DSA</exclude> 
          <exclude>META-INF/*.RSA</exclude> 
         </excludes> 
        </filter> 
       </filters> 
       <transformers> 
        <transformer 
          implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/> 
        <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer"> 
         <resource>reference.conf</resource> 
        </transformer> 
        <transformer 
          implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer"> 
         <resource>log4j.properties</resource> 
        </transformer> 
        <transformer 
          implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> 
         <mainClass>${spark.streaming.mainclass}</mainClass> 
        </transformer> 
       </transformers> 
       <relocations> 
        <relocation> 
         <pattern>org.eclipse.jetty</pattern> 
         <shadedPattern>org.spark-project.jetty</shadedPattern> 
         <includes> 
          <include>org.eclipse.jetty.**</include> 
         </includes> 
        </relocation> 
        <relocation> 
         <pattern>com.google.common</pattern> 
         <shadedPattern>org.spark-project.guava</shadedPattern> 
         <excludes> 
          <exclude>com/google/common/base/Absent*</exclude> 
          <exclude>com/google/common/base/Function</exclude> 
          <exclude>com/google/common/base/Optional*</exclude> 
          <exclude>com/google/common/base/Present*</exclude> 
          <exclude>com/google/common/base/Supplier</exclude> 
         </excludes> 
        </relocation> 
       </relocations> 
      </configuration> 
     </plugin> 

    </plugins> 
</build> 

Antwort

2

Sie müssen die "zur Verfügung gestellt" Tag von spark-Streaming Kafka entfernen, da sie nicht auf dem Cluster ist:

<dependency> 
    <groupId>org.apache.spark</groupId> 
    <artifactId>spark-streaming-kafka-0-8_${scala.major.minor.version}</artifactId> 
    <version>${spark.version}</version> 
</dependency> 

Wenn Sie Ihre Uber-JAR erstellen, wird auch Kafka darin enthalten sein, um es zur Laufzeit dem Klassenlader zur Verfügung zu stellen.

+1

Danke. Das funktioniert. Ich habe es total vermisst. – Swapnil

Verwandte Themen