pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements.  See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License.  You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-parent_2.11</artifactId>
    <version>2.2.1-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>spark-assembly_2.11</artifactId>
  <name>Spark Project Assembly</name>
  <url>http://spark.apache.org/</url>
  <packaging>pom</packaging>

  <properties>
    <sbt.project.name>assembly</sbt.project.name>
    <build.testJarPhase>none</build.testJarPhase>
    <build.copyDependenciesPhase>package</build.copyDependenciesPhase>
  </properties>
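  <!--
    Note (based on how the parent POM wires these properties into its plugin
    executions): setting build.testJarPhase to "none" skips creating a test
    jar for this module, and binding build.copyDependenciesPhase to "package"
    copies the module's runtime dependencies into the build output during the
    package phase.
  -->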

  <dependencies>
    <!-- Prevent our dummy JAR from being included in Spark distributions or uploaded to YARN -->
    <dependency>
      <groupId>org.spark-project.spark</groupId>
      <artifactId>unused</artifactId>
      <version>1.0.0</version>
      <scope>provided</scope>
    </dependency>
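    <!--
      Background (an assumption based on how the parent POM configures the
      shade plugin): only this placeholder artifact is listed for inclusion,
      so shading still runs and produces a dependency-reduced POM without
      bundling any real dependencies into the jar.
    -->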
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-graphx_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-repl_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>

    <!--
      Because we don't shade dependencies anymore, we need to restore Guava to compile scope so
      that the libraries Spark depends on have it available. We'll package the version that Spark
      uses (14.0.1), which is not the same version the Hadoop dependencies pull in, but it works.
    -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <scope>${hadoop.deps.scope}</scope>
    </dependency>
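    <!--
      Note: hadoop.deps.scope defaults to "compile", so Guava is bundled in a
      normal build; activating the hadoop-provided profile below flips the
      scope to "provided", which keeps Guava out of the distribution together
      with the rest of the Hadoop-scoped dependencies.
    -->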
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-deploy-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-install-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <!-- Zip the pyspark archives so Python applications can run in YARN mode -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>run</goal>
            </goals>
          </execution>
        </executions>
        <configuration>
          <target>
            <delete file="${basedir}/../python/lib/pyspark.zip"/>
            <zip destfile="${basedir}/../python/lib/pyspark.zip">
              <fileset dir="${basedir}/../python/" includes="pyspark/**/*"/>
            </zip>
          </target>
        </configuration>
      </plugin>
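      <!--
        The Ant target above is roughly equivalent to the following shell
        commands (illustrative only):

          rm -f python/lib/pyspark.zip
          (cd python && zip -r lib/pyspark.zip pyspark)
      -->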
    </plugins>
  </build>

  <profiles>
    <profile>
      <id>yarn</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-yarn_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>mesos</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-mesos_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>hive</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>hive-thriftserver</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>spark-ganglia-lgpl</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-ganglia-lgpl_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>bigtop-dist</id>
      <!-- This profile uses the assembly plugin to create a special "dist" package for BigTop
           that contains Spark but not the Hadoop JARs it depends on. -->
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>3.0.0</version>
            <executions>
              <execution>
                <id>dist</id>
                <phase>package</phase>
                <goals>
                  <goal>single</goal>
                </goals>
                <configuration>
                  <descriptors>
                    <descriptor>src/main/assembly/assembly.xml</descriptor>
                  </descriptors>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
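    <!--
      Illustrative usage, assuming a standard Spark checkout:
        ./build/mvn -Pbigtop-dist -DskipTests package
      This runs the assembly plugin's "single" goal during the package phase
      with the descriptor configured above.
    -->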

    <!-- Profiles that disable inclusion of certain dependencies. -->
    <profile>
      <id>hadoop-provided</id>
      <properties>
        <hadoop.deps.scope>provided</hadoop.deps.scope>
      </properties>
    </profile>
    <profile>
      <id>hive-provided</id>
      <properties>
        <hive.deps.scope>provided</hive.deps.scope>
      </properties>
    </profile>
    <profile>
      <id>parquet-provided</id>
      <properties>
        <parquet.deps.scope>provided</parquet.deps.scope>
      </properties>
    </profile>
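    <!--
      Example (illustrative): build with YARN support while letting the
      cluster supply the Hadoop and Hive jars:
        ./build/mvn -Pyarn -Phive -Phadoop-provided -Phive-provided -DskipTests package
    -->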
  </profiles>
</project>