<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements.  See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License.  You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-parent_2.12</artifactId>
    <version>3.3.0</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>spark-mllib_2.12</artifactId>
  <properties>
    <sbt.project.name>mllib</sbt.project.name>
  </properties>
  <packaging>jar</packaging>
  <name>Spark Project ML Library</name>
  <url>https://spark.apache.org/</url>

  <dependencies>
    <dependency>
      <groupId>org.scala-lang.modules</groupId>
      <artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-graphx_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib-local_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib-local_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <!-- #if scala-2.13 --><!--
    <dependency>
      <groupId>org.scala-lang.modules</groupId>
      <artifactId>scala-parallel-collections_${scala.binary.version}</artifactId>
    </dependency>
    --><!-- #endif scala-2.13 -->
    <dependency>
      <groupId>org.scalanlp</groupId>
      <artifactId>breeze_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-math3</artifactId>
    </dependency>
    <dependency>
      <groupId>org.scalacheck</groupId>
      <artifactId>scalacheck_${scala.binary.version}</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-inline</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.jpmml</groupId>
      <artifactId>pmml-model</artifactId>
      <scope>compile</scope>
    </dependency>
    <!-- JPMML seems to be the piece that needs JAXB right now: -->
    <dependency>
      <groupId>org.glassfish.jaxb</groupId>
      <artifactId>jaxb-runtime</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-tags_${scala.binary.version}</artifactId>
    </dependency>

    <!--
      This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
      them will yield errors.
    -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-tags_${scala.binary.version}</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>

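    <!-- dev.ludovic.netlib supplies the BLAS/LAPACK/ARPACK implementations (pure-Java with
         optional native acceleration) that back MLlib's linear algebra. -->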
    <dependency>
      <groupId>dev.ludovic.netlib</groupId>
      <artifactId>blas</artifactId>
    </dependency>
    <dependency>
      <groupId>dev.ludovic.netlib</groupId>
      <artifactId>lapack</artifactId>
    </dependency>
    <dependency>
      <groupId>dev.ludovic.netlib</groupId>
      <artifactId>arpack</artifactId>
    </dependency>

  </dependencies>

  <build>
    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-dependency-plugin</artifactId>
        <executions>
          <!-- When using SPARK_PREPEND_CLASSES Spark classes compiled locally don't use
               shaded deps. So here we store jars in their original form which are added
               when the classpath is computed. -->
          <!-- See similar execution in core/pom.xml -->
          <execution>
            <id>copy-dependencies</id>
            <phase>package</phase>
            <goals>
              <goal>copy-dependencies</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}</outputDirectory>
              <overWriteReleases>false</overWriteReleases>
              <overWriteSnapshots>false</overWriteSnapshots>
              <overWriteIfNewer>true</overWriteIfNewer>
              <useSubDirectoryPerType>true</useSubDirectoryPerType>
              <includeGroupIds>org.jpmml</includeGroupIds>
              <silent>true</silent>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>

</project>