Revision 399c397e7035665c928b1d439a860f9e7b1ce3b3 authored by Dongjoon Hyun on 01 September 2022, 16:34:55 UTC, committed by Dongjoon Hyun on 01 September 2022, 16:35:06 UTC
### What changes were proposed in this pull request?

This PR aims to add a new test tag, `decomTestTag`, to the K8s integration tests.
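
For reference, here is a minimal ScalaTest sketch of how a tag such as `decomTestTag` can be declared and attached to a test so it becomes selectable on the command line. The suite name and test body below are illustrative only, not the actual code added by this PR.
```
import org.scalatest.Tag
import org.scalatest.funsuite.AnyFunSuite

// A tag object whose name ("decom") is what the tag filters match against.
object decomTestTag extends Tag("decom")

class ExampleDecommissionSuite extends AnyFunSuite {
  // Attaching the tag lets this test be excluded (-l decom) or selected (-n decom).
  test("Test basic decommissioning", decomTestTag) {
    assert(1 + 1 == 2) // placeholder body; the real logic lives in the K8s suites
  }
}
```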

### Why are the changes needed?

Decommission-related tests took over 6 minutes (`363s`). It would be helpful if we could run them selectively.
```
[info] - Test basic decommissioning (44 seconds, 51 milliseconds)
[info] - Test basic decommissioning with shuffle cleanup (44 seconds, 450 milliseconds)
[info] - Test decommissioning with dynamic allocation & shuffle cleanups (2 minutes, 43 seconds)
[info] - Test decommissioning timeouts (44 seconds, 389 milliseconds)
[info] - SPARK-37576: Rolling decommissioning (1 minute, 8 seconds)
```
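
As context for how a tag becomes excludable, here is a hedged `build.sbt` sketch of the usual ScalaTest wiring; the exact property-to-argument translation in Spark's actual build may differ.
```
// build.sbt sketch (assumption, not Spark's actual build definition):
// pass ScalaTest's -l (exclude-by-tag) runner argument so tests tagged
// "decom" are skipped, mirroring -Dtest.exclude.tags=decom.
Test / testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-l", "decom")
```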

### Does this PR introduce _any_ user-facing change?

No, this is a test-only change.

### How was this patch tested?

Pass the CIs and test manually.
```
$ build/sbt -Psparkr -Pkubernetes -Pkubernetes-integration-tests \
-Dspark.kubernetes.test.deployMode=docker-desktop "kubernetes-integration-tests/test" \
-Dtest.exclude.tags=minikube,local,decom
...
[info] KubernetesSuite:
[info] - Run SparkPi with no resources (12 seconds, 441 milliseconds)
[info] - Run SparkPi with no resources & statefulset allocation (11 seconds, 949 milliseconds)
[info] - Run SparkPi with a very long application name. (11 seconds, 999 milliseconds)
[info] - Use SparkLauncher.NO_RESOURCE (11 seconds, 846 milliseconds)
[info] - Run SparkPi with a master URL without a scheme. (11 seconds, 176 milliseconds)
[info] - Run SparkPi with an argument. (11 seconds, 868 milliseconds)
[info] - Run SparkPi with custom labels, annotations, and environment variables. (11 seconds, 858 milliseconds)
[info] - All pods have the same service account by default (11 seconds, 5 milliseconds)
[info] - Run extraJVMOptions check on driver (5 seconds, 757 milliseconds)
[info] - Verify logging configuration is picked from the provided SPARK_CONF_DIR/log4j2.properties (12 seconds, 467 milliseconds)
[info] - Run SparkPi with env and mount secrets. (21 seconds, 119 milliseconds)
[info] - Run PySpark on simple pi.py example (13 seconds, 129 milliseconds)
[info] - Run PySpark to test a pyfiles example (14 seconds, 937 milliseconds)
[info] - Run PySpark with memory customization (12 seconds, 195 milliseconds)
[info] - Run in client mode. (11 seconds, 343 milliseconds)
[info] - Start pod creation from template (11 seconds, 975 milliseconds)
[info] - SPARK-38398: Schedule pod creation from template (11 seconds, 901 milliseconds)
[info] - Run SparkR on simple dataframe.R example (14 seconds, 305 milliseconds)
...
```

Closes #37755 from dongjoon-hyun/SPARK-40304.

Authored-by: Dongjoon Hyun <dongjoon@apache.org>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
(cherry picked from commit fd0498f81df72c196f19a5b26053660f6f3f4d70)
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
1 parent 7c19df6
Raw file: appveyor.yml
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

version: "{build}-{branch}"

shallow_clone: true

platform: x64
configuration: Debug

branches:
  only:
    - master

only_commits:
  files:
    - appveyor.yml
    - dev/appveyor-install-dependencies.ps1
    - R/
    - sql/core/src/main/scala/org/apache/spark/sql/api/r/
    - core/src/main/scala/org/apache/spark/api/r/
    - mllib/src/main/scala/org/apache/spark/ml/r/
    - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
    - bin/*.cmd

cache:
  - C:\Users\appveyor\.m2

install:
  # Install SBT and dependencies
  - ps: .\dev\appveyor-install-dependencies.ps1
  # Required packages for R unit tests. xml2 is required to use the jUnit reporter in testthat.
  - cmd: Rscript -e "install.packages(c('knitr', 'rmarkdown', 'testthat', 'e1071', 'survival', 'arrow', 'xml2'), repos='https://cloud.r-project.org/')"
  - cmd: Rscript -e "pkg_list <- as.data.frame(installed.packages()[,c(1, 3:4)]); pkg_list[is.na(pkg_list$Priority), 1:2, drop = FALSE]"

build_script:
  # '-Djna.nosys=true' is required to avoid kernel32.dll load failure.
  # See SPARK-28759.
  # Ideally we should check the tests related to Hive in SparkR as well (SPARK-31745).
  - cmd: set SBT_MAVEN_PROFILES=-Psparkr
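  # Fix the sbt JVM heap at 4g and cap the JIT code cache at 128m.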
  - cmd: set SBT_OPTS=-Djna.nosys=true -Dfile.encoding=UTF-8 -Xms4096m -Xmx4096m -XX:ReservedCodeCacheSize=128m
  - cmd: sbt package
  - cmd: set SBT_MAVEN_PROFILES=
  - cmd: set SBT_OPTS=

environment:
  NOT_CRAN: true
  # See SPARK-27848. Currently installing some dependent packages causes
  # "(converted from warning) unable to identify current timezone 'C':" for an unknown reason.
  # This environment variable works around it so that SparkR can still be tested against a higher version.
  R_REMOTES_NO_ERRORS_FROM_WARNINGS: true

test_script:
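  # Runs the SparkR unit tests (R\pkg\tests\run-all.R) through spark-submit2.cmd; %CD:\=/% rewrites
  # backslashes in the current directory to forward slashes so the log4j.properties path forms a valid file: URI.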
  - cmd: .\bin\spark-submit2.cmd --driver-java-options "-Dlog4j.configuration=file:///%CD:\=/%/R/log4j.properties" --conf spark.hadoop.fs.defaultFS="file:///" R\pkg\tests\run-all.R

notifications:
  - provider: Email
    on_build_success: false
    on_build_failure: false
    on_build_status_changed: false