Revision 972338ae771c99fc63acb5f75fdfa2f6d2c0ffab authored by Dongjoon Hyun on 27 June 2022, 08:29:57 UTC, committed by Dongjoon Hyun on 27 June 2022, 08:31:01 UTC
This PR aims to fix a regression in Apache Spark 3.3.0 that disallows pod name prefixes longer than 63 characters.

Although a Pod's `hostname` follows [DNS Label Names](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names), the Pod name itself follows [DNS Subdomain Names](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-subdomain-names), whose maximum length is 253 characters.
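
For context, a minimal, illustrative shell sketch of the two limits involved (this is not the code touched by this PR, and the `prefix` value is made up):

```bash
# Illustrative only: contrast the two Kubernetes name limits.
# Pod names follow DNS Subdomain rules (max 253 characters);
# hostnames follow DNS Label rules (max 63 characters).
prefix="spark-$(printf 'x%.0s' {1..80})"   # an 86-character prefix
echo "prefix length: ${#prefix}"
if [ "${#prefix}" -le 253 ]; then
  echo "fits the Pod name (DNS Subdomain) limit"
fi
if [ "${#prefix}" -gt 63 ]; then
  echo "exceeds the hostname (DNS Label) limit"
fi
```

A prefix like this was rejected by the 3.3.0 validation even though only the hostname, not the Pod name itself, is bound by the 63-character rule.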

Yes, this fixes a regression.

Pass the CIs with the updated unit tests.

Closes #36999 from dongjoon-hyun/SPARK-39614.

Authored-by: Dongjoon Hyun <dongjoon@apache.org>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
(cherry picked from commit c15508f0d6a49738db5edf7eb139cc1d438af9a9)
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
stop-worker.sh
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# A shell script to stop all workers on a single machine
#
# Environment variables
#
#   SPARK_WORKER_INSTANCES The number of worker instances that should be
#                          running on this worker machine.  Default is 1.

# Usage: stop-worker.sh
#   Stops all workers on this worker machine
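#
# Example (illustrative): if conf/spark-env.sh sets SPARK_WORKER_INSTANCES=2,
# running this script stops both worker instances on this machine:
#   ./sbin/stop-worker.sh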

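# Resolve SPARK_HOME from this script's location if it is not already set.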
if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"
fi

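# Load sbin helper settings and the per-node environment (conf/spark-env.sh),
# which may define SPARK_WORKER_INSTANCES.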
. "${SPARK_HOME}/sbin/spark-config.sh"

. "${SPARK_HOME}/bin/load-spark-env.sh"

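# Stop the single default worker (instance 1) when SPARK_WORKER_INSTANCES is
# unset; otherwise stop each numbered worker instance.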
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
  "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
else
  for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
    "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
  done
fi