Skip to content

Commit

Permalink
Add a spark-shell smoke test to premerge and nightly
Browse files Browse the repository at this point in the history
Contributes to NVIDIA#5704

Signed-off-by: Gera Shegalov <[email protected]>
  • Loading branch information
gerashegalov committed Oct 20, 2023
1 parent 02ff24e commit 649e2cc
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 1 deletion.
13 changes: 12 additions & 1 deletion integration_tests/run_pyspark_from_build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -309,7 +309,18 @@ EOF
fi
export PYSP_TEST_spark_rapids_memory_gpu_allocSize=${PYSP_TEST_spark_rapids_memory_gpu_allocSize:-'1536m'}

if ((${#TEST_PARALLEL_OPTS[@]} > 0));
# Optional smoke test: set SPARK_SHELL_SMOKE_TEST to any non-zero value to run
# a single Scala aggregation through spark-shell with the RAPIDS SQL plugin
# enabled, instead of launching the pytest suites in the branches below.
SPARK_SHELL_SMOKE_TEST="${SPARK_SHELL_SMOKE_TEST:-0}"
if [[ "$SPARK_SHELL_SMOKE_TEST" != "0" ]]; then
echo "Running spark-shell smoke test..."
# The Scala snippet is fed to spark-shell on stdin via a here-string placed
# before the command. Expected result: sum of 0..99 = 4950.
# local-cluster[1,1,1024] (1 worker, 1 core, 1024 MB) launches a separate
# executor JVM, so --jars actually exercises the executor classpath, unlike
# plain local mode. stderr is discarded so only the REPL's stdout reaches
# grep; maxExecutorRetries=0 presumably makes executor failure fatal rather
# than retried — confirm against Spark deploy docs.
<<< 'spark.range(100).agg(Map("id" -> "sum")).collect()' \
"$SPARK_HOME"/bin/spark-shell \
--master local-cluster[1,1,1024] \
--jars "${PYSP_TEST_spark_jars}" \
--conf spark.plugins=com.nvidia.spark.SQLPlugin \
--conf spark.deploy.maxExecutorRetries=0 2>/dev/null \
| grep -F 'res0: Array[org.apache.spark.sql.Row] = Array([4950])'
# NOTE(review): reaching this line as a success indicator assumes the script
# runs under `set -e` (ideally with pipefail) so a non-matching grep aborts
# first — TODO confirm; otherwise a failed match would still print SUCCESS.
echo "SUCCESS spark-shell smoke test..."
elif ((${#TEST_PARALLEL_OPTS[@]} > 0));
then
exec python "${RUN_TESTS_COMMAND[@]}" "${TEST_PARALLEL_OPTS[@]}" "${TEST_COMMON_OPTS[@]}"
else
Expand Down
1 change: 1 addition & 0 deletions jenkins/spark-premerge-build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ mvn_verify() {

# Triggering here until we change the jenkins file
rapids_shuffle_smoke_test
SPARK_SHELL_SMOKE_TEST=1 ./integration_tests/run_pyspark_from_build.sh
}

rapids_shuffle_smoke_test() {
Expand Down
2 changes: 2 additions & 0 deletions jenkins/spark-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -270,6 +270,8 @@ TEST_MODE=${TEST_MODE:-'DEFAULT'}
if [[ $TEST_MODE == "DEFAULT" ]]; then
./run_pyspark_from_build.sh

SPARK_SHELL_SMOKE_TEST=1 ./integration_tests/run_pyspark_from_build.sh

# ParquetCachedBatchSerializer cache_test
PYSP_TEST_spark_sql_cache_serializer=com.nvidia.spark.ParquetCachedBatchSerializer \
./run_pyspark_from_build.sh -k cache_test
Expand Down

0 comments on commit 649e2cc

Please sign in to comment.