diff --git a/examples/localstack.yml b/examples/localstack.yml
index 3b64806a..d461280d 100644
--- a/examples/localstack.yml
+++ b/examples/localstack.yml
@@ -38,6 +38,7 @@ spec:
             - /bin/sh
             - -c
             - >
+              awslocal s3 mb s3://spark-events;
               awslocal s3 mb s3://data;
               awslocal s3 cp /opt/code/localstack/Makefile s3://data/
---
diff --git a/examples/spark-history-server.yaml b/examples/spark-history-server.yaml
new file mode 100644
index 00000000..644a0feb
--- /dev/null
+++ b/examples/spark-history-server.yaml
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+apiVersion: spark.apache.org/v1beta1
+kind: SparkApplication
+metadata:
+  name: spark-history-server
+spec:
+  mainClass: "org.apache.spark.deploy.history.HistoryServer"
+  sparkConf:
+    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.1"
+    spark.jars.ivy: "/tmp/.ivy2.5.2"
+    spark.driver.memory: "2g"
+    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
+    spark.kubernetes.container.image: "apache/spark:4.0.0-java21-scala"
+    spark.history.fs.logDirectory: "s3a://spark-events"
+    spark.history.fs.cleaner.enabled: "true"
+    spark.history.fs.cleaner.maxAge: "30d"
+    spark.history.fs.cleaner.maxNum: "100"
+    spark.history.fs.eventLog.rolling.maxFilesToRetain: "10"
+    spark.hadoop.fs.defaultFS: "s3a://spark-events"
+    spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
+    spark.hadoop.fs.s3a.path.style.access: "true"
+    spark.hadoop.fs.s3a.access.key: "test"
+    spark.hadoop.fs.s3a.secret.key: "test"
+  runtimeVersions:
+    sparkVersion: "4.0.0"
+  applicationTolerations:
+    restartConfig:
+      restartPolicy: Always
+      maxRestartAttempts: 9223372036854775807
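
Note: for the history server to have anything to display, other SparkApplications must write their event logs into the same s3://spark-events bucket created by the LocalStack init script above. A minimal sketch of the sparkConf block such a job might carry, assuming the same LocalStack endpoint and dummy test/test credentials used in this diff (the spark.eventLog.* keys are standard Spark settings and are not part of this change):

  sparkConf:
    # emit event logs to the bucket the history server reads via
    # spark.history.fs.logDirectory
    spark.eventLog.enabled: "true"
    spark.eventLog.dir: "s3a://spark-events"
    # same S3A wiring as the history server so the job can reach LocalStack
    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.1"
    spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
    spark.hadoop.fs.s3a.path.style.access: "true"
    spark.hadoop.fs.s3a.access.key: "test"
    spark.hadoop.fs.s3a.secret.key: "test"

Once LocalStack and the operator are running, the new manifest can be applied with, for example, kubectl apply -f examples/spark-history-server.yaml, and completed runs written to s3a://spark-events should then appear in the history server UI.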