Skip to content

Commit 6a99371

Browse files
author
Arthur Rand
authored
wip, making dcos spark secret command (apache#234)
[SPARK-601] Test RPC authentication, allow users to generate a secret client-side
1 parent bd2cb26 commit 6a99371

File tree

5 files changed

+114
-25
lines changed

5 files changed

+114
-25
lines changed

cli/dcos-spark/main.go

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ type SparkCommand struct {
2424
submitDockerImage string
2525
submitDcosSpace string
2626
submitEnv map[string]string
27+
secretPath string
2728

2829
statusSkipMessage bool
2930

@@ -145,6 +146,24 @@ func (cmd *SparkCommand) runWebui(a *kingpin.Application, e *kingpin.ParseElemen
145146
return nil
146147
}
147148

149+
func (cmd *SparkCommand) runGenerateSecret(a *kingpin.Application, e *kingpin.ParseElement, c *kingpin.ParseContext) error {
150+
secret, err := GetRandomStringSecret()
151+
152+
if err != nil {
153+
return err
154+
}
155+
156+
_, err = client.RunCLICommand(
157+
"security", "secrets", "create", cmd.secretPath, fmt.Sprintf("-v %s", secret))
158+
159+
if err != nil {
160+
log.Fatalf("Unable to create secret, %s", err)
161+
return err
162+
}
163+
164+
return err
165+
}
166+
148167
func handleCommands(app *kingpin.Application) {
149168
cmd := &SparkCommand{submitEnv: make(map[string]string)}
150169
run := app.Command("run", "Submit a job to the Spark Mesos Dispatcher").Action(cmd.runSubmit)
@@ -181,6 +200,10 @@ func handleCommands(app *kingpin.Application) {
181200
kill := app.Command("kill", "Aborts a submitted Spark job").Action(cmd.runKill)
182201
kill.Arg("submission-id", "The ID of the Spark job").Required().StringVar(&cmd.submissionId)
183202

203+
secret := app.Command("secret", "Make a shared secret, used for RPC authentication").
204+
Action(cmd.runGenerateSecret)
205+
secret.Arg("secret_path", "path and name for the secret").Required().StringVar(&cmd.secretPath)
206+
184207
app.Command("webui", "Returns the Spark Web UI URL").Action(cmd.runWebui)
185208
}
186209

cli/dcos-spark/secretGenerator.go

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
package main
2+
3+
import (
4+
"crypto/rand"
5+
"encoding/base64"
6+
)
7+
8+
const KEYLENGTH = 128
9+
10+
func generateRandomBytes(n int) ([]byte, error) {
11+
// https://elithrar.github.io/article/generating-secure-random-numbers-crypto-rand/
12+
b := make([]byte, n)
13+
_, err := rand.Read(b)
14+
if err != nil {
15+
return nil, err
16+
}
17+
return b, nil
18+
}
19+
20+
func GetRandomStringSecret() (string, error) {
21+
b, err := generateRandomBytes(KEYLENGTH)
22+
return base64.URLEncoding.EncodeToString(b), err
23+
}
24+

cli/dcos-spark/submit_builder.go

Lines changed: 14 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,12 @@ package main
33
import (
44
"bufio"
55
"bytes"
6-
"encoding/base64"
76
"encoding/json"
87
"errors"
98
"fmt"
109
"github.com/mesosphere/dcos-commons/cli/client"
1110
"github.com/mesosphere/dcos-commons/cli/config"
1211
"gopkg.in/alecthomas/kingpin.v3-unstable"
13-
"io/ioutil"
1412
"log"
1513
"net/url"
1614
"os"
@@ -22,6 +20,10 @@ var keyWhitespaceValPattern = regexp.MustCompile("(.+)\\s+(.+)")
2220
var backslashNewlinePattern = regexp.MustCompile("\\s*\\\\s*\\n\\s+")
2321
var collapseSpacesPattern = regexp.MustCompile(`[\s\p{Zs}]{2,}`)
2422

23+
const SECRET_REFERENCE_TEMPLATE = "spark.mesos.%s.secret.names"
24+
const SECRET_FILENAME_TEMPLATE = "spark.mesos.%s.secret.filenames"
25+
const SECRET_ENVKEY_TEMPLATE = "spark.mesos.%s.secret.envkeys"
26+
2527
type sparkVal struct {
2628
flagName string
2729
propName string
@@ -241,8 +243,7 @@ func sparkSubmitHelp() string {
241243
func prepareBase64Secret(secretPath string, isEncoded bool) string {
242244
ss := strings.Split(secretPath, "/")
243245
s := ss[len(ss) - 1] // The secret file without any slashes
244-
// TODO document how secret formatting works w.r.t decoding
245-
// secrets with __dcos_base64__ will be decoded by mesos or spark
246+
// secrets with __dcos_base64__ will be decoded by Mesos
246247
if strings.HasPrefix(s, "__dcos_base64__") || strings.HasSuffix(s, "base64") {
247248
// if we have the .base64, maintain the whole thing spark-env will decode it
248249
return strings.TrimPrefix(s, "__dcos_base64__")
@@ -255,9 +256,11 @@ func prepareBase64Secret(secretPath string, isEncoded bool) string {
255256
}
256257

257258
func addArgsForFileBasedSecret(args *sparkArgs, secretPath, property string) {
258-
args.properties["spark.mesos.driver.secret.names"] = secretPath
259+
secretRefProp := fmt.Sprintf(SECRET_REFERENCE_TEMPLATE, "driver")
260+
secretFileProp := fmt.Sprintf(SECRET_FILENAME_TEMPLATE, "driver")
261+
appendToProperty(secretRefProp, secretPath, args)
262+
appendToProperty(secretFileProp, prepareBase64Secret(secretPath, true), args)
259263
args.properties[property] = prepareBase64Secret(secretPath, false)
260-
args.properties["spark.mesos.driver.secret.filenames"] = prepareBase64Secret(secretPath, true)
261264
}
262265

263266
func setupKerberosAuthArgs(args *sparkArgs) error {
@@ -271,9 +274,9 @@ func setupKerberosAuthArgs(args *sparkArgs) error {
271274
return nil
272275
}
273276
if args.tgtSecretValue != "" { // using secret by value
274-
args.properties["spark.mesos.driver.secret.values"] = args.tgtSecretValue
277+
appendToProperty("spark.mesos.driver.secret.values", args.tgtSecretValue, args)
275278
args.properties["spark.mesos.driverEnv.KRB5CCNAME"] = "tgt"
276-
args.properties["spark.mesos.driver.secret.filenames"] = "tgt.base64"
279+
appendToProperty(fmt.Sprintf(SECRET_FILENAME_TEMPLATE, "driver"), "tgt.base64", args)
277280
return nil
278281
}
279282
return errors.New(fmt.Sprintf("Unable to add Kerberos args, got args %s", args))
@@ -306,9 +309,9 @@ func setupTLSArgs(args *sparkArgs) {
306309

307310
taskTypes :=[]string{"driver", "executor"}
308311
for _, taskType := range taskTypes {
309-
appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.names", taskType), joinedPaths, args)
310-
appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.filenames", taskType), joinedFilenames, args)
311-
appendToPropertyIfSet(fmt.Sprintf("spark.mesos.%s.secret.envkeys", taskType), joinedEnvkeys, args)
312+
appendToProperty(fmt.Sprintf(SECRET_REFERENCE_TEMPLATE, taskType), joinedPaths, args)
313+
appendToProperty(fmt.Sprintf(SECRET_FILENAME_TEMPLATE, taskType), joinedFilenames, args)
314+
appendToPropertyIfSet(fmt.Sprintf(SECRET_ENVKEY_TEMPLATE, taskType), joinedEnvkeys, args)
312315
}
313316

314317
// Passwords
@@ -510,20 +513,6 @@ func appendToPropertyIfSet(propValue, toAppend string, args *sparkArgs) {
510513
}
511514
}
512515

513-
func getBase64Content(path string) string {
514-
log.Printf("Opening file %s", path)
515-
data, err := ioutil.ReadFile(path)
516-
if err != nil {
517-
log.Fatal(err)
518-
}
519-
520-
var encodebuf bytes.Buffer
521-
encoder := base64.NewEncoder(base64.StdEncoding, &encodebuf)
522-
encoder.Write(data)
523-
encoder.Close() // must be called before returning string to ensure flush
524-
return encodebuf.String()
525-
}
526-
527516
func buildSubmitJson(cmd *SparkCommand) (string, error) {
528517
// first, import any values in the provided properties file (space separated "key val")
529518
// then map applicable envvars

docs/security.md

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,34 @@ See the [Secrets Documentation about spaces][13] for more details about spaces.
8686
the keystore and truststore secrets will also show up as environment-based secrets,
8787
due to the way secrets are implemented. You can ignore these extra environment variables.
8888

89+
# Spark SASL (RPC endpoint authentication)
90+
Spark uses the Simple Authentication and Security Layer (SASL) to authenticate Executors with the Driver and for encrypting messages sent between components. This functionality relies on a shared secret between all components you expect to communicate with each other. A secret can be generated with the DC/OS Spark CLI
91+
```bash
92+
dcos spark secret <secret_path>
93+
# for example
94+
dcos spark secret /sparkAuthSecret
95+
```
96+
This will generate a random secret and upload it to the DC/OS secrets store [14] at the designated path. To use this secret for RPC authentication, add the following configurations to your CLI command:
97+
```bash
98+
dcos spark run --submit-args="\
99+
...
100+
--conf spark.mesos.containerizer=mesos \ # Mesos UCR is required for secrets
101+
--conf spark.authenticate=true \ # tell Spark to use authentication
102+
--conf spark.authenticate.enableSaslEncryption=true \ # tell Spark to encrypt with SASL
103+
--conf spark.authenticate.secret=sparkauthsecret.secret \ # name of file-based secret for Driver, you may change the name
104+
--conf spark.executorEnv._SPARK_AUTH_SECRET=sparkauthsecret.secret \ # name of file-based secret for the Executors
105+
--conf spark.mesos.driver.secret.names=<secret_path> \ # secret path generated in the previous step, for Driver
106+
--conf spark.mesos.driver.secret.filenames=sparkauthsecret.secret \ # tell Mesos to put the secret in this file in the Driver
107+
--conf spark.mesos.executor.secret.names=<secret_path> \ # secret path generated in previous step for Executor
108+
--conf spark.mesos.executor.secret.filenames=sparkauthsecret.secret \ # tell Mesos to put the secret in this file for the Executors
109+
...
110+
"
111+
112+
```
113+
114+
89115

90116
[11]: https://docs.mesosphere.com/1.9/overview/architecture/components/
91117
[12]: http://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html
92118
[13]: https://docs.mesosphere.com/1.10/security/#spaces
119+
[14]: https://docs.mesosphere.com/latest/security/secrets/

tests/test_spark.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
import shakedown
1515

1616
import sdk_utils
17+
import sdk_cmd
1718

1819
from tests import s3
1920
from tests import utils
@@ -51,6 +52,31 @@ def test_jar(app_name="/spark"):
5152
args=["--class", 'com.typesafe.spark.test.mesos.framework.runners.SparkJobRunner'])
5253

5354

55+
@pytest.mark.sanity
56+
def test_rpc_auth():
57+
secret_name = "sparkauth"
58+
59+
rc, stdout, stderr = sdk_cmd.run_raw_cli("spark secret /{}".format(secret_name))
60+
assert rc == 0, "Failed to generate Spark auth secret, stderr {err} stdout {out}".format(err=stderr, out=stdout)
61+
62+
args = ["--conf", "spark.mesos.containerizer=mesos",
63+
"--conf", "spark.authenticate=true",
64+
"--conf", "spark.authenticate.secret=sparkauthsecret.secret",
65+
"--conf", "spark.authenticate.enableSaslEncryption=true",
66+
"--conf", "spark.executorEnv._SPARK_AUTH_SECRET=sparkauthsecret.secret",
67+
"--conf", "spark.mesos.driver.secret.names={}".format(secret_name),
68+
"--conf", "spark.mesos.driver.secret.filenames=sparkauthsecret.secret",
69+
"--conf", "spark.mesos.executor.secret.names={}".format(secret_name),
70+
"--conf", "spark.mesos.executor.secret.filenames=sparkauthsecret.secret",
71+
"--class", "org.apache.spark.examples.SparkPi"]
72+
73+
utils.run_tests(app_url=utils.SPARK_EXAMPLES,
74+
app_args="100",
75+
expected_output="Pi is roughly 3",
76+
app_name="/spark",
77+
args=args)
78+
79+
5480
@pytest.mark.sanity
5581
def test_sparkPi():
5682
utils.run_tests(app_url=utils.SPARK_EXAMPLES,

0 commit comments

Comments
 (0)