Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
58 commits
Select commit Hold shift + click to select a range
8b56400
Add new module and add v11 thrift protocol
AngersZhuuuu Oct 22, 2019
e30f686
add pom lincense
AngersZhuuuu Oct 23, 2019
4a50bc9
update code folder name
AngersZhuuuu Oct 23, 2019
7ffdc3b
all like hive
AngersZhuuuu Oct 23, 2019
b74d5e0
remove py/cpp/r/php
AngersZhuuuu Oct 23, 2019
fc2648f
Maven generate thrift source code
wangyum Oct 24, 2019
5365dcf
org.apache.thrift.tools:maven-thrift-plugin -> org.apache.thrift:thri…
wangyum Oct 25, 2019
95d8137
save some basic code
AngersZhuuuu Oct 26, 2019
4dc5c7e
Merge pull request #2 from wangyum/SPARK-29108
AngersZhuuuu Oct 27, 2019
24ce6d4
Merge branch 'SPARK-29018-V11' of https://github.com/AngersZhuuuu/spa…
AngersZhuuuu Oct 27, 2019
5efe8cb
Revert "save some basic code"
AngersZhuuuu Oct 27, 2019
6f7d48a
Update TCLIService.thrift
AngersZhuuuu Oct 27, 2019
cf24306
Add basic data
AngersZhuuuu Oct 27, 2019
24fce6b
save code
AngersZhuuuu Oct 28, 2019
bc36bdf
change
AngersZhuuuu Oct 28, 2019
c438673
format code
AngersZhuuuu Oct 28, 2019
a7ec8b0
format code
AngersZhuuuu Oct 29, 2019
5cf7eb0
Update SparkSQLEnv.scala
AngersZhuuuu Oct 29, 2019
0023bcb
fix logger conflict
AngersZhuuuu Oct 29, 2019
723450b
fix scala style
AngersZhuuuu Oct 29, 2019
3eb7672
start with execution hive
AngersZhuuuu Oct 29, 2019
5aa4d9d
format code add file header
AngersZhuuuu Oct 29, 2019
782d36b
FOR BUILD spark-thriftserver
AngersZhuuuu Oct 29, 2019
f2b5346
add UT class
AngersZhuuuu Oct 29, 2019
a14a9e9
fix UT case, remove can't supported UT
AngersZhuuuu Oct 29, 2019
e1bb6e1
fix for UT
AngersZhuuuu Oct 29, 2019
74fb240
fix UT
AngersZhuuuu Oct 29, 2019
2f90ed9
fix for UT
AngersZhuuuu Oct 30, 2019
cd92f3f
fix return error
AngersZhuuuu Oct 30, 2019
60dc24f
fix client convert row map
AngersZhuuuu Oct 30, 2019
409a1a3
add UT of TCLIServiceClient
AngersZhuuuu Oct 31, 2019
5771a9b
fix java code style
AngersZhuuuu Oct 31, 2019
77578c3
add processGlobalInitFile
AngersZhuuuu Oct 31, 2019
4f7cbac
Update ThriftServerSessionImpl.scala
AngersZhuuuu Oct 31, 2019
d93ab72
Keep type in scala and remove unused type
AngersZhuuuu Oct 31, 2019
ac9ad54
basic service to java
AngersZhuuuu Oct 31, 2019
795ada1
Handle to java
AngersZhuuuu Oct 31, 2019
63076d3
Operation Type State Status to java
AngersZhuuuu Oct 31, 2019
000443a
Fetch type and orientation to java
AngersZhuuuu Oct 31, 2019
5de2bf9
PatternOrIdentifier to java
AngersZhuuuu Oct 31, 2019
a6b0ed1
Interface to Java
AngersZhuuuu Oct 31, 2019
2e6d221
fix bug
AngersZhuuuu Nov 1, 2019
158b298
remove setApplicationName
AngersZhuuuu Nov 1, 2019
745ca60
Session Basic Class to java
AngersZhuuuu Nov 1, 2019
f62e577
auth and thrift service to java
AngersZhuuuu Nov 1, 2019
5fae842
Update ColumnBasedSet.scala
AngersZhuuuu Nov 1, 2019
fe39db0
Update RowBasedSet.scala
AngersZhuuuu Nov 1, 2019
1c303ba
Update type map
AngersZhuuuu Nov 1, 2019
9a32af8
make RowSetFactory clean
AngersZhuuuu Nov 1, 2019
287c6be
update
AngersZhuuuu Nov 2, 2019
f6a7736
format code
AngersZhuuuu Nov 2, 2019
f8b7351
add thrift file
AngersZhuuuu Nov 6, 2019
272ba3c
not check current java code now
AngersZhuuuu Nov 6, 2019
0de3191
remove import jdk.tools dependency
AngersZhuuuu Nov 6, 2019
6374f42
follow comment
AngersZhuuuu Nov 6, 2019
f464773
Merge branch 'master' into SPARK-29018-V11-STEP4-ADD-TEST
AngersZhuuuu Nov 7, 2019
13da926
fix for `mvn install`
AngersZhuuuu Nov 7, 2019
d0de49f
fix for `mvn install`
AngersZhuuuu Nov 7, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
auth and thrift service to java
  • Loading branch information
AngersZhuuuu committed Nov 1, 2019
commit f62e577ef0db6a6a45faaeb775c7e7344f25cd13
Original file line number Diff line number Diff line change
Expand Up @@ -1042,7 +1042,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
*/
private def isThriftServer(mainClass: String): Boolean = {
mainClass == "org.apache.spark.sql.hive.thriftserver.HiveThriftServer2" ||
mainClass == "org.apache.spark.sql.thriftserver.SparkThriftServer2"
mainClass == "org.apache.spark.sql.thriftserver.server.SparkThriftServer"
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
SparkLauncher.NO_RESOURCE);
specialClasses.put("org.apache.spark.sql.hive.thriftserver.HiveThriftServer2",
SparkLauncher.NO_RESOURCE);
specialClasses.put("org.apache.spark.sql.thriftserver.SparkThriftServer2",
specialClasses.put("org.apache.spark.sql.thriftserver.server.SparkThriftServer",
SparkLauncher.NO_RESOURCE);
}

Expand Down Expand Up @@ -401,7 +401,7 @@ boolean isClientMode(Map<String, String> userProps) {
private boolean isThriftServer(String mainClass) {
return (mainClass != null &&
(mainClass.equals("org.apache.spark.sql.hive.thriftserver.HiveThriftServer2") ||
mainClass.equals("org.apache.spark.sql.thriftserver.SparkThriftServer2")));
mainClass.equals("org.apache.spark.sql.thriftserver.server.SparkThriftServer")));
}

private List<String> findExamplesJars() {
Expand Down
2 changes: 1 addition & 1 deletion sbin/start-spark-thriftserver.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ fi

# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
CLASS="org.apache.spark.sql.thriftserver.SparkThriftServer2"
CLASS="org.apache.spark.sql.thriftserver.server.SparkThriftServer"

function usage {
echo "Usage: ./sbin/start-spark-thriftserver [options] [thrift server options]"
Expand Down
2 changes: 1 addition & 1 deletion sbin/stop-spark-thriftserver.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@ if [ -z "${SPARK_HOME}" ]; then
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.sql.thriftserver.SparkThriftServer2 1
"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.sql.thriftserver.server.SparkThriftServer 1

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.thriftserver.auth;

import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.StringTokenizer;

import javax.security.auth.Subject;

import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility functions for HTTP mode authentication.
 *
 * <p>Static helpers for (1) obtaining a Base64-encoded Kerberos service ticket suitable for an
 * HTTP {@code Negotiate} header, and (2) creating/parsing the unsigned portion of a server cookie
 * token of the form {@code cu=<username>&rn=<randomNumber>}. Not instantiable.
 */
public final class HttpAuthUtils {
  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
  public static final String AUTHORIZATION = "Authorization";
  public static final String BASIC = "Basic";
  public static final String NEGOTIATE = "Negotiate";
  private static final Logger LOG = LoggerFactory.getLogger(HttpAuthUtils.class);
  private static final String COOKIE_ATTR_SEPARATOR = "&";
  private static final String COOKIE_CLIENT_USER_NAME = "cu";
  private static final String COOKIE_CLIENT_RAND_NUMBER = "rn";
  private static final String COOKIE_KEY_VALUE_SEPARATOR = "=";
  private static final Set<String> COOKIE_ATTRIBUTES =
      new HashSet<String>(Arrays.asList(COOKIE_CLIENT_USER_NAME, COOKIE_CLIENT_RAND_NUMBER));
  // Single shared CSPRNG. The previous code seeded a new java.util.Random with
  // System.currentTimeMillis() on every call, which is predictable and produces identical
  // "random" numbers for two tokens created within the same millisecond.
  private static final java.security.SecureRandom RANDOM = new java.security.SecureRandom();

  /**
   * Obtains a Kerberos service ticket for the given server principal, Base64-encodes it, and
   * returns it as a String for use in an HTTP Negotiate header.
   *
   * @param principal the server principal (possibly with a _HOST placeholder)
   * @param host host used to resolve the server principal
   * @param serverHttpUrl the server URL, stashed in the HttpContext of the client action
   * @param assumeSubject if true, reuse the JAAS Subject already established by the caller
   *                      (e.g. an external application using the JDBC driver) instead of
   *                      logging in from the ticket cache
   * @return Stringified Base64 encoded kerberosAuthHeader on success
   * @throws Exception if no Subject is set (when assumeSubject) or the GSS exchange fails
   */
  public static String getKerberosServiceTicket(String principal, String host,
      String serverHttpUrl, boolean assumeSubject) throws Exception {
    String serverPrincipal =
        ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(principal, host);
    if (assumeSubject) {
      // With this option, we're assuming that the external application,
      // using the JDBC driver has done a JAAS kerberos login already
      AccessControlContext context = AccessController.getContext();
      Subject subject = Subject.getSubject(context);
      if (subject == null) {
        throw new Exception("The Subject is not set");
      }
      return Subject.doAs(subject, new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
    } else {
      // JAAS login from ticket cache to setup the client UserGroupInformation
      UserGroupInformation clientUGI =
          ShimLoader.getHadoopThriftAuthBridge().getCurrentUGIWithConf("kerberos");
      return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
    }
  }

  /**
   * Creates and returns a HS2 cookie token.
   * @param clientUserName Client User name.
   * @return An unsigned cookie token generated from input parameters.
   * The final cookie generated is of the following format :
   * {@code cu=<username>&rn=<randomNumber>&s=<cookieSignature>}
   */
  public static String createCookieToken(String clientUserName) {
    // StringBuilder: this is method-local, no synchronization needed.
    StringBuilder sb = new StringBuilder();
    sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName)
        .append(COOKIE_ATTR_SEPARATOR);
    sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR)
        .append(RANDOM.nextLong());
    return sb.toString();
  }

  /**
   * Parses a cookie token to retrieve client user name.
   * @param tokenStr Token String.
   * @return A valid user name if input is of valid format, else returns null.
   */
  public static String getUserNameFromCookieToken(String tokenStr) {
    Map<String, String> map = splitCookieToken(tokenStr);

    // A valid token carries exactly the attributes {cu, rn}; anything else is malformed.
    if (!map.keySet().equals(COOKIE_ATTRIBUTES)) {
      LOG.error("Invalid token with missing attributes {}", tokenStr);
      return null;
    }
    return map.get(COOKIE_CLIENT_USER_NAME);
  }

  /**
   * Splits the cookie token into attributes pairs.
   * @param tokenStr input token.
   * @return a map with the attribute pairs of the token if the input is valid.
   * Else, returns null.
   */
  private static Map<String, String> splitCookieToken(String tokenStr) {
    Map<String, String> map = new HashMap<String, String>();
    StringTokenizer st = new StringTokenizer(tokenStr, COOKIE_ATTR_SEPARATOR);

    while (st.hasMoreTokens()) {
      String part = st.nextToken();
      int separator = part.indexOf(COOKIE_KEY_VALUE_SEPARATOR);
      if (separator == -1) {
        LOG.error("Invalid token string {}", tokenStr);
        return null;
      }
      String key = part.substring(0, separator);
      String value = part.substring(separator + 1);
      map.put(key, value);
    }
    return map;
  }

  // Utility class: block instantiation, even via reflection.
  private HttpAuthUtils() {
    throw new UnsupportedOperationException("Can't initialize class");
  }

  /**
   * We'll create an instance of this class within a doAs block so that the client's TGT credentials
   * can be read from the Subject
   */
  public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
    public static final String HTTP_RESPONSE = "HTTP_RESPONSE";
    public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
    private final String serverPrincipal;
    private final String serverHttpUrl;
    private final Base64 base64codec;
    private final HttpContext httpContext;

    public HttpKerberosClientAction(String serverPrincipal, String serverHttpUrl) {
      this.serverPrincipal = serverPrincipal;
      this.serverHttpUrl = serverHttpUrl;
      // Line length 0 => no line breaks in the encoded output.
      base64codec = new Base64(0);
      httpContext = new BasicHttpContext();
      httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
    }

    /**
     * Runs the GSS-API handshake inside the surrounding doAs and returns the Base64-encoded
     * initial security token for the server.
     */
    @Override
    public String run() throws Exception {
      // This Oid for Kerberos GSS-API mechanism.
      Oid mechOid = new Oid("1.2.840.113554.1.2.2");
      // Oid for kerberos principal name
      Oid krb5PrincipalOid = new Oid("1.2.840.113554.1.2.2.1");
      GSSManager manager = GSSManager.getInstance();
      // GSS name for server
      GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
      // Create a GSSContext for authentication with the service.
      // We're passing client credentials as null since we want them to be read from the Subject.
      GSSContext gssContext =
          manager.createContext(serverName, mechOid, null, GSSContext.DEFAULT_LIFETIME);
      gssContext.requestMutualAuth(false);
      // Establish context
      byte[] inToken = new byte[0];
      byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
      gssContext.dispose();
      // Base64 encoded and stringified token for server
      return new String(base64codec.encode(outToken));
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.thriftserver.auth;

import java.io.IOException;
import java.util.Map;
import javax.security.sasl.SaslException;

import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server;
import org.apache.spark.sql.thriftserver.cli.thrift.TCLIService;
import org.apache.spark.sql.thriftserver.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessor;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TTransport;

/**
 * Static helpers for building Kerberos-secured Thrift transports and processors.
 * Not instantiable.
 */
public final class KerberosSaslHelper {

  /**
   * Returns a processor factory whose processors wrap the given service's TCLIService
   * processor with the SASL server's non-assuming processor.
   */
  public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
      ThriftCLIService service) {
    return new CLIServiceProcessorFactory(saslServer, service);
  }

  /**
   * Builds a Kerberos client transport over the given underlying transport.
   *
   * @param principal full server principal, expected as primary/instance@REALM
   * @param assumeSubject when true, assume an existing JAAS Subject instead of logging in
   * @throws SaslException if the transport cannot be created
   */
  public static TTransport getKerberosTransport(String principal, String host,
      TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject)
      throws SaslException {
    try {
      // Validate the principal shape up front, before touching any transport machinery.
      String[] principalParts = principal.split("[/@]");
      if (principalParts.length != 3) {
        throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
      }

      if (assumeSubject) {
        return createSubjectAssumedTransport(principal, underlyingTransport, saslProps);
      }

      // No pre-existing Subject: log in via the Hadoop auth bridge and let it build the transport.
      HadoopThriftAuthBridge.Client authBridge =
          ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
      return authBridge.createClientTransport(principal, host, "KERBEROS", null,
          underlyingTransport, saslProps);
    } catch (IOException e) {
      throw new SaslException("Failed to open client transport", e);
    }
  }

  /**
   * Wraps the underlying transport in a GSSAPI SASL transport that reads credentials from
   * the current JAAS Subject rather than performing its own login.
   */
  public static TTransport createSubjectAssumedTransport(String principal,
      TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
    String[] principalParts = principal.split("[/@]");
    try {
      // principalParts[0] = service name, principalParts[1] = server host.
      TSaslClientTransport saslTransport =
          new TSaslClientTransport("GSSAPI", null, principalParts[0], principalParts[1],
              saslProps, null, underlyingTransport);
      return new TSubjectAssumingTransport(saslTransport);
    } catch (SaslException se) {
      throw new IOException("Could not instantiate SASL transport", se);
    }
  }

  /**
   * Builds a DIGEST-based transport authenticated with a delegation token string.
   *
   * @throws SaslException if the transport cannot be created
   */
  public static TTransport getTokenTransport(String tokenStr, String host,
      TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
    HadoopThriftAuthBridge.Client authBridge =
        ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");

    try {
      return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
          saslProps);
    } catch (IOException e) {
      throw new SaslException("Failed to open client transport", e);
    }
  }

  // Utility class: block instantiation, even via reflection.
  private KerberosSaslHelper() {
    throw new UnsupportedOperationException("Can't initialize class");
  }

  /** Processor factory that SASL-wraps the CLI service's Thrift processor. */
  private static class CLIServiceProcessorFactory extends TProcessorFactory {

    private final ThriftCLIService service;
    private final Server saslServer;

    CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
      super(null);
      this.service = service;
      this.saslServer = saslServer;
    }

    @Override
    public TProcessor getProcessor(TTransport trans) {
      TProcessor sqlProcessor = new TCLIService.Processor<TCLIService.Iface>(service);
      return saslServer.wrapNonAssumingProcessor(sqlProcessor);
    }
  }
}
Loading