Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
58 commits
Select commit Hold shift + click to select a range
8b56400
Add new module and add v11 thrift protocol
AngersZhuuuu Oct 22, 2019
e30f686
add pom license
AngersZhuuuu Oct 23, 2019
4a50bc9
update code folder name
AngersZhuuuu Oct 23, 2019
7ffdc3b
all like hive
AngersZhuuuu Oct 23, 2019
b74d5e0
remove py/cpp/r/php
AngersZhuuuu Oct 23, 2019
fc2648f
Maven generate thrift source code
wangyum Oct 24, 2019
5365dcf
org.apache.thrift.tools:maven-thrift-plugin -> org.apache.thrift:thri…
wangyum Oct 25, 2019
95d8137
save some basic code
AngersZhuuuu Oct 26, 2019
4dc5c7e
Merge pull request #2 from wangyum/SPARK-29108
AngersZhuuuu Oct 27, 2019
24ce6d4
Merge branch 'SPARK-29018-V11' of https://github.com/AngersZhuuuu/spa…
AngersZhuuuu Oct 27, 2019
5efe8cb
Revert "save some basic code"
AngersZhuuuu Oct 27, 2019
6f7d48a
Update TCLIService.thrift
AngersZhuuuu Oct 27, 2019
cf24306
Add basic data
AngersZhuuuu Oct 27, 2019
24fce6b
save code
AngersZhuuuu Oct 28, 2019
bc36bdf
change
AngersZhuuuu Oct 28, 2019
c438673
format code
AngersZhuuuu Oct 28, 2019
a7ec8b0
format code
AngersZhuuuu Oct 29, 2019
5cf7eb0
Update SparkSQLEnv.scala
AngersZhuuuu Oct 29, 2019
0023bcb
fix logger conflict
AngersZhuuuu Oct 29, 2019
723450b
fix scala style
AngersZhuuuu Oct 29, 2019
3eb7672
start with execution hive
AngersZhuuuu Oct 29, 2019
5aa4d9d
format code add file header
AngersZhuuuu Oct 29, 2019
782d36b
FOR BUILD spark-thriftserver
AngersZhuuuu Oct 29, 2019
f2b5346
add UT class
AngersZhuuuu Oct 29, 2019
a14a9e9
fix UT case, remove unsupported UT
AngersZhuuuu Oct 29, 2019
e1bb6e1
fix for UT
AngersZhuuuu Oct 29, 2019
74fb240
fix UT
AngersZhuuuu Oct 29, 2019
2f90ed9
fix for UT
AngersZhuuuu Oct 30, 2019
cd92f3f
fix return error
AngersZhuuuu Oct 30, 2019
60dc24f
fix client convert row map
AngersZhuuuu Oct 30, 2019
409a1a3
add UT of TCLIServiceClient
AngersZhuuuu Oct 31, 2019
5771a9b
fix java code style
AngersZhuuuu Oct 31, 2019
77578c3
add processGlobalInitFile
AngersZhuuuu Oct 31, 2019
4f7cbac
Update ThriftServerSessionImpl.scala
AngersZhuuuu Oct 31, 2019
d93ab72
Keep type in scala and remove unused type
AngersZhuuuu Oct 31, 2019
ac9ad54
basic service to java
AngersZhuuuu Oct 31, 2019
795ada1
Handle to java
AngersZhuuuu Oct 31, 2019
63076d3
Operation Type State Status to java
AngersZhuuuu Oct 31, 2019
000443a
Fetch type and orientation to java
AngersZhuuuu Oct 31, 2019
5de2bf9
PatternOrIdentifier to java
AngersZhuuuu Oct 31, 2019
a6b0ed1
Interface to Java
AngersZhuuuu Oct 31, 2019
2e6d221
fix bug
AngersZhuuuu Nov 1, 2019
158b298
remove setApplicationName
AngersZhuuuu Nov 1, 2019
745ca60
Session Basic Class to java
AngersZhuuuu Nov 1, 2019
f62e577
auth and thrift service to java
AngersZhuuuu Nov 1, 2019
5fae842
Update ColumnBasedSet.scala
AngersZhuuuu Nov 1, 2019
fe39db0
Update RowBasedSet.scala
AngersZhuuuu Nov 1, 2019
1c303ba
Update type map
AngersZhuuuu Nov 1, 2019
9a32af8
make RowSetFactory clean
AngersZhuuuu Nov 1, 2019
287c6be
update
AngersZhuuuu Nov 2, 2019
f6a7736
format code
AngersZhuuuu Nov 2, 2019
f8b7351
add thrift file
AngersZhuuuu Nov 6, 2019
272ba3c
not check current java code now
AngersZhuuuu Nov 6, 2019
0de3191
remove import jdk.tools dependency
AngersZhuuuu Nov 6, 2019
6374f42
follow comment
AngersZhuuuu Nov 6, 2019
f464773
Merge branch 'master' into SPARK-29018-V11-STEP4-ADD-TEST
AngersZhuuuu Nov 7, 2019
13da926
fix for `mvn install`
AngersZhuuuu Nov 7, 2019
d0de49f
fix for `mvn install`
AngersZhuuuu Nov 7, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
save some basic code
  • Loading branch information
AngersZhuuuu committed Oct 26, 2019
commit 95d8137cc9a747e15db0622c751303206b73f3fb
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.thriftserver

import java.util

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.conf.HiveConf

import org.apache.spark.internal.Logging
import org.apache.spark.sql.thriftserver.Service._

/**
 * Base implementation of [[Service]] providing the state machine
 * (NOTINITED -> INITED -> STARTED -> STOPPED) and listener notification.
 *
 * @param name
 *          service name
 */
abstract class AbstractService(val name: String) extends Service with Logging {

  /**
   * Service state: initially {@link STATE#NOTINITED}.
   */
  private var state: STATE = NOTINITED

  /**
   * Service start time. Will be zero until the service is started.
   */
  private var startTime = 0L

  /**
   * The configuration. Will be null until the service is initialized.
   */
  private var hiveConf: HiveConf = null

  /**
   * List of state change listeners; it is final to ensure
   * that it will never be null.
   */
  final private val listeners = new util.ArrayList[ServiceStateChangeListener]

  def getServiceState: Service.STATE = synchronized {
    state
  }

  /**
   * {@inheritDoc }
   *
   * Synchronized so that the NOTINITED check and the transition to INITED
   * are atomic with respect to concurrent lifecycle calls; `changeState`
   * relies on being invoked only from synchronized methods.
   *
   * @throws IllegalStateException
   *           if the current service state does not permit
   *           this action
   */
  def init(hiveConf: HiveConf): Unit = synchronized {
    ensureCurrentState(NOTINITED)
    this.hiveConf = hiveConf
    changeState(INITED)
    logInfo(s"Service:$getName is inited.")
  }

  /**
   * Start the service. Records the start time and moves INITED -> STARTED.
   *
   * @throws IllegalStateException if the service is not in state INITED
   */
  def start(): Unit = synchronized {
    startTime = System.currentTimeMillis
    ensureCurrentState(INITED)
    changeState(STARTED)
    logInfo(s"Service:$getName is started.")
  }

  /**
   * Stop the service. A no-op when the service was never started
   * (NOTINITED/INITED) or is already STOPPED, so stop() is safe to call
   * during cleanup of a partially started service group.
   *
   * @throws IllegalStateException if the service is in an unexpected state
   */
  def stop(): Unit = synchronized {
    if ((state eq STOPPED) ||
      (state eq INITED) ||
      (state eq NOTINITED)) {
      // already stopped, or else it was never
      // started (eg another service failing canceled startup)
      return
    }
    ensureCurrentState(STARTED)
    changeState(STOPPED)
    logInfo(s"Service:$getName is stopped.")
  }

  /** Register a listener to be notified on every state transition. */
  def register(l: ServiceStateChangeListener): Unit = synchronized {
    listeners.add(l)
  }

  /** Remove a previously registered listener; no-op if absent. */
  def unregister(l: ServiceStateChangeListener): Unit = synchronized {
    listeners.remove(l)
  }

  def getName: String = name

  def getHiveConf: HiveConf = hiveConf

  def getStartTime: Long = startTime

  /**
   * Verify that a service is in a given state.
   *
   * @param currentState
   *          the desired state
   * @throws IllegalStateException
   *           if the service state is different from
   *           the desired state
   */
  private def ensureCurrentState(currentState: STATE): Unit = {
    ServiceOperations.ensureCurrentState(state, currentState)
  }

  /**
   * Change to a new state and notify all listeners.
   * This is a private method that is only invoked from synchronized methods,
   * which avoid having to clone the listener list. It does imply that
   * the state change listener methods should be short lived, as they
   * will delay the state transition.
   *
   * @param newState
   *          new service state
   */
  private def changeState(newState: STATE): Unit = {
    state = newState
    // notify listeners
    for (l <- listeners.asScala) {
      l.stateChanged(this)
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.thriftserver

import java.util
import java.util.Collections

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.conf.HiveConf

import org.apache.spark.internal.Logging
import org.apache.spark.sql.thriftserver.Service.STOPPED

/**
 * A [[Service]] that contains an ordered list of child services and
 * propagates lifecycle transitions to them: init/start in registration
 * order, stop in reverse order.
 */
class CompositeService(name: String) extends AbstractService(name) with Logging {

  // Child services, kept in registration (and therefore start) order.
  private val serviceList: util.ArrayList[Service] = new util.ArrayList[Service]

  /** Read-only view of the registered child services. */
  def getServices: util.Collection[Service] = Collections.unmodifiableList(serviceList)

  protected def addService(service: Service): Unit = {
    serviceList.add(service)
  }

  protected def removeService(service: Service): Boolean = serviceList.remove(service)

  /** Initialize every child, then this composite itself. */
  override def init(hiveConf: HiveConf): Unit = {
    for (service <- serviceList.asScala) {
      service.init(hiveConf)
    }
    super.init(hiveConf)
  }

  /**
   * Start children in order, then this composite. On any failure the
   * already-started children (and the failed one) are stopped before the
   * error is rethrown as a ServiceException.
   */
  override def start(): Unit = {
    var startedCount = 0
    try {
      val total = serviceList.size
      while (startedCount < total) {
        serviceList.get(startedCount).start()
        startedCount += 1
      }
      super.start()
    } catch {
      // Deliberately catches Throwable: any startup failure must trigger
      // cleanup of the children started so far before propagating.
      case e: Throwable =>
        logError("Error starting services " + getName, e)
        // Note that the state of the failed service is still INITED and not
        // STARTED. Even though the last service is not started completely, still
        // call stop() on all services including failed service to make sure cleanup
        // happens.
        stop(startedCount)
        throw new ServiceException("Failed to Start " + getName, e)
    }
  }

  /** Stop all children (reverse order) and then this composite. Idempotent. */
  override def stop(): Unit = {
    if (this.getServiceState eq STOPPED) {
      // The base composite-service is already stopped, don't do anything again.
      return
    }
    if (serviceList.size > 0) {
      stop(serviceList.size - 1)
    }
    super.stop()
  }

  /**
   * Stop children with indices numOfServicesStarted..0, i.e. in reverse
   * order of start. Failures while stopping one child are logged and do
   * not prevent the remaining children from being stopped.
   */
  private def stop(numOfServicesStarted: Int): Unit = {
    var i = numOfServicesStarted
    while (i >= 0) {
      val service = serviceList.get(i)
      try {
        service.stop()
      } catch {
        case t: Throwable =>
          logInfo("Error stopping " + service.getName, t)
      }
      i -= 1
    }
  }

  /**
   * JVM Shutdown hook for CompositeService which will stop the given
   * CompositeService gracefully in case of JVM shutdown.
   */
  class CompositeServiceShutdownHook(val compositeService: CompositeService) extends Runnable {
    override def run(): Unit = {
      try {
        // Stop the Composite Service
        compositeService.stop()
      } catch {
        case t: Throwable =>
          logInfo("Error stopping " + compositeService.getName, t)
      }
    }
  }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.thriftserver

import java.security.{MessageDigest, NoSuchAlgorithmException}

import org.apache.commons.codec.binary.Base64

import org.apache.spark.internal.Logging

/**
 * The cookie signer generates a signature based on SHA digest
 * and appends it to the cookie value generated at the
 * server side. It uses SHA digest algorithm to sign and verify signatures.
 */
object CookieSigner {
// Separator token appended between the raw cookie value and its signature.
private val SIGNATURE = "&s="
// JCA digest algorithm name; on standard JVMs "SHA" resolves to SHA-1.
private val SHA_STRING = "SHA"
}

/**
 * Signs cookie values with a SHA digest of (value + secret) and verifies
 * such signatures. Instances must be created via the auxiliary constructor
 * taking the secret bytes.
 */
class CookieSigner extends Logging {
  // Secret mixed into every digest. Assigned by the auxiliary constructor,
  // which keeps a defensive copy so callers cannot mutate it afterwards.
  private var secretBytes: Array[Byte] = null

  /**
   * Create a signer with the given secret.
   *
   * @param secret secret bytes used to salt the digest; must not be null
   * @throws IllegalArgumentException if secret is null
   */
  def this(secret: Array[Byte]) {
    this()
    if (secret == null) {
      throw new IllegalArgumentException("NULL Secret Bytes")
    }
    this.secretBytes = secret.clone()
  }

  /**
   * Sign the cookie given the string token as input.
   *
   * @param str Input token
   * @return Signed token that can be used to create a cookie
   * @throws IllegalArgumentException if str is null or empty
   */
  def signCookie(str: String): String = {
    if (str == null || str.isEmpty) {
      throw new IllegalArgumentException("NULL or empty string to sign")
    }
    val signature = getSignature(str)
    logDebug("Signature generated for " + str + " is " + signature)
    str + CookieSigner.SIGNATURE + signature
  }

  /**
   * Verify a signed string and extracts the original string.
   *
   * @param signedStr The already signed string
   * @return Raw Value of the string without the signature
   * @throws IllegalArgumentException if the input has no signature marker
   *                                  or the signature does not match
   */
  def verifyAndExtract(signedStr: String): String = {
    val index = signedStr.lastIndexOf(CookieSigner.SIGNATURE)
    if (index == -1) {
      throw new IllegalArgumentException("Invalid input sign: " + signedStr)
    }
    val originalSignature = signedStr.substring(index + CookieSigner.SIGNATURE.length)
    val rawValue = signedStr.substring(0, index)
    val currentSignature = getSignature(rawValue)
    logDebug("Signature generated for " + rawValue + " inside verify is " + currentSignature)
    // Constant-time comparison: a plain String == would short-circuit on the
    // first differing character and leak timing information to an attacker
    // probing forged cookie signatures.
    if (!MessageDigest.isEqual(originalSignature.getBytes, currentSignature.getBytes)) {
      throw new IllegalArgumentException("Invalid sign, original = " +
        originalSignature + " current = " + currentSignature)
    }
    rawValue
  }

  /**
   * Get the signature of the input string based on SHA digest algorithm.
   *
   * @param str Input token
   * @return Signed String
   */
  private def getSignature(str: String) = try {
    val md = MessageDigest.getInstance(CookieSigner.SHA_STRING)
    md.update(str.getBytes)
    md.update(secretBytes)
    val digest = md.digest
    new Base64(0).encodeToString(digest)
  } catch {
    case ex: NoSuchAlgorithmException =>
      throw new RuntimeException("Invalid SHA digest String: " +
        CookieSigner.SHA_STRING + " " + ex.getMessage, ex)
  }
}
Loading