7 changes: 5 additions & 2 deletions recipe/meta.yaml
@@ -10,12 +10,15 @@ package:
 source:
   url: http://d3kbcqa49mib13.cloudfront.net/spark-{{ version }}-bin-hadoop{{ hadoop_version }}.tgz
   sha256: {{ sha256 }}
+  patches:
+    - setup.py.patch

 build:
-  number: 0
+  number: 1
   script: >
     cd python &&
-    python setup.py install --single-version-externally-managed --record=record.txt
+    python setup.py install --single-version-externally-managed --record=record.txt &&


 requirements:
   build:
95 changes: 95 additions & 0 deletions recipe/setup.py.patch
@@ -0,0 +1,95 @@
From 447136d810deddb9b8b73ef081168548464a4d35 Mon Sep 17 00:00:00 2001
From: Benjamin Zaitlen <[email protected]>
Date: Fri, 12 May 2017 16:20:22 +0000
Subject: [PATCH] add sbin directory

---
python/setup.py | 14 ++++++++++++++
1 file changed, 14 insertions(+)

diff --git a/python/setup.py b/python/setup.py
index f500354..4c1bfd2 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -69,10 +69,12 @@ elif len(JARS_PATH) == 0 and not os.path.exists(TEMP_PATH):

EXAMPLES_PATH = os.path.join(SPARK_HOME, "examples/src/main/python")
SCRIPTS_PATH = os.path.join(SPARK_HOME, "bin")
+SBIN_SCRIPTS_PATH = os.path.join(SPARK_HOME, "sbin")
DATA_PATH = os.path.join(SPARK_HOME, "data")
LICENSES_PATH = os.path.join(SPARK_HOME, "licenses")

SCRIPTS_TARGET = os.path.join(TEMP_PATH, "bin")
+SBIN_SCRIPTS_TARGET = os.path.join(TEMP_PATH, "sbin")
JARS_TARGET = os.path.join(TEMP_PATH, "jars")
EXAMPLES_TARGET = os.path.join(TEMP_PATH, "examples")
DATA_TARGET = os.path.join(TEMP_PATH, "data")
@@ -117,6 +119,7 @@ try:
if _supports_symlinks():
os.symlink(JARS_PATH, JARS_TARGET)
os.symlink(SCRIPTS_PATH, SCRIPTS_TARGET)
+ os.symlink(SBIN_SCRIPTS_PATH, SBIN_SCRIPTS_TARGET)
os.symlink(EXAMPLES_PATH, EXAMPLES_TARGET)
os.symlink(DATA_PATH, DATA_TARGET)
os.symlink(LICENSES_PATH, LICENSES_TARGET)
@@ -124,6 +127,7 @@ try:
# For windows fall back to the slower copytree
copytree(JARS_PATH, JARS_TARGET)
copytree(SCRIPTS_PATH, SCRIPTS_TARGET)
+ copytree(SBIN_SCRIPTS_PATH, SBIN_SCRIPTS_TARGET)
copytree(EXAMPLES_PATH, EXAMPLES_TARGET)
copytree(DATA_PATH, DATA_TARGET)
copytree(LICENSES_PATH, LICENSES_TARGET)
@@ -140,6 +144,11 @@ try:
# Scripts directive requires a list of each script path and does not take wild cards.
script_names = os.listdir(SCRIPTS_TARGET)
scripts = list(map(lambda script: os.path.join(SCRIPTS_TARGET, script), script_names))
+
+ sbin_script_names = os.listdir(SBIN_SCRIPTS_TARGET)
+ sbin_scripts = list(map(lambda script: os.path.join(SBIN_SCRIPTS_TARGET, script), sbin_script_names))
+
+ scripts.extend(sbin_scripts)
# We add find_spark_home.py to the bin directory we install so that pip installed PySpark
# will search for SPARK_HOME with Python.
scripts.append("pyspark/find_spark_home.py")
@@ -170,6 +179,7 @@ try:
'pyspark.sql',
'pyspark.streaming',
'pyspark.bin',
+ 'pyspark.sbin',
'pyspark.jars',
'pyspark.python.pyspark',
'pyspark.python.lib',
@@ -180,6 +190,7 @@ try:
package_dir={
'pyspark.jars': 'deps/jars',
'pyspark.bin': 'deps/bin',
+ 'pyspark.sbin': 'deps/sbin',
'pyspark.python.lib': 'lib',
'pyspark.data': 'deps/data',
'pyspark.licenses': 'deps/licenses',
@@ -188,6 +199,7 @@ try:
package_data={
'pyspark.jars': ['*.jar'],
'pyspark.bin': ['*'],
+ 'pyspark.sbin': ['*'],
'pyspark.python.lib': ['*.zip'],
'pyspark.data': ['*.txt', '*.data'],
'pyspark.licenses': ['*.txt'],
@@ -219,12 +231,14 @@ finally:
if _supports_symlinks():
os.remove(os.path.join(TEMP_PATH, "jars"))
os.remove(os.path.join(TEMP_PATH, "bin"))
+ os.remove(os.path.join(TEMP_PATH, "sbin"))
os.remove(os.path.join(TEMP_PATH, "examples"))
os.remove(os.path.join(TEMP_PATH, "data"))
os.remove(os.path.join(TEMP_PATH, "licenses"))
else:
rmtree(os.path.join(TEMP_PATH, "jars"))
rmtree(os.path.join(TEMP_PATH, "bin"))
+ rmtree(os.path.join(TEMP_PATH, "sbin"))
rmtree(os.path.join(TEMP_PATH, "examples"))
rmtree(os.path.join(TEMP_PATH, "data"))
rmtree(os.path.join(TEMP_PATH, "licenses"))
--
2.1.4