ambari-hdp31-stack-patches

Patches to the HDP stack definitions shipped with Ambari 2.7. The files live under the HDP/3.0 stack directory, which the HDP 3.1 stack inherits.

On the Ambari Server:

wget https://gist.githubusercontent.com/seanorama/99d13ca1898b33d6ebd1322193062b5d/raw/f5f43b6bac31e1f44f2320d72fb2d1ead651272c/hdp30_alert_hive_interactive_thrift_port.py.patch
sudo patch -b -d/ -p0 < hdp30_alert_hive_interactive_thrift_port.py.patch

wget https://gist.githubusercontent.com/seanorama/99d13ca1898b33d6ebd1322193062b5d/raw/f5f43b6bac31e1f44f2320d72fb2d1ead651272c/hdp30_alert_spark2_thrift_port.patch
sudo patch -b -d/ -p0 < hdp30_alert_spark2_thrift_port.patch

wget https://gist.githubusercontent.com/seanorama/99d13ca1898b33d6ebd1322193062b5d/raw/f5f43b6bac31e1f44f2320d72fb2d1ead651272c/hdp30_hbase-quicklinks.patch
sudo patch -b -d/ -p0 < hdp30_hbase-quicklinks.patch

wget https://gist.githubusercontent.com/seanorama/99d13ca1898b33d6ebd1322193062b5d/raw/f5f43b6bac31e1f44f2320d72fb2d1ead651272c/hdp30_oozie-quicklinks.patch
sudo patch -b -d/ -p0 < hdp30_oozie-quicklinks.patch

sudo ambari-server refresh-stack-hash
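
All four downloads come from the same gist revision, so the server-side steps above can also be run as one loop. This is only a convenience sketch of the exact commands already listed (same URLs, filenames, and patch flags), not an additional step:

base=https://gist.githubusercontent.com/seanorama/99d13ca1898b33d6ebd1322193062b5d/raw/f5f43b6bac31e1f44f2320d72fb2d1ead651272c
for p in hdp30_alert_hive_interactive_thrift_port.py.patch hdp30_alert_spark2_thrift_port.patch hdp30_hbase-quicklinks.patch hdp30_oozie-quicklinks.patch; do
    wget "${base}/${p}"                  # fetch the patch from this gist
    sudo patch -b -d/ -p0 < "${p}"       # apply against the installed stack files, keeping .orig backups
done
sudo ambari-server refresh-stack-hash    # have Ambari re-hash the modified stack scripts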

On all hosts:

sudo ambari-agent restart
--- /var/lib/ambari-server/resources/stacks/HDP/3.0/services/HIVE/package/alerts/alert_hive_interactive_thrift_port.py.orig 2018-12-07 13:33:26.000000000 -0500
+++ /var/lib/ambari-server/resources/stacks/HDP/3.0/services/HIVE/package/alerts/alert_hive_interactive_thrift_port.py 2019-02-27 08:57:14.397224622 -0500
@@ -43,8 +43,8 @@
SMOKEUSER_PRINCIPAL_KEY = '{{cluster-env/smokeuser_principal_name}}'
SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
HIVE_SSL = '{{hive-site/hive.server2.use.SSL}}'
-HIVE_SSL_KEYSTORE_PATH = '{{hive-interactive-site/hive.server2.keystore.path}}'
-HIVE_SSL_KEYSTORE_PASSWORD = '{{hive-interactive-site/hive.server2.keystore.password}}'
+HIVE_SSL_TRUSTSTORE_PATH = '{{hive-interactive-site/hive.server2.truststore.path}}'
+HIVE_SSL_TRUSTSTORE_PASSWORD = '{{hive-interactive-site/hive.server2.truststore.password}}'
HIVE_LDAP_USERNAME = '{{hive-env/alert_ldap_username}}'
HIVE_LDAP_PASSWORD = '{{hive-env/alert_ldap_password}}'
@@ -86,7 +86,7 @@
HIVE_SERVER_INTERACTIVE_PRINCIPAL_KEY, SMOKEUSER_KEYTAB_KEY, SMOKEUSER_PRINCIPAL_KEY,
HIVE_SERVER_INTERACTIVE_THRIFT_HTTP_PORT_KEY, HIVE_SERVER_INTERACTIVE_TRANSPORT_MODE_KEY,
HIVE_SERVER_TRANSPORT_MODE_KEY, KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY, HIVE_SSL,
- HIVE_SSL_KEYSTORE_PATH, HIVE_SSL_KEYSTORE_PASSWORD, HIVE_LDAP_USERNAME, HIVE_LDAP_PASSWORD,
+ HIVE_SSL_TRUSTSTORE_PATH, HIVE_SSL_TRUSTSTORE_PASSWORD, HIVE_LDAP_USERNAME, HIVE_LDAP_PASSWORD,
HIVE_USER_KEY)
def execute(configurations={}, parameters={}, host_name=None):
@@ -132,13 +132,13 @@
if HIVE_SSL in configurations:
hive_ssl = configurations[HIVE_SSL]
- hive_ssl_keystore_path = None
- if HIVE_SSL_KEYSTORE_PATH in configurations:
- hive_ssl_keystore_path = configurations[HIVE_SSL_KEYSTORE_PATH]
-
- hive_ssl_keystore_password = None
- if HIVE_SSL_KEYSTORE_PASSWORD in configurations:
- hive_ssl_keystore_password = configurations[HIVE_SSL_KEYSTORE_PASSWORD]
+ hive_ssl_truststore_path = None
+ if HIVE_SSL_TRUSTSTORE_PATH in configurations:
+ hive_ssl_truststore_path = configurations[HIVE_SSL_TRUSTSTORE_PATH]
+
+ hive_ssl_truststore_password = None
+ if HIVE_SSL_TRUSTSTORE_PASSWORD in configurations:
+ hive_ssl_truststore_password = configurations[HIVE_SSL_TRUSTSTORE_PASSWORD]
# defaults
smokeuser_keytab = SMOKEUSER_KEYTAB_DEFAULT
@@ -205,7 +205,7 @@
try:
hive_check.check_thrift_port_sasl(host_name, port, hive_server2_authentication, hive_server_principal,
kinitcmd, smokeuser, hive_user = hive_user, transport_mode=transport_mode, ssl=hive_ssl,
- ssl_keystore=hive_ssl_keystore_path, ssl_password=hive_ssl_keystore_password,
+ ssl_keystore=hive_ssl_truststore_path, ssl_password=hive_ssl_truststore_password,
check_command_timeout=int(check_command_timeout), ldap_username=ldap_username,
ldap_password=ldap_password)
result_code = 'OK'
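
A quick sanity check (not part of the original instructions) is to confirm the patched alert script now references the truststore tokens instead of the keystore ones:

grep -n TRUSTSTORE /var/lib/ambari-server/resources/stacks/HDP/3.0/services/HIVE/package/alerts/alert_hive_interactive_thrift_port.py    # should list the HIVE_SSL_TRUSTSTORE_* lines added above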
From 24b88b8be312d45f86493584437a7f4149bba95d Mon Sep 17 00:00:00 2001
From: Sean Roberts <[email protected]>
Date: Wed, 9 Jan 2019 18:49:31 +0000
Subject: [PATCH 1/3] BUG-116681 Ambari: "Spark2 Thrift Server" alert does not
work with HTTPS (SSL)
---
/var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/alerts/alert_spark2_thrift_port.py | 25 +++++++++++++------
1 file changed, 18 insertions(+), 7 deletions(-)
diff --git /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/alerts/alert_spark2_thrift_port.py.orig /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/alerts/alert_spark2_thrift_port.py
index 8fe11a82..b1daf01f 100755
--- /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/alerts/alert_spark2_thrift_port.py.orig
+++ /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/alerts/alert_spark2_thrift_port.py
@@ -37,9 +37,9 @@
HIVE_SERVER_THRIFT_PORT_KEY = '{{spark2-hive-site-override/hive.server2.thrift.port}}'
HIVE_SERVER_THRIFT_HTTP_PORT_KEY = '{{spark2-hive-site-override/hive.server2.thrift.http.port}}'
HIVE_SERVER_TRANSPORT_MODE_KEY = '{{spark2-hive-site-override/hive.server2.transport.mode}}'
+HIVE_SERVER2_USE_SSL_KEY = '{{spark2-hive-site-override/hive.server2.use.SSL}}'
SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
-HIVE_SERVER2_AUTHENTICATION_KEY = '{{hive-site/hive.server2.authentication}}'
HIVE_SERVER2_KERBEROS_KEYTAB = '{{spark2-hive-site-override/hive.server2.authentication.kerberos.keytab}}'
HIVE_SERVER2_PRINCIPAL_KEY = '{{spark2-hive-site-override/hive.server2.authentication.kerberos.principal}}'
@@ -62,7 +62,7 @@ def get_tokens():
Returns a tuple of tokens in the format {{site/property}} that will be used
to build the dictionary passed into execute
"""
- return (HIVE_SERVER_THRIFT_PORT_KEY, HIVE_SERVER_THRIFT_HTTP_PORT_KEY, HIVE_SERVER_TRANSPORT_MODE_KEY, SECURITY_ENABLED_KEY,
+ return (HIVE_SERVER_THRIFT_PORT_KEY, HIVE_SERVER_THRIFT_HTTP_PORT_KEY, HIVE_SERVER_TRANSPORT_MODE_KEY, HIVE_SERVER2_USE_SSL_KEY, SECURITY_ENABLED_KEY,
KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY, SPARK_USER_KEY, HIVE_SERVER2_KERBEROS_KEYTAB, HIVE_SERVER2_PRINCIPAL_KEY)
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
@@ -89,7 +89,12 @@ def execute(configurations={}, parameters={}, host_name=None):
if transport_mode.lower() == 'binary' and HIVE_SERVER_THRIFT_PORT_KEY in configurations:
port = int(configurations[HIVE_SERVER_THRIFT_PORT_KEY])
elif transport_mode.lower() == 'http' and HIVE_SERVER_THRIFT_HTTP_PORT_KEY in configurations:
- port = int(configurations[HIVE_SERVER_THRIFT_HTTP_PORT_KEY])
+ port = int(configurations[HIVE_SERVER_THRIFT_HTTP_PORT_KEY])
+
+ ssl_enabled = False
+ if (HIVE_SERVER2_USE_SSL_KEY in configurations
+ and str(configurations[HIVE_SERVER2_USE_SSL_KEY]).upper() == 'TRUE'):
+ ssl_enabled = True
security_enabled = False
if SECURITY_ENABLED_KEY in configurations:
@@ -132,11 +137,17 @@ def execute(configurations={}, parameters={}, host_name=None):
if host_name is None:
host_name = socket.getfqdn()
+ beeline_url = ["jdbc:hive2://{host_name}:{port}/default"]
+
if security_enabled:
- beeline_url = ["jdbc:hive2://{host_name}:{port}/default;principal={hive_principal}","transportMode={transport_mode}"]
- else:
- beeline_url = ["jdbc:hive2://{host_name}:{port}/default","transportMode={transport_mode}"]
- # append url according to used transport
+ beeline_url.append("principal={hive_principal}")
+
+ beeline_url.append("transportMode={transport_mode}")
+
+ if transport_mode.lower() == 'http':
+ beeline_url.append("httpPath=cliservice")
+ if ssl_enabled:
+ beeline_url.append("ssl=true")
beeline_cmd = os.path.join(spark_home, "bin", "beeline")
cmd = "! %s -u '%s' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL' -e 'Error: Could not open'" % \
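
For illustration only: assuming the alert joins the beeline_url parts with semicolons as elsewhere in the script, the health-check connection string it builds for a Kerberized, HTTP-transport, SSL-enabled Spark2 Thrift Server would look roughly like the command below (HOST, PORT, and HIVE_PRINCIPAL are placeholders, not values from this gist). Before the patch, the httpPath and ssl parts were never appended, which is why the alert failed against an HTTPS-enabled server.

beeline -u 'jdbc:hive2://HOST:PORT/default;principal=HIVE_PRINCIPAL;transportMode=http;httpPath=cliservice;ssl=true' -e ''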
From 412f8e9012ef798b114caea4262a0cc750287044 Mon Sep 17 00:00:00 2001
From: Sean Roberts <[email protected]>
Date: Thu, 10 Jan 2019 11:30:37 +0000
Subject: [PATCH 2/3] BUG-116681: Ambari doesn't support "Spark2 Thrift Server"
with HTTPS (SSL) mode
- fix params.py
---
/var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/params.py | 1 +
1 file changed, 1 insertion(+)
diff --git /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/params.py.orig /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/params.py
index 97912d1b..0dfceaae 100755
--- /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/params.py.orig
+++ /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/params.py
@@ -197,6 +197,7 @@
spark_thrift_port = int(config['configurations']['spark2-hive-site-override']['hive.server2.thrift.port'])
elif spark_transport_mode.lower() == 'http':
spark_thrift_port = int(config['configurations']['spark2-hive-site-override']['hive.server2.thrift.http.port'])
+ spark_thrift_ssl_enabled = default("configurations/spark2-hive-site-override/hive.server2.use.SSL", False)
# thrift server support - available on HDP 2.3 or higher
spark_thrift_sparkconf = None
From 73b9edcff73a9b202a75b07dec273bdc04af4aba Mon Sep 17 00:00:00 2001
From: Sean Roberts <[email protected]>
Date: Thu, 10 Jan 2019 11:30:56 +0000
Subject: [PATCH 3/3] BUG-116681: Ambari doesn't support "Spark2 Thrift Server"
with HTTPS (SSL) mode
- Fix spark_service.py
---
/var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/spark_service.py | 15 +++++++++++----
1 file changed, 11 insertions(+), 4 deletions(-)
diff --git /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/spark_service.py.orig /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/spark_service.py
index c0bc11a0..2b668f07 100755
--- /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/spark_service.py.orig
+++ /var/lib/ambari-server/resources/stacks/HDP/3.0/services/SPARK2/package/scripts/spark_service.py
@@ -142,11 +142,18 @@ def spark_service(name, upgrade_type=None, action=None):
while i < 15:
time.sleep(30)
Logger.info("Check connection to STS is created.")
+
+ beeline_url = ["jdbc:hive2://{fqdn}:{spark_thrift_port}/default"]
+
if params.security_enabled:
- beeline_url = ["jdbc:hive2://{fqdn}:{spark_thrift_port}/default;principal={hive_kerberos_principal}","transportMode={spark_transport_mode}"]
- else:
- beeline_url = ["jdbc:hive2://{fqdn}:{spark_thrift_port}/default","transportMode={spark_transport_mode}"]
- # append url according to used transport
+ beeline_url.append("principal={hive_kerberos_principal}")
+
+ beeline_url.append("transportMode={spark_transport_mode}")
+
+ if params.spark_transport_mode.lower() == 'http':
+ beeline_url.append("httpPath=cliservice")
+ if params.spark_thrift_ssl_enabled:
+ beeline_url.append("ssl=true")
beeline_cmd = os.path.join(params.spark_home, "bin", "beeline")
cmd = "! %s -u '%s' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL' -e 'Error: Could not open'" % \