AMBARI-26075: Wrong datanode pid file when security enabled and unprivileged port is used #3784

Open · wants to merge 1 commit into trunk
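What the patch does: previously, status_params.py switched to the root-owned pid file (hadoop-{hdfs_user}-{root_user}-datanode.pid) whenever security_enabled was true. On a secure cluster that uses unprivileged (>= 1024) datanode ports, for example with SASL data transfer protection, the datanode is not started through the privileged root path and writes the plain hadoop-{hdfs_user}-datanode.pid instead, so Ambari monitored the wrong pid file. The commit moves the secure_dn_ports_are_in_use computation from the params script into status_params.py and keys the pid-file choice on that flag rather than on security_enabled.

The following is a minimal, self-contained sketch of the corrected selection logic; get_port and is_secure_port are simplified stand-ins for Ambari's utils helpers, and the hdfs_user and pid_dir defaults are illustrative, not taken from the patch:

def get_port(address):
  # "0.0.0.0:1019" -> 1019; None when the address or its port part is missing
  if address is None:
    return None
  parts = address.split(":")
  return int(parts[-1]) if len(parts) > 1 and parts[-1].isdigit() else None

def is_secure_port(port):
  # Ports below 1024 can only be bound by root, which forces the
  # privileged (root) datanode startup path
  return port is not None and port < 1024

def datanode_pid_file(security_enabled, dfs_http_policy,
                      dn_addr, dn_http_addr, dn_https_addr,
                      hdfs_user="hdfs", pid_dir="/var/run/hadoop/hdfs"):
  # Mirrors the dfs.http.policy handling added to status_params.py below
  ports = [get_port(dn_addr)]
  if dfs_http_policy == "HTTPS_ONLY":
    ports.append(get_port(dn_https_addr))
  elif dfs_http_policy == "HTTP_AND_HTTPS":
    ports += [get_port(dn_http_addr), get_port(dn_https_addr)]
  else:  # "HTTP_ONLY" or not defined
    ports.append(get_port(dn_http_addr))
  secure_ports_in_use = security_enabled and any(is_secure_port(p) for p in ports)
  if secure_ports_in_use:
    # Datanode is started by root, so the pid file is the root-owned variant
    return "%s/hadoop-%s-root-datanode.pid" % (pid_dir, hdfs_user)
  # Unprivileged ports (e.g. SASL): datanode runs directly as hdfs_user
  return "%s/hadoop-%s-datanode.pid" % (pid_dir, hdfs_user)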
@@ -45,6 +45,7 @@
from resource_management.libraries.functions.get_architecture import get_architecture
from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
from resource_management.libraries.functions.namenode_ha_utils import get_properties_for_all_nameservices, namenode_federation_enabled
from status_params import *

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
@@ -85,22 +86,13 @@
stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)

security_enabled = config['configurations']['cluster-env']['security_enabled']
hdfs_user = status_params.hdfs_user
root_user = "root"
hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
namenode_pid_file = status_params.namenode_pid_file
zkfc_pid_file = status_params.zkfc_pid_file
datanode_pid_file = status_params.datanode_pid_file

# Some datanode settings
dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
dfs_dn_http_addr = default('/configurations/hdfs-site/dfs.datanode.http.address', None)
dfs_dn_https_addr = default('/configurations/hdfs-site/dfs.datanode.https.address', None)
dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
dfs_dn_ipc_address = config['configurations']['hdfs-site']['dfs.datanode.ipc.address']
secure_dn_ports_are_in_use = False

hdfs_tmp_dir = default("/configurations/hadoop-env/hdfs_tmp_dir", "/tmp")
namenode_backup_dir = default("/configurations/hadoop-env/namenode_backup_dir", "/tmp/upgrades")

@@ -113,28 +105,9 @@
hadoop_mapred_home = stack_select.get_hadoop_dir("mapred_home")
hadoop_lib_home = stack_select.get_hadoop_dir("lib")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hadoop_secure_dn_user = hdfs_user
hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
mapreduce_libs_path = format("{hadoop_mapred_home}/*")

if not security_enabled:
  hadoop_secure_dn_user = '""'
else:
  dfs_dn_port = utils.get_port(dfs_dn_addr)
  dfs_dn_http_port = utils.get_port(dfs_dn_http_addr)
  dfs_dn_https_port = utils.get_port(dfs_dn_https_addr)
  # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
  if dfs_http_policy == "HTTPS_ONLY":
    secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port)
  elif dfs_http_policy == "HTTP_AND_HTTPS":
    secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port)
  else: # params.dfs_http_policy == "HTTP_ONLY" or not defined:
    secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)
  if secure_dn_ports_are_in_use:
    hadoop_secure_dn_user = hdfs_user
  else:
    hadoop_secure_dn_user = '""'

# Parameters for upgrade packs
skip_namenode_save_namespace_express = default("/configurations/cluster-env/stack_upgrade_express_skip_namenode_save_namespace", False)
skip_namenode_namedir_backup_express = default("/configurations/cluster-env/stack_upgrade_express_skip_backup_namenode_dir", False)
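Note: the port-policy block removed above is relocated, essentially verbatim, into status_params.py (the next file in the diff), so that the pid-file selection there can depend on secure_dn_ports_are_in_use; the params script gets the shared definitions back through the `from status_params import *` added at the top.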
@@ -25,8 +25,39 @@
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script

import utils

config = Script.get_config()

hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
# Some datanode settings
dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
dfs_dn_http_addr = default('/configurations/hdfs-site/dfs.datanode.http.address', None)
dfs_dn_https_addr = default('/configurations/hdfs-site/dfs.datanode.https.address', None)
dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
dfs_dn_ipc_address = config['configurations']['hdfs-site']['dfs.datanode.ipc.address']
secure_dn_ports_are_in_use = False
hadoop_secure_dn_user = hdfs_user

security_enabled = config['configurations']['cluster-env']['security_enabled']
if not security_enabled:
  hadoop_secure_dn_user = '""'
else:
  dfs_dn_port = utils.get_port(dfs_dn_addr)
  dfs_dn_http_port = utils.get_port(dfs_dn_http_addr)
  dfs_dn_https_port = utils.get_port(dfs_dn_https_addr)
  # Detect root-owned (privileged) ports: the datanode cannot bind them as a
  # plain user, so it must then be started through the secure (root) path
  if dfs_http_policy == "HTTPS_ONLY":
    secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_https_port)
  elif dfs_http_policy == "HTTP_AND_HTTPS":
    secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port) or utils.is_secure_port(dfs_dn_https_port)
  else:  # dfs_http_policy == "HTTP_ONLY" or not defined
    secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)
  if secure_dn_ports_are_in_use:
    hadoop_secure_dn_user = hdfs_user
  else:
    hadoop_secure_dn_user = '""'

if OSCheck.is_windows_family():
  namenode_win_service_name = "namenode"
  datanode_win_service_name = "datanode"
@@ -35,14 +66,12 @@
  zkfc_win_service_name = "zkfc"
else:
  hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
  hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
  hadoop_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")

  root_user = 'root'
  security_enabled = config['configurations']['cluster-env']['security_enabled']
  datanode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-datanode.pid")
  datanode_secure_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-{root_user}-datanode.pid")
  if security_enabled:
  if secure_dn_ports_are_in_use:
    datanode_pid_file = datanode_secure_pid_file

  namenode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
@@ -53,7 +82,6 @@

  # Security related/required params
  hostname = config['agentLevelParams']['hostname']
  security_enabled = config['configurations']['cluster-env']['security_enabled']
  hdfs_user_principal = config['configurations']['hadoop-env']['hdfs_principal_name']
  hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']

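To see the two cases the fix distinguishes, the sketch above can be exercised with illustrative addresses (all values below are examples, not taken from the patch):

secure_privileged = datanode_pid_file(
  security_enabled=True, dfs_http_policy=None,
  dn_addr="0.0.0.0:1019", dn_http_addr="0.0.0.0:1022", dn_https_addr=None)
# -> /var/run/hadoop/hdfs/hadoop-hdfs-root-datanode.pid (root-owned ports,
#    datanode started through the privileged path; old and new code agree)

secure_unprivileged = datanode_pid_file(
  security_enabled=True, dfs_http_policy=None,
  dn_addr="0.0.0.0:10019", dn_http_addr="0.0.0.0:50075", dn_https_addr=None)
# -> /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid (unprivileged ports; the
#    old code wrongly pointed Ambari at the root-owned pid file here)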