Skip to content

Commit

Permalink
Remove code related to webhdfs
Browse files Browse the repository at this point in the history
The webhdfsbrowser extension is no longer deployed in the new
AlmaLinux 9 user image, so the code snippets that were removed
are no longer useful
  • Loading branch information
PMax5 authored and etejedor committed Sep 30, 2024
1 parent c1dd061 commit decac1b
Showing 1 changed file with 4 additions and 7 deletions.
11 changes: 4 additions & 7 deletions SwanSpawner/swanspawner/swandockerspawner.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,6 @@ async def start(self):
hadoop_container_path = '/spark'

# Ensure that env variables are properly cleared
self.env.pop('WEBHDFS_TOKEN', None)
self.env.pop('HADOOP_TOKEN_FILE_LOCATION', None)
self.env.pop('KUBECONFIG', None)

Expand Down Expand Up @@ -236,11 +235,9 @@ async def start(self):
# Set default location for krb5cc in tmp directory for yarn
self.env['KRB5CCNAME'] = '/tmp/krb5cc'

# set location of hadoop token file and webhdfs token for Spark
if os.path.exists(hadoop_host_path + '/hadoop.toks') and os.path.exists(hadoop_host_path + '/webhdfs.toks'):
# set location of hadoop token file for Spark
if os.path.exists(hadoop_host_path + '/hadoop.toks'):
self.env['HADOOP_TOKEN_FILE_LOCATION'] = hadoop_container_path + '/hadoop.toks'
with open(hadoop_host_path + '/webhdfs.toks', 'r') as webhdfs_token_file:
self.env['WEBHDFS_TOKEN'] = webhdfs_token_file.read()
else:
if cluster == 'hadoop-nxcals':
raise ValueError(
Expand All @@ -249,10 +246,10 @@ async def start(self):
Please <a href="http://nxcals-docs.web.cern.ch/current/user-guide/data-access/nxcals-access-request/" target="_blank">request access</a>
""")
elif cluster == 'k8s':
# if there is no HADOOP_TOKEN_FILE or WEBHDFS_TOKEN with K8s we ignore (no HDFS access granted)
# if there is no HADOOP_TOKEN_FILE with K8s we ignore (no HDFS access granted)
pass
else:
# yarn clusters require HADOOP_TOKEN_FILE and WEBHDFS_TOKEN containing YARN and HDFS tokens
# yarn clusters require HADOOP_TOKEN_FILE containing YARN and HDFS tokens
raise ValueError(
"""
Access to the Analytix cluster is not granted.
Expand Down

0 comments on commit decac1b

Please sign in to comment.