Skip to content

Commit

Permalink
remove unused code for testing locally (jupyter-server#405)
Browse files Browse the repository at this point in the history
  • Loading branch information
Zsailer authored and GitHub Enterprise committed Jun 9, 2022
1 parent 2a5f6db commit 6309798
Show file tree
Hide file tree
Showing 10 changed files with 56 additions and 257 deletions.
4 changes: 0 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -31,10 +31,6 @@ test: test-python
check-manifest -v
pre-commit run --all-files

run-remote:
test -f myenv.sh && source myenv.sh && jupyter datastudio --mode local-cluster
test -f myenv.sh || echo "no myenv.sh found" && jupyter datastudio --mode local-cluster

run-local:
export DATASTUDIO_NOTEBOOK_ID="local-notebook"; jupyter datastudio --mode local-local --ServerApp.open_browser=False

Expand Down
35 changes: 35 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

import pytest
from jupyter_server.serverapp import ServerApp
from jwcrypto.jwk import JWK
from jwcrypto.jwt import JWT

from data_studio_jupyter_extensions import constants
from data_studio_jupyter_extensions.app import DS_JUPYTER_SERVER_CONFIG_OVERRIDES
Expand Down Expand Up @@ -133,3 +135,36 @@ def mock_kernelspec():
resource_dir="Unicode()",
metadata={},
)


@pytest.fixture
def json_web_key():
    """Static RSA private key in JWK dict form, used to sign test tokens.

    The members are the standard RFC 7517/7518 RSA JWK fields:
    ``kty``/``kid`` identify the key, ``n``/``e`` are the public modulus
    and exponent, and ``d``/``p``/``q``/``dp``/``dq``/``qi`` are the
    private CRT parameters (all base64url-encoded).

    NOTE(review): this is hard-coded private key material checked into
    the test suite — fine for fixtures, but it must never be reused
    outside tests.
    """
    return {
        "d": "AjY--3WmGQNg5B7qouRlFmezMT5yrSyg8P3nbB29tb9xlnuKlvRpXbw0Ty2O3WTwjKMyjDADSRoJq9mqhHwB7TVFGfOdThWad8rroIKy66zAFcV0KTB5D24WQWO-gkn5o90m6qTAj3-xsalaedyXLdoBHz3RXnxm_wY0Yk7NmtXIu0H28DgGD8xJQ5HxGN3-7foRarB2O0XjV2nvH_a3YY9bSvT57hKeywqbOT22sLy5jg2xF8-VlksYn7IVOHlc13WImuV2FH8Lk2dMmyXwXEkIngZ-KmR3rqGO7aXLXPmTqADgI2TlLQZwpT2YJggMLq8XHpKz2TQ60Ac8wkhHZy_nlNogC_oaeUQw5dJzabF5MHoikDgdTNmz7wHoDOo8y4okR7rkFwoxte7NdvQOk1abQwJOMMXn5IQrnkYdGJ5hJlXSYPB6fyPxZItLj9GnukNvtFoOyudjtlPi8wRRQqQnMVgfYInl9KBMJsTNi86uUv39mE6yXftCIiqMLJI_zqOprOvDrkE2MGm_D_N5axTIVxNL044SvjLDuJ5aixCJkGpnOlAufmAEWCriKfGmCNsB44n75EwOy0Vkpp2Vimac51IcL1ldGV_8CuF5jDwdV4vzbg4Zy7bphwvXm6av5GBJ0XhFZSFCgDh9v9RvtW1AJ48na-3jL7BI0SA4oJE",
        "dp": "GnBb1FW583tVk74jQ-yYnzvMwY_LMP7Sl0UealGFthWBQkhH9blw8FDGHM-WDlpP9RUByzdrVjeQyy6GZ7ir1Ckb5MgwRfZw0reqeC_TJCUJZPcpFUwIavTuLxqlX4Kis5DpYGu6oZ73V3OoKs4HajbtRaXRJ8gPldHJ-PW7APsOIjmPh450j1EgtXOwns1IaexY3DTg2CxbwA2_E5O7XrugL6IYNRdrQ3u_foH6XulidfT77JwSt63UoawXZYsUJpyr_axPm9fSCwSd5A9hwYlaQTsngOO7Sg1umTqJgZ5gXdZs8pDiRAT7gIG7hOfWT5on3W3PiPvoxLI3Xck50Q",
        "dq": "MP69nX6s8stmGqi10ATwTx36Xrqvnad9vLTqEDorKSZD8AKaRqFcRVdzRPFaG3h0B-G1Bmlye_-x_Rja2irUttWZOZlV647tIPziPWyydi__YRScli_OnN75W9jK6pQ5HO6pQPun5lf0_kpYDGOC2hHFaTip84BzJl1JozxSAHYMUR6sm8uWhRNcmHN9Ln37vgSx3LiCu0rZwdFXsIgHw61TWPEo27ECZI6QziFq9OjnVgPH7VjuMEcNcT6fw5M3aHZMiBTbbpy2m1d3HckbkImOn64VMAr_sfItP5-KrB8uoGU1A4gSVAKwF3Yp9bBPApfwFI3GCCVF8xyDktl49w",
        "e": "AQAB",
        "kid": "5RoiFp22GMfkoZmLENDLdjN5eAaYgJKzlPY867ffcgA",
        "kty": "RSA",
        "n": "3KQeRymFIOqILq7bsCJIAd2Nz4sS8hdskH_3b4DoCD4VyqsVn8HH8pWklMtp91n8t9DsquKSKdGLD7erHW2m2quRCT2qKOCnUGtRNxFQBhsuklQjUa8M0hQjpNJfFmYPhfVk1vUrWVQ9UElOywiWk7-yjjLkdAbP2YC-WU2BnMpy9oesr7eiah7OdV1i5Yo_HNVKt94R0hFiTzTQCIKqxjrHn3i_VlO486_V27z0GLWw0I8PAKdQnCBPBI-_iP6QfZ0NziaQ7ud66iGDfIK7HovYqxlm0qhbmyyjV_Ps0Tf3Q53dCV1ZMuT8XmODKoxbdo9Qhr_fO6WbkiNgDm6JvDuXc5za0SlQbFD6P9iXTLwamV-p8itka9mP_1qAL6CedOzDGcdorDaGdUpEVXKWhLilazB5X_qVbHRJX8Rl7dqKk4iUNq54HxRbkN0XKTARMeyJV17sh0V3Ao2qNoqf6NhCOVTAzX__qu2-hHEsZWHxnc2bo6ya9EFRPgpGrwbMQ4hQMN5eNfENyL8C-EonJU9xti9UwqHmj5nTYvo_vsoA9FFJSMAQFYAqztdYUOAHqhqZAr-UxpcWRg8oE0SUEk5TBZppOKEfq3Zn6Y9-kF7i4MxT19TxJ8Al8ErV-ilnMKVSnWtxBp5F6uLI2yPuTWZx0xNcq9VKPhwWHqKxlJs",
        "p": "_KhcwBOlQwP_EdUcEs74dAiCs38rZ8YzJMuULH5SQffDXswyZNb2lYclUCu095PVRf1rcLQE_Qwh2QDhZlIbNNFXWFKcQ3E1uMuPaUcflDOSLdw9rxz01p2Rzpuai-h9p-PQkVMO6D5zEAORv5P6oj4-u83JHKOgD5R8VRkofaaHBzr4WhxfxuAfTZV3eFgQ_gM9UHiJQmpPJvPMYvN-MDDvUuFPVJKj6BKgTIEKaRu_xSY4WeMoOyYpV3NirvCb8pemzB_aFG9PUUB2lMPRvyoGN-VzDHBwzJaIkqtds2Wux-MjDSImQh5LAvUs97Bi00w0ixMS1-NloZhgwYiuyQ",
        "q": "349UipnuqoGqKaZ_mBoZfRvZXwAqo9qhhVBqdnWpC2EIeJqbViul7qKGt7Npue19gHMoDTvHRof1P6m7paD6_FDrisQ6nq44SQqOPNSrOqFCjfuApZ-o9jUeTACenl1s-eHqyJpL7azAiN-Mn4x9XRFnhOlnHMGuvS_teQoFZYqy-DFwEK-13aJQPQeIZ0phx7iPnKh1aw8LQjLS9SP0lnNNF3uROUzpBIbFP4JmGWn3LQZBSkAe9pfsU_xhyJ8umljQlAgebuafhZ_nSH4Sh7qLXeOtDSOpCa0I4_Ej8bPxPAtupOMtQWmtg-GVKbGyF2bD0YyVwJJDDyP2XSomQw",
        "qi": "ywRu8HqEgqhmbM4EDbspHWFbN4CKJcYQWyi6Mf4Ws6YVSYd-RLbOVw5j6BHVOtZD7Hx6Bzcke6uiY46BYJmyOXb40qNyg8NwqN1TOiQHWBBwqUHLFC58Hg6jT2eeM964b5jv8IEiuAgH3PG8t79SrVJEEazjyzweiqtF79Rj7KZjxASKvyayF_oAOETNbnhcqJkfu0ZRdVrp71vJkFO3Sje8TrsLM3QKFzZyo-R8gGQGSm29aHQqSUH43sUohLLibaWttp2yDbofcPvw6bigweMgdDhbh8qu4WV25Ng8gK__ThTC26I9xwMlriiJhWR3HWfvaH5WZjMPU-A0PMhI4A",
    }


@pytest.fixture
def json_web_token(json_web_key):
    """Serialized, RS256-signed JSON web token built from ``json_web_key``.

    The claims carry a fixed ``client_id``, an audience of
    ``notebook-server`` and the corporate IAM issuer so token-validation
    code under test can check them. ``default_claims={"exp": None}``
    presumably lets jwcrypto auto-populate ``exp`` from the current
    system time (its documented behaviour for ``None`` NumericDate
    defaults) — TODO confirm against the jwcrypto JWT docs.

    Returns the compact serialized token string (``jwt.serialize()``).
    """
    key = JWK(**json_web_key)
    jwt = JWT(
        header={"alg": "RS256"},
        default_claims={"exp": None},
        claims={
            "client_id": "client_id",
            "aud": "notebook-server",
            "iss": "iam.corp.apple.com",
        },
    )
    # Sign with the fixture's RSA private key, then emit the compact form.
    jwt.make_signed_token(key)
    return jwt.serialize()
57 changes: 3 additions & 54 deletions data_studio_jupyter_extensions/configurables/notebook_service.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import base64
import json
import secrets
import ssl
Expand Down Expand Up @@ -49,7 +48,7 @@ class NotebookServiceClient(RefreshTokenConfigurable):
client_secret = UnicodeFromEnv(name="IAS_CLIENT_SECRET", allow_none=True).tag(
config=True
)
request_token = Unicode(allow_none=True)
request_token = Unicode(allow_none=True).tag(config=True)

request_timeout = IntFromEnv(
name=constants.DS_API_REQUEST_TIMEOUT, default_value=120, allow_none=True
Expand All @@ -61,43 +60,6 @@ class NotebookServiceClient(RefreshTokenConfigurable):
def _default_http_client(self): # pragma: no cover
return AsyncHTTPClient()

async def fetch_token(self):  # pragma: no cover
    """Exchange ``self.api_token`` for an access token via OAuth2 token exchange.

    POSTs an ``urn:ietf:params:oauth:grant-type:token-exchange`` grant to
    the corporate IAM token endpoint, authenticating the client with HTTP
    Basic auth assembled from ``self.client_id``/``self.client_secret``.

    Returns:
        str: the ``access_token`` field of the IAM JSON response.

    NOTE(review): scope, audience and ``may_act_sub`` are hard-coded to the
    DS-INT (integration) environment — presumably local-testing only;
    confirm before any non-test use.
    """
    # Values copied from the DS-INT UI config
    # (data-platform-ui/config/dsp-ui-dev.json).
    scope = "openid offline corpds:ds:dsid corpds:ds:firstName corpds:ds:lastName corpds:ds:email"
    audience = "notebook-service-int datastudio-int"
    mayActSub = "2320309053"
    url = "https://iam.corp.apple.com/oauth2/token"
    data = dict(
        scope=scope,
        audience=audience,
        subject_token=self.api_token,
        grant_type="urn:ietf:params:oauth:grant-type:token-exchange",
        subject_token_type="urn:ietf:params:oauth:token-type:id_token",
        requested_token_type="urn:ietf:params:oauth:token-type:id_token",
        may_act_sub=mayActSub,
    )

    # HTTP Basic credentials: base64("client_id:client_secret").
    encoded = base64.b64encode(
        f"{self.client_id}:{self.client_secret}".encode("utf-8")
    ).decode("utf-8")
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Authorization": "Basic " + encoded,
    }
    request = HTTPRequest(
        url=url,
        method="POST",
        headers=headers,
        body=urllib.parse.urlencode(data),
        allow_nonstandard_methods=True,
        ca_certs=self.ssl_cert_file,
    )
    response = await self.http_client.fetch(request)
    resp_body = json.loads(response.body)
    token = resp_body["access_token"]
    return token

def get_headers(self, trace_id=None):
"""Get the headers needed for a request"""
headers = {
Expand Down Expand Up @@ -138,7 +100,7 @@ async def fetch(self, *parts, method="GET", data=None):
if self.local_mode:
self.request_token = ""
elif not self.request_token:
self.request_token = await self.fetch_token()
raise Exception("No request token has been set. Try logging in again.")
elif not self.is_token_valid(self.request_token):
self.request_token = await self.fetch_id_token_from_refresh_token()

Expand All @@ -153,12 +115,7 @@ async def fetch(self, *parts, method="GET", data=None):
request = self._get_request(url, method=method, data=data)
response = await self.http_client.fetch(request)
except HTTPClientError as err:
# Fresh token
if err.code == 401 and not self.local_mode:
self.request_token = await self.fetch_token()
request = self._get_request(url, method=method, data=data)
response = await self.http_client.fetch(request)
elif err.code >= 500:
if err.code >= 500:
# Get response from notebook service
response = err.response
code = err.code
Expand Down Expand Up @@ -249,11 +206,3 @@ async def initialize_namespace_for_spark_kernels(
async def get_profile_properties(self, kerneltype_id): # pragma: no cover
"""Get profile properties by kernel type Id."""
raise NotImplementedError


if __name__ == "__main__": # pragma: no cover
import asyncio

nbservice = NotebookServiceClient()
r = asyncio.run(nbservice.get_list_of_kernelspecs_for_notebook())
print(r)
134 changes: 1 addition & 133 deletions data_studio_jupyter_extensions/configurables/provisioner.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
captured by the client and rendered in the notebook status bar.
"""
import asyncio
import os
import socket
import time
from typing import Any
from typing import Dict
Expand All @@ -15,9 +13,6 @@

from jupyter_client import provisioning
from jupyter_client.connect import KernelConnectionInfo
from kubernetes_asyncio import client as kclient
from kubernetes_asyncio import config as kconfig
from kubernetes_asyncio.client.api_client import ApiClient
from tornado.escape import json_decode
from tornado.httpclient import HTTPClientError
from tornado.web import HTTPError
Expand All @@ -39,7 +34,6 @@
from data_studio_jupyter_extensions.traits import IntFromEnv
from data_studio_jupyter_extensions.traits import UnicodeFromEnv
from data_studio_jupyter_extensions.utils import get_available_mode_names
from data_studio_jupyter_extensions.utils import run_async


class KernelFailedError(Exception):
Expand Down Expand Up @@ -127,118 +121,6 @@ def connection_info(self) -> KernelConnectionInfo:
)
return info

async def _run_kube_method(
    self, method_type, name, quiet=False
):  # pragma: no cover
    """Invoke a CoreV1 ``*_namespaced_service`` method on a service by name.

    Args:
        method_type: verb prefix (e.g. ``"read"``, ``"delete"``) used to
            resolve ``{method_type}_namespaced_service`` on the CoreV1 API.
        name: Kubernetes service name within ``self.namespace``.
        quiet: when True, suppress the debug log line.

    Returns:
        Whatever the resolved kubernetes_asyncio API call returns.
    """
    if not quiet:
        self.log.debug(f"{method_type}ing service {name}")
    # Loads local kubeconfig credentials before each call.
    await kconfig.load_kube_config()

    # use the context manager to close http sessions automatically
    async with ApiClient() as api:

        v1 = kclient.CoreV1Api(api)
        method = getattr(v1, f"{method_type}_namespaced_service")
        return await method(namespace=self.namespace, name=name)

async def open_route(self, process_id):  # pragma: no cover
    """Connect to a remote kernel identified by *process_id*.

    Returns a tuple ``(ip, ports)`` where ``ports`` maps port name
    (``hb``/``control``/``iopub``/``shell``/``stdin``) to port number.

    Prerequisites: ``kcli init``. Only used when LOCAL_MODE is set, in
    place of the env-variable-based launch path. Representative URL
    (``process_id`` is the ``kernelId`` query parameter):
    https://ds-int.apple.com/projects/2j1cd6f8ekj1/notebooks/servers/p4xpnb1208u4/kernels/launch?notebookName=Untitled.ipynb&kernelspecId=bkvgsydsi1rd&kernelspecDisplayName=Python+3&kernelspecLang=python&kernelId=vhz7kghpaujd
    """
    logger = self.log
    here = os.path.abspath(os.path.dirname(__file__))

    # First check if the kernel is already set up and accessible
    try:
        name = f"ipython-{process_id}-kernel-service-iopub"
        ret = await self._run_kube_method("read", name)
        ip = ret.spec.external_i_ps[0]

        message = f"Connecting to running kernel for {process_id}"
        self._emit_kernel_message(message)
        connected = True
    except Exception as e:
        logger.error(str(e))
        connected = False

    # Create the connection if needed
    if not connected:
        message = f"Creating a connection to kernel: {process_id}"
        self._emit_kernel_message(message)

        # Remove the root service and network policy created by notebook-service
        name = f"ipython-{process_id}-kernel-service"
        try:
            await self._run_kube_method("delete", name)
        except Exception as e:
            logger.error(e)

        cmd = f"kubectl delete AppleNetworkPolicy ipython-{process_id}"
        await run_async(cmd, logger)

        # Install the helm chart (uninstall any stale release first)
        cmd = "helm uninstall remote-kernel"
        await run_async(cmd, logger)

        helm_file = os.path.abspath(os.path.join(here, "..", "helm"))
        cmd = f"helm install --set process={process_id} remote-kernel {helm_file}"
        ret = await run_async(cmd, logger)
        assert ret == 0, "Could not run helm install"

    # Get the port map from the per-channel service annotations
    ports = {}
    annotation = "pie.traffic.plb/tcp_service_port"

    for name in ["hb", "control", "iopub", "shell", "stdin"]:
        service = f"ipython-{process_id}-kernel-service-{name}"
        # NOTE(review): `cmd` is only bound inside the `if not connected:`
        # branch above — this line raises NameError when we attached to an
        # already-running kernel.
        self._emit_kernel_message(f"> {cmd}")
        attempts = 0
        # NOTE(review): `attempts` is never incremented in this loop, so it
        # polls forever if the annotation never appears, and the
        # `attempts == 10` guard below can never fire.
        while attempts < 100:
            ret = await self._run_kube_method("read", service, quiet=True)
            port = ret.metadata.annotations.get(annotation, "")
            if port:
                ports[name] = int(port)
                break
            await asyncio.sleep(0.1)
        if attempts == 10:
            raise ValueError(f"Could not find port for {name}")

    # Establish a connection to the shell and iopub ports
    name = f"ipython-{process_id}-kernel-service-iopub"
    ret = await self._run_kube_method("read", name)
    ip = ret.spec.external_i_ps[0]

    for name in ["shell", "iopub"]:
        port = ports[name]
        location = (ip, port)
        attempts = 0
        message = f"Connecting to {name} on {ip}:{port}"
        self._emit_kernel_message(message)
        # Poll the TCP port until it accepts connections (~10 min max:
        # 6000 attempts x 0.1 s sleep, plus the 0.1 s connect timeout).
        allowed_attempts = 6000
        while attempts < allowed_attempts:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(0.1)
            result = sock.connect_ex(location)
            if result == 0:
                break
            attempts += 1
            await asyncio.sleep(0.1)
        if attempts == allowed_attempts:
            raise ValueError(f"Could not connect to port for {name}")

    return ip, ports

@property
def has_process(self) -> bool:
"""
Expand Down Expand Up @@ -403,22 +285,8 @@ async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:

async def _fetch_connection_info(self) -> None:
"""
Fetch connection info from notebook-service (or the control
plane in local-cluster mode).
Fetch connection info from notebook-service
"""
if self.mode == "local-cluster":
# Ports and host will depend on the ingress routes
# set by the kubernetes cluster.
host, port_map = await self.open_route(self.process_id)
# set up the connection info
self.iopub_port = port_map["iopub"]
self.hb_port = port_map["hb"]
self.control_port = port_map["control"]
self.shell_port = port_map["shell"]
self.stdin_port = port_map["stdin"]
self.ip = self.ip or host
return

r = await self.nbservice_client.get_kernel_details(self.process_id)
kernel_info = json_decode(r.body)
self.ip = self.ip or kernel_info["host"]
Expand Down
4 changes: 0 additions & 4 deletions data_studio_jupyter_extensions/modes/local_cluster.py

This file was deleted.

36 changes: 0 additions & 36 deletions data_studio_jupyter_extensions/tests/auth/conftest.py

This file was deleted.

Loading

0 comments on commit 6309798

Please sign in to comment.